applied-ai-018 committed
Commit 7d19378 · verified · 1 parent: 1472e57

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See the raw diff for the complete set.
Files changed (50):
  1. ckpts/universal/global_step80/zero/14.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt +3 -0
  2. ckpts/universal/global_step80/zero/20.mlp.dense_4h_to_h.weight/fp32.pt +3 -0
  3. ckpts/universal/global_step80/zero/24.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
  4. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/INSTALLER +1 -0
  5. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/LICENSE +502 -0
  6. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/METADATA +97 -0
  7. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/RECORD +107 -0
  8. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/WHEEL +5 -0
  9. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/entry_points.txt +2 -0
  10. venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/top_level.txt +1 -0
  11. venv/lib/python3.10/site-packages/hjson-3.1.0.dist-info/RECORD +76 -0
  12. venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER +1 -0
  13. venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt +1568 -0
  14. venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA +36 -0
  15. venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD +17 -0
  16. venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL +5 -0
  17. venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt +1 -0
  18. venv/lib/python3.10/site-packages/pandas/__init__.py +367 -0
  19. venv/lib/python3.10/site-packages/pandas/_typing.py +525 -0
  20. venv/lib/python3.10/site-packages/pandas/_version.py +692 -0
  21. venv/lib/python3.10/site-packages/pandas/_version_meson.py +2 -0
  22. venv/lib/python3.10/site-packages/pandas/api/__init__.py +16 -0
  23. venv/lib/python3.10/site-packages/pandas/api/__pycache__/__init__.cpython-310.pyc +0 -0
  24. venv/lib/python3.10/site-packages/pandas/api/extensions/__init__.py +33 -0
  25. venv/lib/python3.10/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/pandas/api/indexers/__init__.py +17 -0
  27. venv/lib/python3.10/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-310.pyc +0 -0
  28. venv/lib/python3.10/site-packages/pandas/api/interchange/__init__.py +8 -0
  29. venv/lib/python3.10/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-310.pyc +0 -0
  30. venv/lib/python3.10/site-packages/pandas/api/types/__init__.py +23 -0
  31. venv/lib/python3.10/site-packages/pandas/api/types/__pycache__/__init__.cpython-310.pyc +0 -0
  32. venv/lib/python3.10/site-packages/pandas/api/typing/__init__.py +55 -0
  33. venv/lib/python3.10/site-packages/pandas/api/typing/__pycache__/__init__.cpython-310.pyc +0 -0
  34. venv/lib/python3.10/site-packages/pandas/conftest.py +1965 -0
  35. venv/lib/python3.10/site-packages/pandas/pyproject.toml +801 -0
  36. venv/lib/python3.10/site-packages/pandas/testing.py +18 -0
  37. venv/lib/python3.10/site-packages/pandas/tests/__init__.py +0 -0
  38. venv/lib/python3.10/site-packages/pandas/tests/frame/__init__.py +0 -0
  39. venv/lib/python3.10/site-packages/pandas/tests/frame/common.py +63 -0
  40. venv/lib/python3.10/site-packages/pandas/tests/frame/conftest.py +100 -0
  41. venv/lib/python3.10/site-packages/pandas/tests/frame/test_alter_axes.py +30 -0
  42. venv/lib/python3.10/site-packages/pandas/tests/frame/test_arithmetic.py +2136 -0
  43. venv/lib/python3.10/site-packages/pandas/tests/frame/test_arrow_interface.py +45 -0
  44. venv/lib/python3.10/site-packages/pandas/tests/frame/test_block_internals.py +457 -0
  45. venv/lib/python3.10/site-packages/pandas/tests/frame/test_constructors.py +0 -0
  46. venv/lib/python3.10/site-packages/pandas/tests/frame/test_cumulative.py +81 -0
  47. venv/lib/python3.10/site-packages/pandas/tests/frame/test_iteration.py +160 -0
  48. venv/lib/python3.10/site-packages/pandas/tests/frame/test_logical_ops.py +218 -0
  49. venv/lib/python3.10/site-packages/pandas/tests/frame/test_nonunique_indexes.py +337 -0
  50. venv/lib/python3.10/site-packages/pandas/tests/frame/test_npfuncs.py +89 -0
ckpts/universal/global_step80/zero/14.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12d3aaf19635df34c0d00d013d2be4625b14e08027937b94a97e91a9d03a08bd
+ size 33555627
ckpts/universal/global_step80/zero/20.mlp.dense_4h_to_h.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56c3e5a58bd448987d60457944256d02648c7eb8c8d1b942da6731e1c929c994
+ size 33555533
ckpts/universal/global_step80/zero/24.attention.query_key_value.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d0ea9e34122f1c7124b679892b05c4c6610f2c20aaa7d5e2751c11eda885125
+ size 50332843
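The three checkpoint diffs above are Git LFS pointer files rather than the tensors themselves: each records the pointer-spec version, the SHA-256 of the actual blob, and its size in bytes. A minimal sketch of reading one such pointer (the `parse_lfs_pointer` helper and the inline sample are illustrative, not part of this commit):

```python
# Minimal sketch: parse a Git LFS pointer file like the ones above.
# The format is simple "key value" lines; "oid" carries the SHA-256 of
# the blob stored in LFS, "size" its length in bytes.

def parse_lfs_pointer(text: str) -> dict:
    """Return the key/value pairs of a Git LFS pointer file."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer_text = """\
version https://git-lfs.github.com/spec/v1
oid sha256:12d3aaf19635df34c0d00d013d2be4625b14e08027937b94a97e91a9d03a08bd
size 33555627
"""

info = parse_lfs_pointer(pointer_text)
algo, _, digest = info["oid"].partition(":")
print(algo, digest[:12], int(info["size"]))  # sha256 12d3aaf19635 33555627
```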
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
+ pip
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/LICENSE ADDED
@@ -0,0 +1,502 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ [This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+ freedom to share and change it. By contrast, the GNU General Public
+ Licenses are intended to guarantee your freedom to share and change
+ free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+ specially designated software packages--typically libraries--of the
+ Free Software Foundation and other authors who decide to use it. You
+ can use it too, but we suggest you first think carefully about whether
+ this license or the ordinary General Public License is the better
+ strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+ not price. Our General Public Licenses are designed to make sure that
+ you have the freedom to distribute copies of free software (and charge
+ for this service if you wish); that you receive source code or can get
+ it if you want it; that you can change the software and use pieces of
+ it in new free programs; and that you are informed that you can do
+ these things.
+
+ To protect your rights, we need to make restrictions that forbid
+ distributors to deny you these rights or to ask you to surrender these
+ rights. These restrictions translate to certain responsibilities for
+ you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+ or for a fee, you must give the recipients all the rights that we gave
+ you. You must make sure that they, too, receive or can get the source
+ code. If you link other code with the library, you must provide
+ complete object files to the recipients, so that they can relink them
+ with the library after making changes to the library and recompiling
+ it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+ library, and (2) we offer you this license, which gives you legal
+ permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+ there is no warranty for the free library. Also, if the library is
+ modified by someone else and passed on, the recipients should know
+ that what they have is not the original version, so that the original
+ author's reputation will not be affected by problems that might be
+ introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+ any free program. We wish to make sure that a company cannot
+ effectively restrict the users of a free program by obtaining a
+ restrictive license from a patent holder. Therefore, we insist that
+ any patent license obtained for a version of the library must be
+ consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ ordinary GNU General Public License. This license, the GNU Lesser
+ General Public License, applies to certain designated libraries, and
+ is quite different from the ordinary General Public License. We use
+ this license for certain libraries in order to permit linking those
+ libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+ a shared library, the combination of the two is legally speaking a
+ combined work, a derivative of the original library. The ordinary
+ General Public License therefore permits such linking only if the
+ entire combination fits its criteria of freedom. The Lesser General
+ Public License permits more lax criteria for linking other code with
+ the library.
+
+ We call this license the "Lesser" General Public License because it
+ does Less to protect the user's freedom than the ordinary General
+ Public License. It also provides other free software developers Less
+ of an advantage over competing non-free programs. These disadvantages
+ are the reason we use the ordinary General Public License for many
+ libraries. However, the Lesser license provides advantages in certain
+ special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+ encourage the widest possible use of a certain library, so that it becomes
+ a de-facto standard. To achieve this, non-free programs must be
+ allowed to use the library. A more frequent case is that a free
+ library does the same job as widely used non-free libraries. In this
+ case, there is little to gain by limiting the free library to free
+ software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+ programs enables a greater number of people to use a large body of
+ free software. For example, permission to use the GNU C Library in
+ non-free programs enables many more people to use the whole GNU
+ operating system, as well as its variant, the GNU/Linux operating
+ system.
+
+ Although the Lesser General Public License is Less protective of the
+ users' freedom, it does ensure that the user of a program that is
+ linked with the Library has the freedom and the wherewithal to run
+ that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+ modification follow. Pay close attention to the difference between a
+ "work based on the library" and a "work that uses the library". The
+ former contains code derived from the library, whereas the latter must
+ be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+ program which contains a notice placed by the copyright holder or
+ other authorized party saying it may be distributed under the terms of
+ this Lesser General Public License (also called "this License").
+ Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+ prepared so as to be conveniently linked with application programs
+ (which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+ which has been distributed under these terms. A "work based on the
+ Library" means either the Library or any derivative work under
+ copyright law: that is to say, a work containing the Library or a
+ portion of it, either verbatim or with modifications and/or translated
+ straightforwardly into another language. (Hereinafter, translation is
+ included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+ making modifications to it. For a library, complete source code means
+ all the source code for all modules it contains, plus any associated
+ interface definition files, plus the scripts used to control compilation
+ and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+ covered by this License; they are outside its scope. The act of
+ running a program using the Library is not restricted, and output from
+ such a program is covered only if its contents constitute a work based
+ on the Library (independent of the use of the Library in a tool for
+ writing it). Whether that is true depends on what the Library does
+ and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+ complete source code as you receive it, in any medium, provided that
+ you conspicuously and appropriately publish on each copy an
+ appropriate copyright notice and disclaimer of warranty; keep intact
+ all the notices that refer to this License and to the absence of any
+ warranty; and distribute a copy of this License along with the
+ Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+ and you may at your option offer warranty protection in exchange for a
+ fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+ of it, thus forming a work based on the Library, and copy and
+ distribute such modifications or work under the terms of Section 1
+ above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+ These requirements apply to the modified work as a whole. If
+ identifiable sections of that work are not derived from the Library,
+ and can be reasonably considered independent and separate works in
+ themselves, then this License, and its terms, do not apply to those
+ sections when you distribute them as separate works. But when you
+ distribute the same sections as part of a whole which is a work based
+ on the Library, the distribution of the whole must be on the terms of
+ this License, whose permissions for other licensees extend to the
+ entire whole, and thus to each and every part regardless of who wrote
+ it.
+
+ Thus, it is not the intent of this section to claim rights or contest
+ your rights to work written entirely by you; rather, the intent is to
+ exercise the right to control the distribution of derivative or
+ collective works based on the Library.
+
+ In addition, mere aggregation of another work not based on the Library
+ with the Library (or with a work based on the Library) on a volume of
+ a storage or distribution medium does not bring the other work under
+ the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+ License instead of this License to a given copy of the Library. To do
+ this, you must alter all the notices that refer to this License, so
+ that they refer to the ordinary GNU General Public License, version 2,
+ instead of to this License. (If a newer version than version 2 of the
+ ordinary GNU General Public License has appeared, then you can specify
+ that version instead if you wish.) Do not make any other change in
+ these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+ that copy, so the ordinary GNU General Public License applies to all
+ subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+ the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+ derivative of it, under Section 2) in object code or executable form
+ under the terms of Sections 1 and 2 above provided that you accompany
+ it with the complete corresponding machine-readable source code, which
+ must be distributed under the terms of Sections 1 and 2 above on a
+ medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+ from a designated place, then offering equivalent access to copy the
+ source code from the same place satisfies the requirement to
+ distribute the source code, even though third parties are not
+ compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+ Library, but is designed to work with the Library by being compiled or
+ linked with it, is called a "work that uses the Library". Such a
+ work, in isolation, is not a derivative work of the Library, and
+ therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+ creates an executable that is a derivative of the Library (because it
+ contains portions of the Library), rather than a "work that uses the
+ library". The executable is therefore covered by this License.
+ Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+ that is part of the Library, the object code for the work may be a
+ derivative work of the Library even though the source code is not.
+ Whether this is true is especially significant if the work can be
+ linked without the Library, or if the work is itself a library. The
+ threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+ structure layouts and accessors, and small macros and small inline
+ functions (ten lines or less in length), then the use of the object
+ file is unrestricted, regardless of whether it is legally a derivative
+ work. (Executables containing this object code plus portions of the
+ Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+ distribute the object code for the work under the terms of Section 6.
+ Any executables containing that work also fall under Section 6,
+ whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+ link a "work that uses the Library" with the Library to produce a
+ work containing portions of the Library, and distribute that work
+ under terms of your choice, provided that the terms permit
+ modification of the work for the customer's own use and reverse
+ engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+ Library is used in it and that the Library and its use are covered by
+ this License. You must supply a copy of this License. If the work
+ during execution displays copyright notices, you must include the
+ copyright notice for the Library among them, as well as a reference
+ directing the user to the copy of this License. Also, you must do one
+ of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+ Library" must include any data and utility programs needed for
+ reproducing the executable from it. However, as a special exception,
+ the materials to be distributed need not include anything that is
+ normally distributed (in either source or binary form) with the major
+ components (compiler, kernel, and so on) of the operating system on
+ which the executable runs, unless that component itself accompanies
+ the executable.
+
+ It may happen that this requirement contradicts the license
+ restrictions of other proprietary libraries that do not normally
+ accompany the operating system. Such a contradiction means you cannot
+ use both them and the Library together in an executable that you
+ distribute.
+
+ 7. You may place library facilities that are a work based on the
+ Library side-by-side in a single library together with other library
+ facilities not covered by this License, and distribute such a combined
+ library, provided that the separate distribution of the work based on
+ the Library and of the other library facilities is otherwise
+ permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+ the Library except as expressly provided under this License. Any
+ attempt otherwise to copy, modify, sublicense, link with, or
+ distribute the Library is void, and will automatically terminate your
+ rights under this License. However, parties who have received copies,
+ or rights, from you under this License will not have their licenses
+ terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+ signed it. However, nothing else grants you permission to modify or
+ distribute the Library or its derivative works. These actions are
+ prohibited by law if you do not accept this License. Therefore, by
+ modifying or distributing the Library (or any work based on the
+ Library), you indicate your acceptance of this License to do so, and
+ all its terms and conditions for copying, distributing or modifying
+ the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+ Library), the recipient automatically receives a license from the
+ original licensor to copy, distribute, link with or modify the Library
+ subject to these terms and conditions. You may not impose any further
+ restrictions on the recipients' exercise of the rights granted herein.
+ You are not responsible for enforcing compliance by third parties with
+ this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+ infringement or for any other reason (not limited to patent issues),
+ conditions are imposed on you (whether by court order, agreement or
+ otherwise) that contradict the conditions of this License, they do not
+ excuse you from the conditions of this License. If you cannot
+ distribute so as to satisfy simultaneously your obligations under this
+ License and any other pertinent obligations, then as a consequence you
+ may not distribute the Library at all. For example, if a patent
+ license would not permit royalty-free redistribution of the Library by
+ all those who receive copies directly or indirectly through you, then
+ the only way you could satisfy both it and this License would be to
+ refrain entirely from distribution of the Library.
+
+ If any portion of this section is held invalid or unenforceable under any
+ particular circumstance, the balance of the section is intended to apply,
+ and the section as a whole is intended to apply in other circumstances.
+
+ It is not the purpose of this section to induce you to infringe any
+ patents or other property right claims or to contest validity of any
+ such claims; this section has the sole purpose of protecting the
+ integrity of the free software distribution system which is
+ implemented by public license practices. Many people have made
+ generous contributions to the wide range of software distributed
+ through that system in reliance on consistent application of that
+ system; it is up to the author/donor to decide if he or she is willing
+ to distribute software through any other system and a licensee cannot
+ impose that choice.
+
+ This section is intended to make thoroughly clear what is believed to
+ be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+ certain countries either by patents or by copyrighted interfaces, the
+ original copyright holder who places the Library under this License may add
+ an explicit geographical distribution limitation excluding those countries,
+ so that distribution is permitted only in or among countries not thus
+ excluded. In such case, this License incorporates the limitation as if
+ written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+ versions of the Lesser General Public License from time to time.
+ Such new versions will be similar in spirit to the present version,
+ but may differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the Library
+ specifies a version number of this License which applies to it and
+ "any later version", you have the option of following the terms and
+ conditions either of that version or of any later version published by
+ the Free Software Foundation. If the Library does not specify a
+ license version number, you may choose any version ever published by
+ the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+ programs whose distribution conditions are incompatible with these,
+ write to the author to ask for permission. For software which is
+ copyrighted by the Free Software Foundation, write to the Free
+ Software Foundation; we sometimes make exceptions for this. Our
+ decision will be guided by the two goals of preserving the free status
+ of all derivatives of our free software and of promoting the sharing
+ and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+ WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+ EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+ OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+ KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+ LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+ THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+ WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+ AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+ FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+ CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+ LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+ RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+ FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+ SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+ possible use to the public, we recommend making it free software that
+ everyone can redistribute and change. You can do so by permitting
+ redistribution under these terms (or, alternatively, under the terms of the
+ ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+ safest to attach them to the start of each source file to most effectively
+ convey the exclusion of warranty; and each file should have at least the
+ "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the library's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+ Also add information on how to contact you by electronic and paper mail.
+
+ You should also get your employer (if you work as a programmer) or your
+ school, if any, to sign a "copyright disclaimer" for the library, if
+ necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+ <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+ That's all there is to it!
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/METADATA ADDED
@@ -0,0 +1,97 @@
+ Metadata-Version: 2.1
+ Name: chardet
+ Version: 5.2.0
+ Summary: Universal encoding detector for Python 3
+ Home-page: https://github.com/chardet/chardet
+ Author: Mark Pilgrim
+ Author-email: [email protected]
+ Maintainer: Daniel Blanchard
+ Maintainer-email: [email protected]
+ License: LGPL
+ Project-URL: Documentation, https://chardet.readthedocs.io/
+ Project-URL: GitHub Project, https://github.com/chardet/chardet
+ Project-URL: Issue Tracker, https://github.com/chardet/chardet/issues
+ Keywords: encoding,i18n,xml
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.7
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: Implementation :: CPython
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Classifier: Topic :: Text Processing :: Linguistic
+ Requires-Python: >=3.7
+ License-File: LICENSE
+
+ Chardet: The Universal Character Encoding Detector
+ --------------------------------------------------
+
+ .. image:: https://img.shields.io/travis/chardet/chardet/stable.svg
+ :alt: Build status
+ :target: https://travis-ci.org/chardet/chardet
+
+ .. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg
+ :target: https://coveralls.io/r/chardet/chardet
+
+ .. image:: https://img.shields.io/pypi/v/chardet.svg
+ :target: https://warehouse.python.org/project/chardet/
+ :alt: Latest version on PyPI
+
+ .. image:: https://img.shields.io/pypi/l/chardet.svg
+ :alt: License
+
+
+ Detects
+ - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants)
+ - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese)
+ - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese)
+ - EUC-KR, ISO-2022-KR, Johab (Korean)
+ - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic)
+ - ISO-8859-5, windows-1251 (Bulgarian)
+ - ISO-8859-1, windows-1252, MacRoman (Western European languages)
+ - ISO-8859-7, windows-1253 (Greek)
+ - ISO-8859-8, windows-1255 (Visual and Logical Hebrew)
+ - TIS-620 (Thai)
+
+ .. note::
+ Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily
+ disabled until we can retrain the models.
+
+ Requires Python 3.7+.
+
+ Installation
+ ------------
+
+ Install from `PyPI <https://pypi.org/project/chardet/>`_::
+
+ pip install chardet
+
+ Documentation
+ -------------
+
+ For users, docs are now available at https://chardet.readthedocs.io/.
+
+ Command-line Tool
+ -----------------
+
+ chardet comes with a command-line script which reports on the encodings of one
+ or more files::
+
+ % chardetect somefile someotherfile
+ somefile: windows-1252 with confidence 0.5
+ someotherfile: ascii with confidence 1.0
+
+ About
+ -----
+
+ This is a continuation of Mark Pilgrim's excellent original chardet port from C, and `Ian Cordasco <https://github.com/sigmavirus24>`_'s
+ `charade <https://github.com/sigmavirus24/charade>`_ Python 3-compatible fork.
+
+ :maintainer: Dan Blanchard
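The METADATA above documents the `chardetect` command-line tool; the same detection is available from Python through `chardet.detect()` and, for incremental use, `chardet.universaldetector.UniversalDetector`. A short usage sketch (the sample byte string is illustrative, not from the commit):

```python
# Sketch of chardet's library API, mirroring the CLI shown in METADATA.
# chardet.detect() returns a dict with the guessed encoding, a confidence
# score between 0.0 and 1.0, and a language field.
import chardet

raw = "Liberté, égalité, fraternité".encode("windows-1252")  # sample bytes
result = chardet.detect(raw)
print(result["encoding"], result["confidence"])

# For large inputs, feed chunks to UniversalDetector and stop early
# once it is confident, instead of hashing the whole file at once.
from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
for chunk in (raw[i:i + 16] for i in range(0, len(raw), 16)):
    detector.feed(chunk)
    if detector.done:
        break
detector.close()
print(detector.result)
```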
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/RECORD ADDED
@@ -0,0 +1,107 @@
+ ../../../bin/chardetect,sha256=_kiNDcCkZV9nIa4bKK1rfjfgbUvQBc5-VGUKJHh7Y2c,250
+ chardet-5.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ chardet-5.2.0.dist-info/LICENSE,sha256=3GJlINzVOiL3J68-5Cx3DlbJemT-OtsGN5nYqwMv5VE,26530
+ chardet-5.2.0.dist-info/METADATA,sha256=PAr2NQ6hQWpjyFnwlI7MoxHt2S_6oRiUsucOKMNhzGw,3418
+ chardet-5.2.0.dist-info/RECORD,,
+ chardet-5.2.0.dist-info/WHEEL,sha256=AtBG6SXL3KF_v0NxLf0ehyVOh0cold-JbJYXNGorC6Q,92
+ chardet-5.2.0.dist-info/entry_points.txt,sha256=_cdvYc4jyY68GYfsQAAthNMxO-yodcGkvNC1xOEsLmI,59
+ chardet-5.2.0.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8
+ chardet/__init__.py,sha256=57R-HSxj0PWmILMN0GFmUNqEMfrEVSamXyjD-W6_fbs,4797
+ chardet/__main__.py,sha256=puNj2o_QfBRKElEkiVp1zEIL1gGYD2o-JuXLFlqHDC4,123
+ chardet/__pycache__/__init__.cpython-310.pyc,,
+ chardet/__pycache__/__main__.cpython-310.pyc,,
+ chardet/__pycache__/big5freq.cpython-310.pyc,,
+ chardet/__pycache__/big5prober.cpython-310.pyc,,
+ chardet/__pycache__/chardistribution.cpython-310.pyc,,
+ chardet/__pycache__/charsetgroupprober.cpython-310.pyc,,
+ chardet/__pycache__/charsetprober.cpython-310.pyc,,
+ chardet/__pycache__/codingstatemachine.cpython-310.pyc,,
+ chardet/__pycache__/codingstatemachinedict.cpython-310.pyc,,
+ chardet/__pycache__/cp949prober.cpython-310.pyc,,
+ chardet/__pycache__/enums.cpython-310.pyc,,
+ chardet/__pycache__/escprober.cpython-310.pyc,,
+ chardet/__pycache__/escsm.cpython-310.pyc,,
+ chardet/__pycache__/eucjpprober.cpython-310.pyc,,
+ chardet/__pycache__/euckrfreq.cpython-310.pyc,,
+ chardet/__pycache__/euckrprober.cpython-310.pyc,,
+ chardet/__pycache__/euctwfreq.cpython-310.pyc,,
+ chardet/__pycache__/euctwprober.cpython-310.pyc,,
+ chardet/__pycache__/gb2312freq.cpython-310.pyc,,
+ chardet/__pycache__/gb2312prober.cpython-310.pyc,,
+ chardet/__pycache__/hebrewprober.cpython-310.pyc,,
+ chardet/__pycache__/jisfreq.cpython-310.pyc,,
+ chardet/__pycache__/johabfreq.cpython-310.pyc,,
+ chardet/__pycache__/johabprober.cpython-310.pyc,,
+ chardet/__pycache__/jpcntx.cpython-310.pyc,,
+ chardet/__pycache__/langbulgarianmodel.cpython-310.pyc,,
+ chardet/__pycache__/langgreekmodel.cpython-310.pyc,,
+ chardet/__pycache__/langhebrewmodel.cpython-310.pyc,,
+ chardet/__pycache__/langhungarianmodel.cpython-310.pyc,,
+ chardet/__pycache__/langrussianmodel.cpython-310.pyc,,
+ chardet/__pycache__/langthaimodel.cpython-310.pyc,,
+ chardet/__pycache__/langturkishmodel.cpython-310.pyc,,
+ chardet/__pycache__/latin1prober.cpython-310.pyc,,
+ chardet/__pycache__/macromanprober.cpython-310.pyc,,
+ chardet/__pycache__/mbcharsetprober.cpython-310.pyc,,
+ chardet/__pycache__/mbcsgroupprober.cpython-310.pyc,,
+ chardet/__pycache__/mbcssm.cpython-310.pyc,,
+ chardet/__pycache__/resultdict.cpython-310.pyc,,
+ chardet/__pycache__/sbcharsetprober.cpython-310.pyc,,
+ chardet/__pycache__/sbcsgroupprober.cpython-310.pyc,,
+ chardet/__pycache__/sjisprober.cpython-310.pyc,,
+ chardet/__pycache__/universaldetector.cpython-310.pyc,,
+ chardet/__pycache__/utf1632prober.cpython-310.pyc,,
+ chardet/__pycache__/utf8prober.cpython-310.pyc,,
+ chardet/__pycache__/version.cpython-310.pyc,,
+ chardet/big5freq.py,sha256=ltcfP-3PjlNHCoo5e4a7C4z-2DhBTXRfY6jbMbB7P30,31274
+ chardet/big5prober.py,sha256=lPMfwCX6v2AaPgvFh_cSWZcgLDbWiFCHLZ_p9RQ9uxE,1763
+ chardet/chardistribution.py,sha256=13B8XUG4oXDuLdXvfbIWwLFeR-ZU21AqTS1zcdON8bU,10032
+ chardet/charsetgroupprober.py,sha256=UKK3SaIZB2PCdKSIS0gnvMtLR9JJX62M-fZJu3OlWyg,3915
+ chardet/charsetprober.py,sha256=L3t8_wIOov8em-vZWOcbkdsrwe43N6_gqNh5pH7WPd4,5420
+ chardet/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ chardet/cli/__pycache__/__init__.cpython-310.pyc,,
+ chardet/cli/__pycache__/chardetect.cpython-310.pyc,,
+ chardet/cli/chardetect.py,sha256=zibMVg5RpKb-ME9_7EYG4ZM2Sf07NHcQzZ12U-rYJho,3242
+ chardet/codingstatemachine.py,sha256=K7k69sw3jY5DmTXoSJQVsUtFIQKYPQVOSJJhBuGv_yE,3732
+ chardet/codingstatemachinedict.py,sha256=0GY3Hi2qIZvDrOOJ3AtqppM1RsYxr_66ER4EHjuMiMc,542
+ chardet/cp949prober.py,sha256=0jKRV7fECuWI16rNnks0ZECKA1iZYCIEaP8A1ZvjUSI,1860
+ chardet/enums.py,sha256=TzECiZoCKNMqgwU76cPCeKWFBqaWvAdLMev5_bCkhY8,1683
+ chardet/escprober.py,sha256=Kho48X65xE0scFylIdeJjM2bcbvRvv0h0WUbMWrJD3A,4006
+ chardet/escsm.py,sha256=AqyXpA2FQFD7k-buBty_7itGEYkhmVa8X09NLRul3QM,12176
+ chardet/eucjpprober.py,sha256=5KYaM9fsxkRYzw1b5k0fL-j_-ezIw-ij9r97a9MHxLY,3934
+ chardet/euckrfreq.py,sha256=3mHuRvXfsq_QcQysDQFb8qSudvTiol71C6Ic2w57tKM,13566
+ chardet/euckrprober.py,sha256=hiFT6wM174GIwRvqDsIcuOc-dDsq2uPKMKbyV8-1Xnc,1753
+ chardet/euctwfreq.py,sha256=2alILE1Lh5eqiFJZjzRkMQXolNJRHY5oBQd-vmZYFFM,36913
+ chardet/euctwprober.py,sha256=NxbpNdBtU0VFI0bKfGfDkpP7S2_8_6FlO87dVH0ogws,1753
+ chardet/gb2312freq.py,sha256=49OrdXzD-HXqwavkqjo8Z7gvs58hONNzDhAyMENNkvY,20735
+ chardet/gb2312prober.py,sha256=KPEBueaSLSvBpFeINMu0D6TgHcR90e5PaQawifzF4o0,1759
+ chardet/hebrewprober.py,sha256=96T_Lj_OmW-fK7JrSHojYjyG3fsGgbzkoTNleZ3kfYE,14537
+ chardet/jisfreq.py,sha256=mm8tfrwqhpOd3wzZKS4NJqkYBQVcDfTM2JiQ5aW932E,25796
+ chardet/johabfreq.py,sha256=dBpOYG34GRX6SL8k_LbS9rxZPMjLjoMlgZ03Pz5Hmqc,42498
+ chardet/johabprober.py,sha256=O1Qw9nVzRnun7vZp4UZM7wvJSv9W941mEU9uDMnY3DU,1752
+ chardet/jpcntx.py,sha256=uhHrYWkLxE_rF5OkHKInm0HUsrjgKHHVQvtt3UcvotA,27055
+ chardet/langbulgarianmodel.py,sha256=bGoRpxBYtrbSHa6mX6PkEA26v30pWmhDjemhdxmkew8,104550
+ chardet/langgreekmodel.py,sha256=3wMlEzQ8oU2MbrL2xN8lkuOB0dCMLBhW6heekxusoc0,98472
+ chardet/langhebrewmodel.py,sha256=ZUTqusxMvR_earWPs5w-rH10xoe5sPjd9FLMu1DUIvE,98184
+ chardet/langhungarianmodel.py,sha256=N-YtC2EiswyS7XsUicCPRycrIzRNj47Y048odp9qOoo,101351
+ chardet/langrussianmodel.py,sha256=6v7RcZKGj0VH0864BHzizKNceAYbHvGts2p00ifC7w4,128023
+ chardet/langthaimodel.py,sha256=Mr673U9U8rkQFfUDtLP01pp-0TOsl2o6sb75YEjvpcs,102762
+ chardet/langturkishmodel.py,sha256=LkXCjWhGUEzqKXvfasHN0SFBigwKJ3xeWNVZ0EyI0kA,95360
+ chardet/latin1prober.py,sha256=p15EEmFbmQUwbKLC7lOJVGHEZwcG45ubEZYTGu01J5g,5380
+ chardet/macromanprober.py,sha256=9anfzmY6TBfUPDyBDOdY07kqmTHpZ1tK0jL-p1JWcOY,6077
+ chardet/mbcharsetprober.py,sha256=Wr04WNI4F3X_VxEverNG-H25g7u-MDDKlNt-JGj-_uU,3715
+ chardet/mbcsgroupprober.py,sha256=iRpaNBjV0DNwYPu_z6TiHgRpwYahiM7ztI_4kZ4Uz9A,2131
+ chardet/mbcssm.py,sha256=hUtPvDYgWDaA2dWdgLsshbwRfm3Q5YRlRogdmeRUNQw,30391
+ chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ chardet/metadata/__pycache__/__init__.cpython-310.pyc,,
+ chardet/metadata/__pycache__/languages.cpython-310.pyc,,
+ chardet/metadata/languages.py,sha256=FhvBIdZFxRQ-dTwkb_0madRKgVBCaUMQz9I5xqjE5iQ,13560
+ chardet/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ chardet/resultdict.py,sha256=ez4FRvN5KaSosJeJ2WzUyKdDdg35HDy_SSLPXKCdt5M,402
+ chardet/sbcharsetprober.py,sha256=-nd3F90i7GpXLjehLVHqVBE0KlWzGvQUPETLBNn4o6U,6400
+ chardet/sbcsgroupprober.py,sha256=gcgI0fOfgw_3YTClpbra_MNxwyEyJ3eUXraoLHYb59E,4137
+ chardet/sjisprober.py,sha256=aqQufMzRw46ZpFlzmYaYeT2-nzmKb-hmcrApppJ862k,4007
+ chardet/universaldetector.py,sha256=xYBrg4x0dd9WnT8qclfADVD9ondrUNkqPmvte1pa520,14848
+ chardet/utf1632prober.py,sha256=pw1epGdMj1hDGiCu1AHqqzOEfjX8MVdiW7O1BlT8-eQ,8505
+ chardet/utf8prober.py,sha256=8m08Ub5490H4jQ6LYXvFysGtgKoKsHUd2zH_i8_TnVw,2812
+ chardet/version.py,sha256=jp8ePp1zC63YxruGcHSuKxtf3-fF1LYAMUZD2eDWYok,244
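Each RECORD row above is `path,sha256=<digest>,size`, where the digest is an unpadded URL-safe base64 SHA-256 per the wheel RECORD convention; entries that can change after install (RECORD itself, `.pyc` files) leave the hash and size empty. A hedged sketch of recomputing a digest in that format (the `record_digest` helper is illustrative):

```python
# Sketch: recompute the digest format used by wheel RECORD rows.
# RECORD stores sha256 digests as unpadded URL-safe base64, not hex.
import base64
import hashlib

def record_digest(data: bytes) -> str:
    """Return 'sha256=<urlsafe-b64-digest-without-padding>' for data."""
    digest = hashlib.sha256(data).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"sha256={b64}"

# INSTALLER is recorded as 4 bytes, presumably the line "pip\n"; note that
# both RECORD files in this commit list the same digest for INSTALLER,
# consistent with identical contents.
print(record_digest(b"pip\n"))
```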
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: bdist_wheel (0.41.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
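The `Tag: py3-none-any` line above is the wheel's compatibility tag: any Python 3 interpreter, no ABI requirement, any platform, i.e. a pure-Python wheel (matching `Root-Is-Purelib: true`). A small sketch of decoding it, assuming the third-party `packaging` library is available:

```python
# Sketch: decode the compatibility tag from the WHEEL block above.
from packaging.tags import parse_tag, sys_tags

(tag,) = parse_tag("py3-none-any")
print(tag.interpreter, tag.abi, tag.platform)  # py3 none any

# pip accepts a wheel when its tag intersects the running interpreter's
# supported tags; py3-none-any is supported on any CPython 3.
print(tag in set(sys_tags()))
```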
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
+ [console_scripts]
+ chardetect = chardet.cli.chardetect:main
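This `[console_scripts]` entry is what makes pip generate the `bin/chardetect` wrapper listed in RECORD: the wrapper imports `chardet.cli.chardetect` and calls its `main`. The same entry point can be resolved at runtime with the standard library (Python 3.10 `importlib.metadata` selection API, matching this venv):

```python
# Sketch: resolve the console-script entry point declared above.
from importlib.metadata import entry_points

(script,) = entry_points(group="console_scripts", name="chardetect")
print(script.value)   # chardet.cli.chardetect:main
main = script.load()  # imports the module and returns its main function
# main(["somefile"]) should then behave like `chardetect somefile`,
# assuming chardetect's main accepts an argv list.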
venv/lib/python3.10/site-packages/chardet-5.2.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ chardet
venv/lib/python3.10/site-packages/hjson-3.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,76 @@
+ ../../../bin/hjson,sha256=IEtY2FUtkPVgLc7HKm3HJFfMlQs7Hq0B_f5tznSZI4Q,238
+ hjson-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ hjson-3.1.0.dist-info/LICENSE.txt,sha256=WRxR9JxoM7zhpdU-Y3haBGet1nRaWogOTRD05kzETEw,10462
+ hjson-3.1.0.dist-info/METADATA,sha256=ws3nEqUXFpWNgmH-FYScGumUSgt2dkiPHNm7RZh-gcU,2610
+ hjson-3.1.0.dist-info/RECORD,,
+ hjson-3.1.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+ hjson-3.1.0.dist-info/entry_points.txt,sha256=sghBTcTrqdK0kKuznOZXeEBtjoBnLweHF83S9NK-r0E,42
+ hjson-3.1.0.dist-info/top_level.txt,sha256=earxK7niVoW6ADlKRFHCs3JpUL4FBSnJn9qmrYgnEUE,6
+ hjson/__init__.py,sha256=Py1drAVt-Xpi2xJAHxVXDdX-NniC4llxZVPjY7MyPCY,26580
+ hjson/__pycache__/__init__.cpython-310.pyc,,
+ hjson/__pycache__/compat.cpython-310.pyc,,
+ hjson/__pycache__/decoder.cpython-310.pyc,,
+ hjson/__pycache__/encoder.cpython-310.pyc,,
+ hjson/__pycache__/encoderH.cpython-310.pyc,,
+ hjson/__pycache__/ordered_dict.cpython-310.pyc,,
+ hjson/__pycache__/scanner.cpython-310.pyc,,
+ hjson/__pycache__/tool.cpython-310.pyc,,
+ hjson/compat.py,sha256=uvmTYe1Oa18tT_6tFRtYnzTdAkyd73B9zwMF7ZafI04,1036
+ hjson/decoder.py,sha256=oHz7g2sQd4S-AJbSzur9VJWqAHDWL25FVQ-G35XlGCA,19563
+ hjson/encoder.py,sha256=NhR3YSMVzL3UP8OLAtP2Dr6dW9UbJzjVS-SXp1DzvcY,19168
+ hjson/encoderH.py,sha256=wJ8D0gAyR3n6e3MahCIU3OahI5_xuHPWV_ZlI839xCs,20481
+ hjson/ordered_dict.py,sha256=DXtgiqkkaNWXDLZ0DGXIjF_CPzGV5qpC-PSeS1zcps8,3370
+ hjson/scanner.py,sha256=IL8poQGvCsb82y7qY5jrlSrZ5xcENpPUti3tNKhprYw,1779
+ hjson/tests/__init__.py,sha256=_A-1Tn7q7ccNPro_QfbKiXo_bTL9ED5RUX9AeSLG4TA,2011
+ hjson/tests/__pycache__/__init__.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_bigint_as_string.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_bitsize_int_as_string.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_check_circular.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_decimal.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_decode.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_default.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_dump.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_encode_basestring_ascii.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_errors.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_fail.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_float.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_for_json.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_hjson.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_indent.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_item_sort_key.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_namedtuple.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_pass1.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_pass2.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_pass3.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_recursion.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_scanstring.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_separators.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_tool.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_tuple.cpython-310.pyc,,
+ hjson/tests/__pycache__/test_unicode.cpython-310.pyc,,
+ hjson/tests/test_bigint_as_string.py,sha256=bhEtYEXWUhxyi25iLm4sPWFrt5RZ9PfFEknX1cdzP-Y,2265
+ hjson/tests/test_bitsize_int_as_string.py,sha256=-73xJ8CE2hDjGOWTERRrYbDHaY0kgBNQXC0g8nIgd4k,2332
+ hjson/tests/test_check_circular.py,sha256=64kZhsab6OcwYmJNLRqNW-19dp1UdgYbZiGzopKyR9s,940
+ hjson/tests/test_decimal.py,sha256=Qw0IBPSPYoGZXwvXkkM1cz6lpqjkPRzNDBSul-RdR_4,2556
+ hjson/tests/test_decode.py,sha256=Sm4052xVjv7ZtZFdRVMsnvQeh2eCNoXv24YOUJJLMdg,4437
+ hjson/tests/test_default.py,sha256=WWDLhDVfih4PrenmiEcvshhUOl_bNsm3jML96-AtGmo,224
+ hjson/tests/test_dump.py,sha256=5WU4Rd6vsHOwXGpGqQKIw1ZBNgRWUqMY8w3DnJVWfxo,5061
+ hjson/tests/test_encode_basestring_ascii.py,sha256=up4y9JMdGXdBXkEjfqwiG-sudSdcKw0RQfO_76za-To,2102
+ hjson/tests/test_errors.py,sha256=vg3-z36T9O-UeDHG4ZtW-nQBNAvraWKBrDA70yG989c,1549
+ hjson/tests/test_fail.py,sha256=Giinb944NX0bPwBHYUjVZ4ZlNB611Wg0wxVWxv4bDaU,5688
+ hjson/tests/test_float.py,sha256=LCUL-2xT8PYq99jQi6-Ddk9pMuC1mLrcJboTfvR08HM,1011
+ hjson/tests/test_for_json.py,sha256=ZLtypdX0ALctxMB8c3fQvx3k9OHY5t71gBxGNOXemrc,2778
+ hjson/tests/test_hjson.py,sha256=CdvXR05nu8bF_jZ-Hhj3bh8LRi8tdSJTruayj69HoDk,2327
+ hjson/tests/test_indent.py,sha256=8oUK5E8DTz1c3RkUU-nOELmr9wOKoaHHOAsxDai66iE,2589
+ hjson/tests/test_item_sort_key.py,sha256=piYy-ntwdcb_qS-y8jPFI6rVZlHCNqtTFGnaZSEvWH8,1134
+ hjson/tests/test_namedtuple.py,sha256=iK7B95JH4f2L3_MB3rY9NagEVZ1X62JHpjlm1J4t5uM,4066
+ hjson/tests/test_pass1.py,sha256=wdnBz55dY4ou8IIdZFypJ_72J6HCtLZw0YesoNOTopQ,1745
+ hjson/tests/test_pass2.py,sha256=O389C8IeJ9ysqWKUftXOeHvBi5_47VSdBM_2bCee1SQ,385
+ hjson/tests/test_pass3.py,sha256=wwemRholrfv4lwgC0ArnXiS_tfA-NnbRrZqo71YZXaA,481
+ hjson/tests/test_recursion.py,sha256=APQAtnchO6KZdduYqvEJDrBOmAKqyORwAX4ldbzAW7A,1694
+ hjson/tests/test_scanstring.py,sha256=U-5gUsaO7jEai6zfKMKADfBi-xFg8IHC1dBeElvTsn4,5953
+ hjson/tests/test_separators.py,sha256=krudtyZbx0k7OrZZ-jGCZH6yrmsj1seRkUblQLRq4Rw,945
+ hjson/tests/test_tool.py,sha256=PyDhrmMTpJSvBXvsWG8P-yrw1BT5kSp6OcSrMLvzmIw,2826
+ hjson/tests/test_tuple.py,sha256=FkOyw4s7WZ1HUGxtHYiwaBwhW2thlDB3JFSpUZCbI_I,1976
+ hjson/tests/test_unicode.py,sha256=5npz25rj4T3ZhXF_OsSmVveBUtTuz92DQ_cHDe-Jfrw,7099
+ hjson/tool.py,sha256=PVlr-lSDWfrGdkh5exWQo_21HGgHLR4OpZ1S41Dqjgo,1897
venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
+ pip
venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt ADDED
@@ -0,0 +1,1568 @@
+ End User License Agreement
+ --------------------------
+
+
+ Preface
+ -------
+
+ The Software License Agreement in Chapter 1 and the Supplement
+ in Chapter 2 contain license terms and conditions that govern
+ the use of NVIDIA software. By accepting this agreement, you
+ agree to comply with all the terms and conditions applicable
+ to the product(s) included herein.
+
+
+ NVIDIA Driver
+
+
+ Description
+
+ This package contains the operating system driver and
+ fundamental system software components for NVIDIA GPUs.
+
+
+ NVIDIA CUDA Toolkit
+
+
+ Description
+
+ The NVIDIA CUDA Toolkit provides command-line and graphical
+ tools for building, debugging and optimizing the performance
+ of applications accelerated by NVIDIA GPUs, runtime and math
+ libraries, and documentation including programming guides,
+ user manuals, and API references.
+
+
+ Default Install Location of CUDA Toolkit
+
+ Windows platform:
+
+ %ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
+
+ Linux platform:
+
+ /usr/local/cuda-#.#
+
+ Mac platform:
+
+ /Developer/NVIDIA/CUDA-#.#
+
+
+ NVIDIA CUDA Samples
+
+
+ Description
+
+ This package includes over 100 CUDA examples that demonstrate
+ various CUDA programming principles, and efficient CUDA
+ implementation of algorithms in specific application domains.
+
+
+ Default Install Location of CUDA Samples
+
+ Windows platform:
+
+ %ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
+
+ Linux platform:
+
+ /usr/local/cuda-#.#/samples
+
+ and
+
+ $HOME/NVIDIA_CUDA-#.#_Samples
+
+ Mac platform:
+
+ /Developer/NVIDIA/CUDA-#.#/samples
+
+
+ NVIDIA Nsight Visual Studio Edition (Windows only)
+
+
+ Description
+
+ NVIDIA Nsight Development Platform, Visual Studio Edition is a
+ development environment integrated into Microsoft Visual
+ Studio that provides tools for debugging, profiling, analyzing
+ and optimizing your GPU computing and graphics applications.
+
+
+ Default Install Location of Nsight Visual Studio Edition
+
+ Windows platform:
+
+ %ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
+
+
+ 1. License Agreement for NVIDIA Software Development Kits
+ ---------------------------------------------------------
+
+
+ Release Date: July 26, 2018
+ ---------------------------
+
+
+ Important Notice: Read before downloading, installing,
+ copying or using the licensed software:
+ -------------------------------------------------------
+
+ This license agreement, including exhibits attached
+ (“Agreement”) is a legal agreement between you and NVIDIA
+ Corporation ("NVIDIA") and governs your use of a NVIDIA
+ software development kit (“SDK”).
+
+ Each SDK has its own set of software and materials, but here
+ is a description of the types of items that may be included in
+ a SDK: source code, header files, APIs, data sets and assets
+ (examples include images, textures, models, scenes, videos,
+ native API input/output files), binary software, sample code,
+ libraries, utility programs, programming code and
+ documentation.
+
+ This Agreement can be accepted only by an adult of legal age
+ of majority in the country in which the SDK is used.
+
+ If you are entering into this Agreement on behalf of a company
+ or other legal entity, you represent that you have the legal
+ authority to bind the entity to this Agreement, in which case
+ “you” will mean the entity you represent.
+
+ If you don’t have the required age or authority to accept
+ this Agreement, or if you don’t accept all the terms and
+ conditions of this Agreement, do not download, install or use
+ the SDK.
+
+ You agree to use the SDK only for purposes that are permitted
+ by (a) this Agreement, and (b) any applicable law, regulation
+ or generally accepted practices or guidelines in the relevant
+ jurisdictions.
+
+
+ 1.1. License
+
+
+ 1.1.1. License Grant
+
+ Subject to the terms of this Agreement, NVIDIA hereby grants
+ you a non-exclusive, non-transferable license, without the
+ right to sublicense (except as expressly provided in this
+ Agreement) to:
+
+ 1. Install and use the SDK,
+
+ 2. Modify and create derivative works of sample source code
+ delivered in the SDK, and
+
+ 3. Distribute those portions of the SDK that are identified
+ in this Agreement as distributable, as incorporated in
+ object code format into a software application that meets
+ the distribution requirements indicated in this Agreement.
+
+
+ 1.1.2. Distribution Requirements
+
+ These are the distribution requirements for you to exercise
+ the distribution grant:
+
+ 1. Your application must have material additional
+ functionality, beyond the included portions of the SDK.
+
+ 2. The distributable portions of the SDK shall only be
+ accessed by your application.
+
+ 3. The following notice shall be included in modifications
+ and derivative works of sample source code distributed:
+ “This software contains source code provided by NVIDIA
+ Corporation.”
+
+ 4. Unless a developer tool is identified in this Agreement
+ as distributable, it is delivered for your internal use
+ only.
+
+ 5. The terms under which you distribute your application
+ must be consistent with the terms of this Agreement,
+ including (without limitation) terms relating to the
+ license grant and license restrictions and protection of
+ NVIDIA’s intellectual property rights. Additionally, you
+ agree that you will protect the privacy, security and
+ legal rights of your application users.
+
+ 6. You agree to notify NVIDIA in writing of any known or
+ suspected distribution or use of the SDK not in compliance
+ with the requirements of this Agreement, and to enforce
+ the terms of your agreements with respect to distributed
+ SDK.
+
+
+ 1.1.3. Authorized Users
+
+ You may allow employees and contractors of your entity or of
+ your subsidiary(ies) to access and use the SDK from your
+ secure network to perform work on your behalf.
+
+ If you are an academic institution you may allow users
+ enrolled or employed by the academic institution to access and
+ use the SDK from your secure network.
+
+ You are responsible for the compliance with the terms of this
+ Agreement by your authorized users. If you become aware that
+ your authorized users didn’t follow the terms of this
+ Agreement, you agree to take reasonable steps to resolve the
+ non-compliance and prevent new occurrences.
+
+
+ 1.1.4. Pre-Release SDK
+
+ The SDK versions identified as alpha, beta, preview or
+ otherwise as pre-release, may not be fully functional, may
+ contain errors or design flaws, and may have reduced or
+ different security, privacy, accessibility, availability, and
+ reliability standards relative to commercial versions of
+ NVIDIA software and materials. Use of a pre-release SDK may
+ result in unexpected results, loss of data, project delays or
+ other unpredictable damage or loss.
+
+ You may use a pre-release SDK at your own risk, understanding
+ that pre-release SDKs are not intended for use in production
+ or business-critical systems.
+
+ NVIDIA may choose not to make available a commercial version
+ of any pre-release SDK. NVIDIA may also choose to abandon
+ development and terminate the availability of a pre-release
+ SDK at any time without liability.
+
+
+ 1.1.5. Updates
+
+ NVIDIA may, at its option, make available patches, workarounds
+ or other updates to this SDK. Unless the updates are provided
+ with their separate governing terms, they are deemed part of
+ the SDK licensed to you as provided in this Agreement. You
+ agree that the form and content of the SDK that NVIDIA
+ provides may change without prior notice to you. While NVIDIA
+ generally maintains compatibility between versions, NVIDIA may
+ in some cases make changes that introduce incompatibilities in
+ future versions of the SDK.
+
+
+ 1.1.6. Third Party Licenses
+
+ The SDK may come bundled with, or otherwise include or be
+ distributed with, third party software licensed by a NVIDIA
+ supplier and/or open source software provided under an open
+ source license. Use of third party software is subject to the
+ third-party license terms, or in the absence of third party
+ terms, the terms of this Agreement. Copyright to third party
+ software is held by the copyright holders indicated in the
+ third-party software or license.
+
+
+ 1.1.7. Reservation of Rights
+
+ NVIDIA reserves all rights, title, and interest in and to the
+ SDK, not expressly granted to you under this Agreement.
+
+
+ 1.2. Limitations
+
+ The following license limitations apply to your use of the
+ SDK:
+
+ 1. You may not reverse engineer, decompile or disassemble,
+ or remove copyright or other proprietary notices from any
+ portion of the SDK or copies of the SDK.
+
+ 2. Except as expressly provided in this Agreement, you may
+ not copy, sell, rent, sublicense, transfer, distribute,
+ modify, or create derivative works of any portion of the
+ SDK. For clarity, you may not distribute or sublicense the
+ SDK as a stand-alone product.
+
+ 3. Unless you have an agreement with NVIDIA for this
+ purpose, you may not indicate that an application created
+ with the SDK is sponsored or endorsed by NVIDIA.
+
+ 4. You may not bypass, disable, or circumvent any
+ encryption, security, digital rights management or
+ authentication mechanism in the SDK.
+
+ 5. You may not use the SDK in any manner that would cause it
+ to become subject to an open source software license. As
+ examples, licenses that require as a condition of use,
+ modification, and/or distribution that the SDK be:
+
+ a. Disclosed or distributed in source code form;
+
+ b. Licensed for the purpose of making derivative works;
+ or
+
+ c. Redistributable at no charge.
+
+ 6. Unless you have an agreement with NVIDIA for this
+ purpose, you may not use the SDK with any system or
+ application where the use or failure of the system or
+ application can reasonably be expected to threaten or
+ result in personal injury, death, or catastrophic loss.
+ Examples include use in avionics, navigation, military,
+ medical, life support or other life critical applications.
+ NVIDIA does not design, test or manufacture the SDK for
+ these critical uses and NVIDIA shall not be liable to you
+ or any third party, in whole or in part, for any claims or
+ damages arising from such uses.
+
+ 7. You agree to defend, indemnify and hold harmless NVIDIA
+ and its affiliates, and their respective employees,
+ contractors, agents, officers and directors, from and
+ against any and all claims, damages, obligations, losses,
+ liabilities, costs or debt, fines, restitutions and
+ expenses (including but not limited to attorney’s fees
+ and costs incident to establishing the right of
+ indemnification) arising out of or related to your use of
+ the SDK outside of the scope of this Agreement, or not in
+ compliance with its terms.
+
+
+ 1.3. Ownership
+
+ 1. NVIDIA or its licensors hold all rights, title and
+ interest in and to the SDK and its modifications and
+ derivative works, including their respective intellectual
+ property rights, subject to your rights described in this
+ section. This SDK may include software and materials from
+ NVIDIA’s licensors, and these licensors are intended
+ third party beneficiaries that may enforce this Agreement
+ with respect to their intellectual property rights.
+
+ 2. You hold all rights, title and interest in and to your
+ applications and your derivative works of the sample
+ source code delivered in the SDK, including their
+ respective intellectual property rights, subject to
+ NVIDIA’s rights described in this section.
+
+ 3. You may, but don’t have to, provide to NVIDIA
+ suggestions, feature requests or other feedback regarding
+ the SDK, including possible enhancements or modifications
+ to the SDK. For any feedback that you voluntarily provide,
+ you hereby grant NVIDIA and its affiliates a perpetual,
+ non-exclusive, worldwide, irrevocable license to use,
+ reproduce, modify, license, sublicense (through multiple
+ tiers of sublicensees), and distribute (through multiple
+ tiers of distributors) it without the payment of any
+ royalties or fees to you. NVIDIA will use feedback at its
+ choice. NVIDIA is constantly looking for ways to improve
+ its products, so you may send feedback to NVIDIA through
+ the developer portal at https://developer.nvidia.com.
+
+
+ 1.4. No Warranties
+
+ THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
+ FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
+ ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
+ OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
+ BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
+ FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
+ ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
+ WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
+ DEALING OR COURSE OF TRADE.
+
+
+ 1.5. Limitation of Liability
+
+ TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
+ AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
+ PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
+ OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
+ PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
+ WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
+ WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
+ OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
+ PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
+ LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES’
+ TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
+ AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
+ NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
+ LIMIT.
+
+ These exclusions and limitations of liability shall apply
+ regardless if NVIDIA or its affiliates have been advised of
+ the possibility of such damages, and regardless of whether a
+ remedy fails its essential purpose. These exclusions and
+ limitations of liability form an essential basis of the
+ bargain between the parties, and, absent any of these
+ exclusions or limitations of liability, the provisions of this
+ Agreement, including, without limitation, the economic terms,
+ would be substantially different.
+
+
+ 1.6. Termination
+
+ 1. This Agreement will continue to apply until terminated by
+ either you or NVIDIA as described below.
+
+ 2. If you want to terminate this Agreement, you may do so by
+ ceasing to use the SDK.
+
+ 3. NVIDIA may, at any time, terminate this Agreement if:
+
+ a. (i) you fail to comply with any term of this
+ Agreement and the non-compliance is not fixed within
+ thirty (30) days following notice from NVIDIA (or
+ immediately if you violate NVIDIA’s intellectual
+ property rights);
+
+ b. (ii) you commence or participate in any legal
+ proceeding against NVIDIA with respect to the SDK; or
+
+ c. (iii) NVIDIA decides to no longer provide the SDK in
+ a country or, in NVIDIA’s sole discretion, the
+ continued use of it is no longer commercially viable.
+
+ 4. Upon any termination of this Agreement, you agree to
+ promptly discontinue use of the SDK and destroy all copies
+ in your possession or control. Your prior distributions in
+ accordance with this Agreement are not affected by the
+ termination of this Agreement. Upon written request, you
+ will certify in writing that you have complied with your
+ commitments under this section. Upon any termination of
+ this Agreement all provisions survive except for the
+ license grant provisions.
+
+
+ 1.7. General
+
+ If you wish to assign this Agreement or your rights and
+ obligations, including by merger, consolidation, dissolution
+ or operation of law, contact NVIDIA to ask for permission. Any
+ attempted assignment not approved by NVIDIA in writing shall
+ be void and of no effect. NVIDIA may assign, delegate or
+ transfer this Agreement and its rights and obligations, and if
+ to a non-affiliate you will be notified.
+
+ You agree to cooperate with NVIDIA and provide reasonably
+ requested information to verify your compliance with this
+ Agreement.
+
+ This Agreement will be governed in all respects by the laws of
+ the United States and of the State of Delaware as those laws
+ are applied to contracts entered into and performed entirely
+ within Delaware by Delaware residents, without regard to the
+ conflicts of laws principles. The United Nations Convention on
+ Contracts for the International Sale of Goods is specifically
+ disclaimed. You agree to all terms of this Agreement in the
+ English language.
+
+ The state or federal courts residing in Santa Clara County,
+ California shall have exclusive jurisdiction over any dispute
+ or claim arising out of this Agreement. Notwithstanding this,
+ you agree that NVIDIA shall still be allowed to apply for
+ injunctive remedies or an equivalent type of urgent legal
+ relief in any jurisdiction.
+
+ If any court of competent jurisdiction determines that any
+ provision of this Agreement is illegal, invalid or
+ unenforceable, such provision will be construed as limited to
+ the extent necessary to be consistent with and fully
+ enforceable under the law and the remaining provisions will
+ remain in full force and effect. Unless otherwise specified,
+ remedies are cumulative.
+
+ Each party acknowledges and agrees that the other is an
+ independent contractor in the performance of this Agreement.
+
+ The SDK has been developed entirely at private expense and is
+ “commercial items” consisting of “commercial computer
+ software” and “commercial computer software
+ documentation” provided with RESTRICTED RIGHTS. Use,
+ duplication or disclosure by the U.S. Government or a U.S.
+ Government subcontractor is subject to the restrictions in
+ this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
+ in subparagraphs (c)(1) and (2) of the Commercial Computer
+ Software - Restricted Rights clause at FAR 52.227-19, as
+ applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
+ Expressway, Santa Clara, CA 95051.
+
+ The SDK is subject to United States export laws and
+ regulations. You agree that you will not ship, transfer or
+ export the SDK into any country, or use the SDK in any manner,
+ prohibited by the United States Bureau of Industry and
+ Security or economic sanctions regulations administered by the
+ U.S. Department of Treasury’s Office of Foreign Assets
+ Control (OFAC), or any applicable export laws, restrictions or
+ regulations. These laws include restrictions on destinations,
+ end users and end use. By accepting this Agreement, you
+ confirm that you are not a resident or citizen of any country
+ currently embargoed by the U.S. and that you are not otherwise
+ prohibited from receiving the SDK.
+
+ Any notice delivered by NVIDIA to you under this Agreement
+ will be delivered via mail, email or fax. You agree that any
+ notices that NVIDIA sends you electronically will satisfy any
+ legal communication requirements. Please direct your legal
+ notices or other correspondence to NVIDIA Corporation, 2788
+ San Tomas Expressway, Santa Clara, California 95051, United
+ States of America, Attention: Legal Department.
+
+ This Agreement and any exhibits incorporated into this
+ Agreement constitute the entire agreement of the parties with
+ respect to the subject matter of this Agreement and supersede
+ all prior negotiations or documentation exchanged between the
+ parties relating to this SDK license. Any additional and/or
+ conflicting terms on documents issued by you are null, void,
+ and invalid. Any amendment or waiver under this Agreement
+ shall be in writing and signed by representatives of both
+ parties.
+
+
+ 2. CUDA Toolkit Supplement to Software License Agreement for
+ NVIDIA Software Development Kits
+ ------------------------------------------------------------
+
+
+ Release date: August 16, 2018
+ -----------------------------
+
+ The terms in this supplement govern your use of the NVIDIA
+ CUDA Toolkit SDK under the terms of your license agreement
+ (“Agreement”) as modified by this supplement. Capitalized
+ terms used but not defined below have the meaning assigned to
+ them in the Agreement.
+
+ This supplement is an exhibit to the Agreement and is
+ incorporated as an integral part of the Agreement. In the
+ event of conflict between the terms in this supplement and the
+ terms in the Agreement, the terms in this supplement govern.
+
+
+ 2.1. License Scope
+
+ The SDK is licensed for you to develop applications only for
+ use in systems with NVIDIA GPUs.
+
+
+ 2.2. Distribution
+
+ The portions of the SDK that are distributable under the
+ Agreement are listed in Attachment A.
+
+
+ 2.3. Operating Systems
+
+ Those portions of the SDK designed exclusively for use on the
+ Linux or FreeBSD operating systems, or other operating systems
+ derived from the source code to these operating systems, may
+ be copied and redistributed for use in accordance with this
+ Agreement, provided that the object code files are not
+ modified in any way (except for unzipping of compressed
+ files).
+
+
+ 2.4. Audio and Video Encoders and Decoders
+
+ You acknowledge and agree that it is your sole responsibility
+ to obtain any additional third-party licenses required to
+ make, have made, use, have used, sell, import, and offer for
+ sale your products or services that include or incorporate any
+ third-party software and content relating to audio and/or
+ video encoders and decoders from, including but not limited
+ to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
+ MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
+ under this Agreement any necessary patent or other rights with
+ respect to any audio and/or video encoders and decoders.
+
+
+ 2.5. Licensing
+
+ If the distribution terms in this Agreement are not suitable
+ for your organization, or for any questions regarding this
+ Agreement, please contact NVIDIA at
+
+
+ 2.6. Attachment A
+
+ The following portions of the SDK are distributable under the
+ Agreement:
+
+ Component
+
+ CUDA Runtime
+
+ Windows
+
+ cudart.dll, cudart_static.lib, cudadevrt.lib
+
+ Mac OSX
+
+ libcudart.dylib, libcudart_static.a, libcudadevrt.a
+
+ Linux
+
+ libcudart.so, libcudart_static.a, libcudadevrt.a
+
+ Android
+
+ libcudart.so, libcudart_static.a, libcudadevrt.a
+
+ Component
+
+ CUDA FFT Library
+
+ Windows
+
+ cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
+
+ Mac OSX
+
+ libcufft.dylib, libcufft_static.a, libcufftw.dylib,
+ libcufftw_static.a
+
+ Linux
+
+ libcufft.so, libcufft_static.a, libcufftw.so,
+ libcufftw_static.a
+
+ Android
+
+ libcufft.so, libcufft_static.a, libcufftw.so,
+ libcufftw_static.a
+
+ Component
+
+ CUDA BLAS Library
+
+ Windows
+
+ cublas.dll, cublasLt.dll
+
+ Mac OSX
+
+ libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
+ libcublasLt_static.a
+
+ Linux
+
+ libcublas.so, libcublasLt.so, libcublas_static.a,
+ libcublasLt_static.a
+
+ Android
+
+ libcublas.so, libcublasLt.so, libcublas_static.a,
+ libcublasLt_static.a
+
+ Component
+
+ NVIDIA "Drop-in" BLAS Library
+
+ Windows
+
+ nvblas.dll
+
+ Mac OSX
+
+ libnvblas.dylib
+
+ Linux
+
+ libnvblas.so
+
+ Component
+
+ CUDA Sparse Matrix Library
+
+ Windows
+
+ cusparse.dll, cusparse.lib
+
+ Mac OSX
+
+ libcusparse.dylib, libcusparse_static.a
+
+ Linux
+
+ libcusparse.so, libcusparse_static.a
+
+ Android
+
+ libcusparse.so, libcusparse_static.a
+
+ Component
+
+ CUDA Linear Solver Library
+
+ Windows
+
+ cusolver.dll, cusolver.lib
+
+ Mac OSX
+
+ libcusolver.dylib, libcusolver_static.a
+
+ Linux
+
+ libcusolver.so, libcusolver_static.a
+
+ Android
+
+ libcusolver.so, libcusolver_static.a
+
+ Component
+
+ CUDA Random Number Generation Library
+
+ Windows
+
+ curand.dll, curand.lib
+
+ Mac OSX
+
+ libcurand.dylib, libcurand_static.a
+
+ Linux
+
+ libcurand.so, libcurand_static.a
+
+ Android
+
+ libcurand.so, libcurand_static.a
+
+ Component
+
+ CUDA Accelerated Graph Library
+
+ Component
+
+ NVIDIA Performance Primitives Library
+
+ Windows
+
+ nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
+ nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
+ nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
+ nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
+ nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
+
+ Mac OSX
+
+ libnppc.dylib, libnppc_static.a, libnppial.dylib,
+ libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
+ libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
+ libnppidei_static.a, libnppif.dylib, libnppif_static.a,
+ libnppig.dylib, libnppig_static.a, libnppim.dylib,
+ libnppisu_static.a, libnppitc.dylib, libnppitc_static.a,
+ libnpps.dylib, libnpps_static.a
+
+ Linux
+
+ libnppc.so, libnppc_static.a, libnppial.so,
+ libnppial_static.a, libnppicc.so, libnppicc_static.a,
+ libnppicom.so, libnppicom_static.a, libnppidei.so,
+ libnppidei_static.a, libnppif.so, libnppif_static.a,
+ libnppig.so, libnppig_static.a, libnppim.so,
+ libnppim_static.a, libnppist.so, libnppist_static.a,
+ libnppisu.so, libnppisu_static.a, libnppitc.so,
+ libnppitc_static.a, libnpps.so, libnpps_static.a
+
+ Android
+
+ libnppc.so, libnppc_static.a, libnppial.so,
+ libnppial_static.a, libnppicc.so, libnppicc_static.a,
+ libnppicom.so, libnppicom_static.a, libnppidei.so,
+ libnppidei_static.a, libnppif.so, libnppif_static.a,
+ libnppig.so, libnppig_static.a, libnppim.so,
+ libnppim_static.a, libnppist.so, libnppist_static.a,
+ libnppisu.so, libnppisu_static.a, libnppitc.so,
+ libnppitc_static.a, libnpps.so, libnpps_static.a
+
+ Component
+
+ NVIDIA JPEG Library
+
+ Linux
+
+ libnvjpeg.so, libnvjpeg_static.a
+
+ Component
+
+ Internal common library required for statically linking to
+ cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
+
+ Mac OSX
+
+ libculibos.a
+
+ Linux
+
+ libculibos.a
+
+ Component
+
+ NVIDIA Runtime Compilation Library and Header
+
+ All
+
+ nvrtc.h
+
+ Windows
+
+ nvrtc.dll, nvrtc-builtins.dll
+
+ Mac OSX
+
+ libnvrtc.dylib, libnvrtc-builtins.dylib
+
+ Linux
+
+ libnvrtc.so, libnvrtc-builtins.so
+
+ Component
+
+ NVIDIA Optimizing Compiler Library
+
+ Windows
+
+ nvvm.dll
+
+ Mac OSX
+
+ libnvvm.dylib
+
+ Linux
+
+ libnvvm.so
+
+ Component
+
+ NVIDIA Common Device Math Functions Library
+
+ Windows
+
+ libdevice.10.bc
+
+ Mac OSX
+
+ libdevice.10.bc
+
+ Linux
+
+ libdevice.10.bc
+
+ Component
+
+ CUDA Occupancy Calculation Header Library
+
+ All
+
+ cuda_occupancy.h
+
+ Component
+
+ CUDA Half Precision Headers
+
+ All
+
+ cuda_fp16.h, cuda_fp16.hpp
+
+ Component
+
+ CUDA Profiling Tools Interface (CUPTI) Library
+
+ Windows
+
+ cupti.dll
+
+ Mac OSX
+
+ libcupti.dylib
+
+ Linux
+
+ libcupti.so
+
+ Component
+
+ NVIDIA Tools Extension Library
+
+ Windows
+
+ nvToolsExt.dll, nvToolsExt.lib
+
+ Mac OSX
+
+ libnvToolsExt.dylib
+
+ Linux
+
+ libnvToolsExt.so
+
+ Component
+
+ NVIDIA CUDA Driver Libraries
+
+ Linux
+
+ libcuda.so, libnvidia-fatbinaryloader.so,
+ libnvidia-ptxjitcompiler.so
+
+ The NVIDIA CUDA Driver Libraries are only distributable in
+ applications that meet these criteria:
+
+ 1. The application was developed starting from a NVIDIA CUDA
+ container obtained from Docker Hub or the NVIDIA GPU
+ Cloud, and
+
+ 2. The resulting application is packaged as a Docker
+ container and distributed to users on Docker Hub or the
+ NVIDIA GPU Cloud only.
+
+
+ 2.7. Attachment B
+
+
+ Additional Licensing Obligations
+
+ The following third party components included in the SOFTWARE
+ are licensed to Licensee pursuant to the following terms and
+ conditions:
+
+ 1. Licensee's use of the GDB third party component is
+ subject to the terms and conditions of GNU GPL v3:
+
+ This product includes copyrighted third-party software licensed
+ under the terms of the GNU General Public License v3 ("GPL v3").
+ All third-party software packages are copyright by their respective
+ authors. GPL v3 terms and conditions are hereby incorporated into
+ the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
+
+ Consistent with these licensing requirements, the software
+ listed below is provided under the terms of the specified
+ open source software licenses. To obtain source code for
+ software provided under licenses that require
+ redistribution of source code, including the GNU General
+ Public License (GPL) and GNU Lesser General Public License
+ (LGPL), contact [email protected]. This offer is
+ valid for a period of three (3) years from the date of the
+ distribution of this product by NVIDIA CORPORATION.
+
+ Component License
+ CUDA-GDB GPL v3
+
+ 2. Licensee represents and warrants that any and all third
+ party licensing and/or royalty payment obligations in
+ connection with Licensee's use of the H.264 video codecs
+ are solely the responsibility of Licensee.
+
+ 3. Licensee's use of the Thrust library is subject to the
+ terms and conditions of the Apache License Version 2.0.
+ All third-party software packages are copyright by their
+ respective authors. Apache License Version 2.0 terms and
+ conditions are hereby incorporated into the Agreement by
+ this reference.
+ http://www.apache.org/licenses/LICENSE-2.0.html
+
+ In addition, Licensee acknowledges the following notice:
+ Thrust includes source code from the Boost Iterator,
+ Tuple, System, and Random Number libraries.
+
+ Boost Software License - Version 1.0 - August 17th, 2003
+ . . . .
+
+ Permission is hereby granted, free of charge, to any person or
+ organization obtaining a copy of the software and accompanying
+ documentation covered by this license (the "Software") to use,
+ reproduce, display, distribute, execute, and transmit the Software,
+ and to prepare derivative works of the Software, and to permit
+ third-parties to whom the Software is furnished to do so, all
+ subject to the following:
+
+ The copyright notices in the Software and this entire statement,
+ including the above license grant, this restriction and the following
+ disclaimer, must be included in all copies of the Software, in whole
+ or in part, and all derivative works of the Software, unless such
+ copies or derivative works are solely in the form of machine-executable
+ object code generated by a source language processor.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
+ NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
+ OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+
+ 4. Licensee's use of the LLVM third party component is
+ subject to the following terms and conditions:
+
+ ======================================================
+ LLVM Release License
+ ======================================================
+ University of Illinois/NCSA
+ Open Source License
+
+ Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
+ All rights reserved.
+
+ Developed by:
+
+ LLVM Team
+
+ University of Illinois at Urbana-Champaign
+
+ http://llvm.org
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to
+ deal with the Software without restriction, including without limitation the
+ rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimers.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimers in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the names of the LLVM Team, University of Illinois at Urbana-
+ Champaign, nor the names of its contributors may be used to endorse or
+ promote products derived from this Software without specific prior
+ written permission.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS WITH THE SOFTWARE.
+
+ 5. Licensee's use (e.g. nvprof) of the PCRE third party
+ component is subject to the following terms and
+ conditions:
+
+ ------------
+ PCRE LICENCE
+ ------------
+ PCRE is a library of functions to support regular expressions whose syntax
+ and semantics are as close as possible to those of the Perl 5 language.
+ Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
+ specified below. The documentation for PCRE, supplied in the "doc"
+ directory, is distributed under the same terms as the software itself. The
+ basic library functions are written in C and are freestanding. Also
+ included in the distribution is a set of C++ wrapper functions, and a just-
+ in-time compiler that can be used to optimize pattern matching. These are
+ both optional features that can be omitted when the library is built.
+
+ THE BASIC LIBRARY FUNCTIONS
+ ---------------------------
+ Written by: Philip Hazel
+ Email local part: ph10
+ Email domain: cam.ac.uk
+ University of Cambridge Computing Service,
+ Cambridge, England.
+ Copyright (c) 1997-2012 University of Cambridge
+ All rights reserved.
+
+ PCRE JUST-IN-TIME COMPILATION SUPPORT
+ -------------------------------------
+ Written by: Zoltan Herczeg
+ Email local part: hzmester
+ Email domain: freemail.hu
+ Copyright(c) 2010-2012 Zoltan Herczeg
+ All rights reserved.
+
+ STACK-LESS JUST-IN-TIME COMPILER
+ --------------------------------
+ Written by: Zoltan Herczeg
+ Email local part: hzmester
+ Email domain: freemail.hu
+ Copyright(c) 2009-2012 Zoltan Herczeg
+ All rights reserved.
+
+ THE C++ WRAPPER FUNCTIONS
+ -------------------------
+ Contributed by: Google Inc.
+ Copyright (c) 2007-2012, Google Inc.
+ All rights reserved.
+
+ THE "BSD" LICENCE
+ -----------------
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the name of the University of Cambridge nor the name of Google
+ Inc. nor the names of their contributors may be used to endorse or
+ promote products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 6. Some of the cuBLAS library routines were written by or
+ derived from code written by Vasily Volkov and are subject
+ to the Modified Berkeley Software Distribution License as
+ follows:
+
+ Copyright (c) 2007-2009, Regents of the University of California
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the University of California, Berkeley nor
+ the names of its contributors may be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 7. Some of the cuBLAS library routines were written by or
+ derived from code written by Davide Barbieri and are
+ subject to the Modified Berkeley Software Distribution
+ License as follows:
+
+ Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 8. Some of the cuBLAS library routines were derived from
+ code developed by the University of Tennessee and are
+ subject to the Modified Berkeley Software Distribution
+ License as follows:
+
+ Copyright (c) 2010 The University of Tennessee.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer listed in this license in the documentation and/or
+ other materials provided with the distribution.
+ * Neither the name of the copyright holders nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 9. Some of the cuBLAS library routines were written by or
+ derived from code written by Jonathan Hogg and are subject
+ to the Modified Berkeley Software Distribution License as
+ follows:
+
+ Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the STFC nor the names of its contributors
+ may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 10. Some of the cuBLAS library routines were written by or
+ derived from code written by Ahmad M. Abdelfattah, David
+ Keyes, and Hatem Ltaief, and are subject to the Apache
+ License, Version 2.0, as follows:
+
+ -- (C) Copyright 2013 King Abdullah University of Science and Technology
+ Authors:
+ Ahmad Abdelfattah ([email protected])
+ David Keyes ([email protected])
+ Hatem Ltaief ([email protected])
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the King Abdullah University of Science and
+ Technology nor the names of its contributors may be used to endorse
+ or promote products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
+
+ 11. Some of the cuSPARSE library routines were written by or
+ derived from code written by Li-Wen Chang and are subject
+ to the NCSA Open Source License as follows:
+
+ Copyright (c) 2012, University of Illinois.
+
+ All rights reserved.
+
+ Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal with the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimers in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the names of IMPACT Group, University of Illinois, nor
+ the names of its contributors may be used to endorse or promote
+ products derived from this Software without specific prior
+ written permission.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
+ SOFTWARE.
+
+ 12. Some of the cuRAND library routines were written by or
+ derived from code written by Mutsuo Saito and Makoto
+ Matsumoto and are subject to the following license:
+
+ Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
+ University. All rights reserved.
+
+ Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
+ University and University of Tokyo. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the Hiroshima University nor the names of
+ its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 13. Some of the cuRAND library routines were derived from
+ code developed by D. E. Shaw Research and are subject to
+ the following license:
+
+ Copyright 2010-2011, D. E. Shaw Research.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions, and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions, and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of D. E. Shaw Research nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 14. Some of the Math library routines were written by or
+ derived from code developed by Norbert Juffa and are
+ subject to the following license:
+
+ Copyright (c) 2015-2017, Norbert Juffa
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 15. Licensee's use of the lz4 third party component is
+ subject to the following terms and conditions:
+
+ Copyright (C) 2011-2013, Yann Collet.
+ BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 16. The NPP library uses code from the Boost Math Toolkit,
+ and is subject to the following license:
+
+ Boost Software License - Version 1.0 - August 17th, 2003
+ . . . .
+
+ Permission is hereby granted, free of charge, to any person or
+ organization obtaining a copy of the software and accompanying
+ documentation covered by this license (the "Software") to use,
+ reproduce, display, distribute, execute, and transmit the Software,
+ and to prepare derivative works of the Software, and to permit
+ third-parties to whom the Software is furnished to do so, all
+ subject to the following:
+
+ The copyright notices in the Software and this entire statement,
+ including the above license grant, this restriction and the following
+ disclaimer, must be included in all copies of the Software, in whole
+ or in part, and all derivative works of the Software, unless such
1477
+ copies or derivative works are solely in the form of machine-executable
1478
+ object code generated by a source language processor.
1479
+
1480
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
1481
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
1482
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
1483
+ NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
1484
+ ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
1485
+ OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
1486
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
1487
+ OTHER DEALINGS IN THE SOFTWARE.
1488
+
1489
+ 17. Portions of the Nsight Eclipse Edition is subject to the
1490
+ following license:
1491
+
1492
+ The Eclipse Foundation makes available all content in this plug-in
1493
+ ("Content"). Unless otherwise indicated below, the Content is provided
1494
+ to you under the terms and conditions of the Eclipse Public License
1495
+ Version 1.0 ("EPL"). A copy of the EPL is available at http://
1496
+ www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
1497
+ will mean the Content.
1498
+
1499
+ If you did not receive this Content directly from the Eclipse
1500
+ Foundation, the Content is being redistributed by another party
1501
+ ("Redistributor") and different terms and conditions may apply to your
1502
+ use of any object code in the Content. Check the Redistributor's
1503
+ license that was provided with the Content. If no such license exists,
1504
+ contact the Redistributor. Unless otherwise indicated below, the terms
1505
+ and conditions of the EPL still apply to any source code in the
1506
+ Content and such source code may be obtained at http://www.eclipse.org.
1507
+
1508
+ 18. Some of the cuBLAS library routines uses code from
1509
+ OpenAI, which is subject to the following license:
1510
+
1511
+ License URL
1512
+ https://github.com/openai/openai-gemm/blob/master/LICENSE
1513
+
1514
+ License Text
1515
+ The MIT License
1516
+
1517
+ Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
1518
+
1519
+ Permission is hereby granted, free of charge, to any person obtaining a copy
1520
+ of this software and associated documentation files (the "Software"), to deal
1521
+ in the Software without restriction, including without limitation the rights
1522
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
1523
+ copies of the Software, and to permit persons to whom the Software is
1524
+ furnished to do so, subject to the following conditions:
1525
+
1526
+ The above copyright notice and this permission notice shall be included in
1527
+ all copies or substantial portions of the Software.
1528
+
1529
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
1530
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
1531
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
1532
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
1533
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
1534
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
1535
+ THE SOFTWARE.
1536
+
1537
+ 19. Licensee's use of the Visual Studio Setup Configuration
1538
+ Samples is subject to the following license:
1539
+
1540
+ The MIT License (MIT)
1541
+ Copyright (C) Microsoft Corporation. All rights reserved.
1542
+
1543
+ Permission is hereby granted, free of charge, to any person
1544
+ obtaining a copy of this software and associated documentation
1545
+ files (the "Software"), to deal in the Software without restriction,
1546
+ including without limitation the rights to use, copy, modify, merge,
1547
+ publish, distribute, sublicense, and/or sell copies of the Software,
1548
+ and to permit persons to whom the Software is furnished to do so,
1549
+ subject to the following conditions:
1550
+
1551
+ The above copyright notice and this permission notice shall be included
1552
+ in all copies or substantial portions of the Software.
1553
+
1554
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
1555
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
1556
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
1557
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
1558
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
1559
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
1560
+
1561
+ 20. Licensee's use of linmath.h header for CPU functions for
1562
+ GL vector/matrix operations from lunarG is subject to the
1563
+ Apache License Version 2.0.
1564
+
1565
+ 21. The DX12-CUDA sample uses the d3dx12.h header, which is
1566
+ subject to the MIT license .
1567
+
1568
+ -----------------
venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: nvidia-cusparse-cu12
3
+ Version: 12.1.0.106
4
+ Summary: CUSPARSE native runtime libraries
5
+ Home-page: https://developer.nvidia.com/cuda-zone
6
+ Author: Nvidia CUDA Installer Team
7
+ Author-email: [email protected]
8
+ License: NVIDIA Proprietary Software
9
+ Keywords: cuda,nvidia,runtime,machine learning,deep learning
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Intended Audience :: Education
13
+ Classifier: Intended Audience :: Science/Research
14
+ Classifier: License :: Other/Proprietary License
15
+ Classifier: Natural Language :: English
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.5
18
+ Classifier: Programming Language :: Python :: 3.6
19
+ Classifier: Programming Language :: Python :: 3.7
20
+ Classifier: Programming Language :: Python :: 3.8
21
+ Classifier: Programming Language :: Python :: 3.9
22
+ Classifier: Programming Language :: Python :: 3.10
23
+ Classifier: Programming Language :: Python :: 3.11
24
+ Classifier: Programming Language :: Python :: 3 :: Only
25
+ Classifier: Topic :: Scientific/Engineering
26
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
27
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
28
+ Classifier: Topic :: Software Development
29
+ Classifier: Topic :: Software Development :: Libraries
30
+ Classifier: Operating System :: Microsoft :: Windows
31
+ Classifier: Operating System :: POSIX :: Linux
32
+ Requires-Python: >=3
33
+ License-File: License.txt
34
+ Requires-Dist: nvidia-nvjitlink-cu12
35
+
36
+ CUSPARSE native runtime libraries
venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ nvidia/__pycache__/__init__.cpython-310.pyc,,
3
+ nvidia/cusparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ nvidia/cusparse/__pycache__/__init__.cpython-310.pyc,,
5
+ nvidia/cusparse/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ nvidia/cusparse/include/__pycache__/__init__.cpython-310.pyc,,
7
+ nvidia/cusparse/include/cusparse.h,sha256=yhV9iTcEW9XEyhaJmX4iddh_cMb8sfNAy6qva5ae4qw,287290
8
+ nvidia/cusparse/include/cusparse_v2.h,sha256=jkH2A9hYc-TEF0vuQ_SurbhPNEHkYGUIRuxKXhFAqnw,2587
9
+ nvidia/cusparse/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ nvidia/cusparse/lib/__pycache__/__init__.cpython-310.pyc,,
11
+ nvidia/cusparse/lib/libcusparse.so.12,sha256=UARmovVZ3mIqcbuSDT0pI-aRNSRXR6J0LuE-3_C6YIU,264876688
12
+ nvidia_cusparse_cu12-12.1.0.106.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
13
+ nvidia_cusparse_cu12-12.1.0.106.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
14
+ nvidia_cusparse_cu12-12.1.0.106.dist-info/METADATA,sha256=XpBtE4L1lFCx7gDu7Klx9dijNWQW26PS3fcOGjNIsXg,1550
15
+ nvidia_cusparse_cu12-12.1.0.106.dist-info/RECORD,,
16
+ nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106
17
+ nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7
venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.37.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-manylinux1_x86_64
5
+
venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.1.0.106.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ nvidia
venv/lib/python3.10/site-packages/pandas/__init__.py ADDED
@@ -0,0 +1,367 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import warnings
5
+
6
+ __docformat__ = "restructuredtext"
7
+
8
+ # Let users know if they're missing any of our hard dependencies
9
+ _hard_dependencies = ("numpy", "pytz", "dateutil")
10
+ _missing_dependencies = []
11
+
12
+ for _dependency in _hard_dependencies:
13
+ try:
14
+ __import__(_dependency)
15
+ except ImportError as _e: # pragma: no cover
16
+ _missing_dependencies.append(f"{_dependency}: {_e}")
17
+
18
+ if _missing_dependencies: # pragma: no cover
19
+ raise ImportError(
20
+ "Unable to import required dependencies:\n" + "\n".join(_missing_dependencies)
21
+ )
22
+ del _hard_dependencies, _dependency, _missing_dependencies
23
+
24
+ try:
25
+ # numpy compat
26
+ from pandas.compat import (
27
+ is_numpy_dev as _is_numpy_dev, # pyright: ignore[reportUnusedImport] # noqa: F401
28
+ )
29
+ except ImportError as _err: # pragma: no cover
30
+ _module = _err.name
31
+ raise ImportError(
32
+ f"C extension: {_module} not built. If you want to import "
33
+ "pandas from the source directory, you may need to run "
34
+ "'python setup.py build_ext' to build the C extensions first."
35
+ ) from _err
36
+
37
+ from pandas._config import (
38
+ get_option,
39
+ set_option,
40
+ reset_option,
41
+ describe_option,
42
+ option_context,
43
+ options,
44
+ )
45
+
46
+ # let init-time option registration happen
47
+ import pandas.core.config_init # pyright: ignore[reportUnusedImport] # noqa: F401
48
+
49
+ from pandas.core.api import (
50
+ # dtype
51
+ ArrowDtype,
52
+ Int8Dtype,
53
+ Int16Dtype,
54
+ Int32Dtype,
55
+ Int64Dtype,
56
+ UInt8Dtype,
57
+ UInt16Dtype,
58
+ UInt32Dtype,
59
+ UInt64Dtype,
60
+ Float32Dtype,
61
+ Float64Dtype,
62
+ CategoricalDtype,
63
+ PeriodDtype,
64
+ IntervalDtype,
65
+ DatetimeTZDtype,
66
+ StringDtype,
67
+ BooleanDtype,
68
+ # missing
69
+ NA,
70
+ isna,
71
+ isnull,
72
+ notna,
73
+ notnull,
74
+ # indexes
75
+ Index,
76
+ CategoricalIndex,
77
+ RangeIndex,
78
+ MultiIndex,
79
+ IntervalIndex,
80
+ TimedeltaIndex,
81
+ DatetimeIndex,
82
+ PeriodIndex,
83
+ IndexSlice,
84
+ # tseries
85
+ NaT,
86
+ Period,
87
+ period_range,
88
+ Timedelta,
89
+ timedelta_range,
90
+ Timestamp,
91
+ date_range,
92
+ bdate_range,
93
+ Interval,
94
+ interval_range,
95
+ DateOffset,
96
+ # conversion
97
+ to_numeric,
98
+ to_datetime,
99
+ to_timedelta,
100
+ # misc
101
+ Flags,
102
+ Grouper,
103
+ factorize,
104
+ unique,
105
+ value_counts,
106
+ NamedAgg,
107
+ array,
108
+ Categorical,
109
+ set_eng_float_format,
110
+ Series,
111
+ DataFrame,
112
+ )
113
+
114
+ from pandas.core.dtypes.dtypes import SparseDtype
115
+
116
+ from pandas.tseries.api import infer_freq
117
+ from pandas.tseries import offsets
118
+
119
+ from pandas.core.computation.api import eval
120
+
121
+ from pandas.core.reshape.api import (
122
+ concat,
123
+ lreshape,
124
+ melt,
125
+ wide_to_long,
126
+ merge,
127
+ merge_asof,
128
+ merge_ordered,
129
+ crosstab,
130
+ pivot,
131
+ pivot_table,
132
+ get_dummies,
133
+ from_dummies,
134
+ cut,
135
+ qcut,
136
+ )
137
+
138
+ from pandas import api, arrays, errors, io, plotting, tseries
139
+ from pandas import testing
140
+ from pandas.util._print_versions import show_versions
141
+
142
+ from pandas.io.api import (
143
+ # excel
144
+ ExcelFile,
145
+ ExcelWriter,
146
+ read_excel,
147
+ # parsers
148
+ read_csv,
149
+ read_fwf,
150
+ read_table,
151
+ # pickle
152
+ read_pickle,
153
+ to_pickle,
154
+ # pytables
155
+ HDFStore,
156
+ read_hdf,
157
+ # sql
158
+ read_sql,
159
+ read_sql_query,
160
+ read_sql_table,
161
+ # misc
162
+ read_clipboard,
163
+ read_parquet,
164
+ read_orc,
165
+ read_feather,
166
+ read_gbq,
167
+ read_html,
168
+ read_xml,
169
+ read_json,
170
+ read_stata,
171
+ read_sas,
172
+ read_spss,
173
+ )
174
+
175
+ from pandas.io.json._normalize import json_normalize
176
+
177
+ from pandas.util._tester import test
178
+
179
+ # use the closest tagged version if possible
180
+ _built_with_meson = False
181
+ try:
182
+ from pandas._version_meson import ( # pyright: ignore [reportMissingImports]
183
+ __version__,
184
+ __git_version__,
185
+ )
186
+
187
+ _built_with_meson = True
188
+ except ImportError:
189
+ from pandas._version import get_versions
190
+
191
+ v = get_versions()
192
+ __version__ = v.get("closest-tag", v["version"])
193
+ __git_version__ = v.get("full-revisionid")
194
+ del get_versions, v
195
+
196
+ # GH#55043 - deprecation of the data_manager option
197
+ if "PANDAS_DATA_MANAGER" in os.environ:
198
+ warnings.warn(
199
+ "The env variable PANDAS_DATA_MANAGER is set. The data_manager option is "
200
+ "deprecated and will be removed in a future version. Only the BlockManager "
201
+ "will be available. Unset this environment variable to silence this warning.",
202
+ FutureWarning,
203
+ stacklevel=2,
204
+ )
205
+
206
+ del warnings, os
207
+
208
+ # module level doc-string
209
+ __doc__ = """
210
+ pandas - a powerful data analysis and manipulation library for Python
211
+ =====================================================================
212
+
213
+ **pandas** is a Python package providing fast, flexible, and expressive data
214
+ structures designed to make working with "relational" or "labeled" data both
215
+ easy and intuitive. It aims to be the fundamental high-level building block for
216
+ doing practical, **real world** data analysis in Python. Additionally, it has
217
+ the broader goal of becoming **the most powerful and flexible open source data
218
+ analysis / manipulation tool available in any language**. It is already well on
219
+ its way toward this goal.
220
+
221
+ Main Features
222
+ -------------
223
+ Here are just a few of the things that pandas does well:
224
+
225
+ - Easy handling of missing data in floating point as well as non-floating
226
+ point data.
227
+ - Size mutability: columns can be inserted and deleted from DataFrame and
228
+ higher dimensional objects
229
+ - Automatic and explicit data alignment: objects can be explicitly aligned
230
+ to a set of labels, or the user can simply ignore the labels and let
231
+ `Series`, `DataFrame`, etc. automatically align the data for you in
232
+ computations.
233
+ - Powerful, flexible group by functionality to perform split-apply-combine
234
+ operations on data sets, for both aggregating and transforming data.
235
+ - Make it easy to convert ragged, differently-indexed data in other Python
236
+ and NumPy data structures into DataFrame objects.
237
+ - Intelligent label-based slicing, fancy indexing, and subsetting of large
238
+ data sets.
239
+ - Intuitive merging and joining data sets.
240
+ - Flexible reshaping and pivoting of data sets.
241
+ - Hierarchical labeling of axes (possible to have multiple labels per tick).
242
+ - Robust IO tools for loading data from flat files (CSV and delimited),
243
+ Excel files, databases, and saving/loading data from the ultrafast HDF5
244
+ format.
245
+ - Time series-specific functionality: date range generation and frequency
246
+ conversion, moving window statistics, date shifting and lagging.
247
+ """
248
+
249
+ # Use __all__ to let type checkers know what is part of the public API.
250
+ # Pandas is not (yet) a py.typed library: the public API is determined
251
+ # based on the documentation.
252
+ __all__ = [
253
+ "ArrowDtype",
254
+ "BooleanDtype",
255
+ "Categorical",
256
+ "CategoricalDtype",
257
+ "CategoricalIndex",
258
+ "DataFrame",
259
+ "DateOffset",
260
+ "DatetimeIndex",
261
+ "DatetimeTZDtype",
262
+ "ExcelFile",
263
+ "ExcelWriter",
264
+ "Flags",
265
+ "Float32Dtype",
266
+ "Float64Dtype",
267
+ "Grouper",
268
+ "HDFStore",
269
+ "Index",
270
+ "IndexSlice",
271
+ "Int16Dtype",
272
+ "Int32Dtype",
273
+ "Int64Dtype",
274
+ "Int8Dtype",
275
+ "Interval",
276
+ "IntervalDtype",
277
+ "IntervalIndex",
278
+ "MultiIndex",
279
+ "NA",
280
+ "NaT",
281
+ "NamedAgg",
282
+ "Period",
283
+ "PeriodDtype",
284
+ "PeriodIndex",
285
+ "RangeIndex",
286
+ "Series",
287
+ "SparseDtype",
288
+ "StringDtype",
289
+ "Timedelta",
290
+ "TimedeltaIndex",
291
+ "Timestamp",
292
+ "UInt16Dtype",
293
+ "UInt32Dtype",
294
+ "UInt64Dtype",
295
+ "UInt8Dtype",
296
+ "api",
297
+ "array",
298
+ "arrays",
299
+ "bdate_range",
300
+ "concat",
301
+ "crosstab",
302
+ "cut",
303
+ "date_range",
304
+ "describe_option",
305
+ "errors",
306
+ "eval",
307
+ "factorize",
308
+ "get_dummies",
309
+ "from_dummies",
310
+ "get_option",
311
+ "infer_freq",
312
+ "interval_range",
313
+ "io",
314
+ "isna",
315
+ "isnull",
316
+ "json_normalize",
317
+ "lreshape",
318
+ "melt",
319
+ "merge",
320
+ "merge_asof",
321
+ "merge_ordered",
322
+ "notna",
323
+ "notnull",
324
+ "offsets",
325
+ "option_context",
326
+ "options",
327
+ "period_range",
328
+ "pivot",
329
+ "pivot_table",
330
+ "plotting",
331
+ "qcut",
332
+ "read_clipboard",
333
+ "read_csv",
334
+ "read_excel",
335
+ "read_feather",
336
+ "read_fwf",
337
+ "read_gbq",
338
+ "read_hdf",
339
+ "read_html",
340
+ "read_json",
341
+ "read_orc",
342
+ "read_parquet",
343
+ "read_pickle",
344
+ "read_sas",
345
+ "read_spss",
346
+ "read_sql",
347
+ "read_sql_query",
348
+ "read_sql_table",
349
+ "read_stata",
350
+ "read_table",
351
+ "read_xml",
352
+ "reset_option",
353
+ "set_eng_float_format",
354
+ "set_option",
355
+ "show_versions",
356
+ "test",
357
+ "testing",
358
+ "timedelta_range",
359
+ "to_datetime",
360
+ "to_numeric",
361
+ "to_pickle",
362
+ "to_timedelta",
363
+ "tseries",
364
+ "unique",
365
+ "value_counts",
366
+ "wide_to_long",
367
+ ]
venv/lib/python3.10/site-packages/pandas/_typing.py ADDED
@@ -0,0 +1,525 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import (
4
+ Hashable,
5
+ Iterator,
6
+ Mapping,
7
+ MutableMapping,
8
+ Sequence,
9
+ )
10
+ from datetime import (
11
+ date,
12
+ datetime,
13
+ timedelta,
14
+ tzinfo,
15
+ )
16
+ from os import PathLike
17
+ import sys
18
+ from typing import (
19
+ TYPE_CHECKING,
20
+ Any,
21
+ Callable,
22
+ Literal,
23
+ Optional,
24
+ Protocol,
25
+ Type as type_t,
26
+ TypeVar,
27
+ Union,
28
+ overload,
29
+ )
30
+
31
+ import numpy as np
32
+
33
+ # To prevent import cycles place any internal imports in the branch below
34
+ # and use a string literal forward reference to it in subsequent types
35
+ # https://mypy.readthedocs.io/en/latest/common_issues.html#import-cycles
36
+ if TYPE_CHECKING:
37
+ import numpy.typing as npt
38
+
39
+ from pandas._libs import (
40
+ NaTType,
41
+ Period,
42
+ Timedelta,
43
+ Timestamp,
44
+ )
45
+ from pandas._libs.tslibs import BaseOffset
46
+
47
+ from pandas.core.dtypes.dtypes import ExtensionDtype
48
+
49
+ from pandas import Interval
50
+ from pandas.arrays import (
51
+ DatetimeArray,
52
+ TimedeltaArray,
53
+ )
54
+ from pandas.core.arrays.base import ExtensionArray
55
+ from pandas.core.frame import DataFrame
56
+ from pandas.core.generic import NDFrame
57
+ from pandas.core.groupby.generic import (
58
+ DataFrameGroupBy,
59
+ GroupBy,
60
+ SeriesGroupBy,
61
+ )
62
+ from pandas.core.indexes.base import Index
63
+ from pandas.core.internals import (
64
+ ArrayManager,
65
+ BlockManager,
66
+ SingleArrayManager,
67
+ SingleBlockManager,
68
+ )
69
+ from pandas.core.resample import Resampler
70
+ from pandas.core.series import Series
71
+ from pandas.core.window.rolling import BaseWindow
72
+
73
+ from pandas.io.formats.format import EngFormatter
74
+ from pandas.tseries.holiday import AbstractHolidayCalendar
75
+
76
+ ScalarLike_co = Union[
77
+ int,
78
+ float,
79
+ complex,
80
+ str,
81
+ bytes,
82
+ np.generic,
83
+ ]
84
+
85
+ # numpy compatible types
86
+ NumpyValueArrayLike = Union[ScalarLike_co, npt.ArrayLike]
87
+ # Name "npt._ArrayLikeInt_co" is not defined [name-defined]
88
+ NumpySorter = Optional[npt._ArrayLikeInt_co] # type: ignore[name-defined]
89
+
90
+ from typing import SupportsIndex
91
+
92
+ if sys.version_info >= (3, 10):
93
+ from typing import TypeGuard # pyright: ignore[reportUnusedImport]
94
+ else:
95
+ from typing_extensions import TypeGuard # pyright: ignore[reportUnusedImport]
96
+
97
+ if sys.version_info >= (3, 11):
98
+ from typing import Self # pyright: ignore[reportUnusedImport]
99
+ else:
100
+ from typing_extensions import Self # pyright: ignore[reportUnusedImport]
101
+ else:
102
+ npt: Any = None
103
+ Self: Any = None
104
+ TypeGuard: Any = None
105
+
106
+ HashableT = TypeVar("HashableT", bound=Hashable)
107
+ MutableMappingT = TypeVar("MutableMappingT", bound=MutableMapping)
108
+
109
+ # array-like
110
+
111
+ ArrayLike = Union["ExtensionArray", np.ndarray]
112
+ AnyArrayLike = Union[ArrayLike, "Index", "Series"]
113
+ TimeArrayLike = Union["DatetimeArray", "TimedeltaArray"]
114
+
115
+ # list-like
116
+
117
+ # from https://github.com/hauntsaninja/useful_types
118
+ # includes Sequence-like objects but excludes str and bytes
119
+ _T_co = TypeVar("_T_co", covariant=True)
120
+
121
+
122
+ class SequenceNotStr(Protocol[_T_co]):
123
+ @overload
124
+ def __getitem__(self, index: SupportsIndex, /) -> _T_co:
125
+ ...
126
+
127
+ @overload
128
+ def __getitem__(self, index: slice, /) -> Sequence[_T_co]:
129
+ ...
130
+
131
+ def __contains__(self, value: object, /) -> bool:
132
+ ...
133
+
134
+ def __len__(self) -> int:
135
+ ...
136
+
137
+ def __iter__(self) -> Iterator[_T_co]:
138
+ ...
139
+
140
+ def index(self, value: Any, /, start: int = 0, stop: int = ...) -> int:
141
+ ...
142
+
143
+ def count(self, value: Any, /) -> int:
144
+ ...
145
+
146
+ def __reversed__(self) -> Iterator[_T_co]:
147
+ ...
148
+
149
+
150
+ ListLike = Union[AnyArrayLike, SequenceNotStr, range]
151
+
152
+ # scalars
153
+
154
+ PythonScalar = Union[str, float, bool]
155
+ DatetimeLikeScalar = Union["Period", "Timestamp", "Timedelta"]
156
+ PandasScalar = Union["Period", "Timestamp", "Timedelta", "Interval"]
157
+ Scalar = Union[PythonScalar, PandasScalar, np.datetime64, np.timedelta64, date]
158
+ IntStrT = TypeVar("IntStrT", bound=Union[int, str])
159
+
160
+
161
+ # timestamp and timedelta convertible types
162
+
163
+ TimestampConvertibleTypes = Union[
164
+ "Timestamp", date, np.datetime64, np.int64, float, str
165
+ ]
166
+ TimestampNonexistent = Union[
167
+ Literal["shift_forward", "shift_backward", "NaT", "raise"], timedelta
168
+ ]
169
+ TimedeltaConvertibleTypes = Union[
170
+ "Timedelta", timedelta, np.timedelta64, np.int64, float, str
171
+ ]
172
+ Timezone = Union[str, tzinfo]
173
+
174
+ ToTimestampHow = Literal["s", "e", "start", "end"]
175
+
176
+ # NDFrameT is stricter and ensures that the same subclass of NDFrame always is
177
+ # used. E.g. `def func(a: NDFrameT) -> NDFrameT: ...` means that if a
178
+ # Series is passed into a function, a Series is always returned and if a DataFrame is
179
+ # passed in, a DataFrame is always returned.
180
+ NDFrameT = TypeVar("NDFrameT", bound="NDFrame")
181
+
182
+ NumpyIndexT = TypeVar("NumpyIndexT", np.ndarray, "Index")
183
+
184
+ AxisInt = int
185
+ Axis = Union[AxisInt, Literal["index", "columns", "rows"]]
186
+ IndexLabel = Union[Hashable, Sequence[Hashable]]
187
+ Level = Hashable
188
+ Shape = tuple[int, ...]
189
+ Suffixes = tuple[Optional[str], Optional[str]]
190
+ Ordered = Optional[bool]
191
+ JSONSerializable = Optional[Union[PythonScalar, list, dict]]
192
+ Frequency = Union[str, "BaseOffset"]
193
+ Axes = ListLike
194
+
195
+ RandomState = Union[
196
+ int,
197
+ np.ndarray,
198
+ np.random.Generator,
199
+ np.random.BitGenerator,
200
+ np.random.RandomState,
201
+ ]
202
+
203
+ # dtypes
204
+ NpDtype = Union[str, np.dtype, type_t[Union[str, complex, bool, object]]]
205
+ Dtype = Union["ExtensionDtype", NpDtype]
206
+ AstypeArg = Union["ExtensionDtype", "npt.DTypeLike"]
207
+ # DtypeArg specifies all allowable dtypes in a functions its dtype argument
208
+ DtypeArg = Union[Dtype, dict[Hashable, Dtype]]
209
+ DtypeObj = Union[np.dtype, "ExtensionDtype"]
210
+
211
+ # converters
212
+ ConvertersArg = dict[Hashable, Callable[[Dtype], Dtype]]
213
+
214
+ # parse_dates
215
+ ParseDatesArg = Union[
216
+ bool, list[Hashable], list[list[Hashable]], dict[Hashable, list[Hashable]]
217
+ ]
218
+
219
+ # For functions like rename that convert one label to another
220
+ Renamer = Union[Mapping[Any, Hashable], Callable[[Any], Hashable]]
221
+
222
+ # to maintain type information across generic functions and parametrization
223
+ T = TypeVar("T")
224
+
225
+ # used in decorators to preserve the signature of the function it decorates
226
+ # see https://mypy.readthedocs.io/en/stable/generics.html#declaring-decorators
227
+ FuncType = Callable[..., Any]
228
+ F = TypeVar("F", bound=FuncType)
229
+
230
+ # types of vectorized key functions for DataFrame::sort_values and
231
+ # DataFrame::sort_index, among others
232
+ ValueKeyFunc = Optional[Callable[["Series"], Union["Series", AnyArrayLike]]]
233
+ IndexKeyFunc = Optional[Callable[["Index"], Union["Index", AnyArrayLike]]]
234
+
235
+ # types of `func` kwarg for DataFrame.aggregate and Series.aggregate
236
+ AggFuncTypeBase = Union[Callable, str]
237
+ AggFuncTypeDict = MutableMapping[
238
+ Hashable, Union[AggFuncTypeBase, list[AggFuncTypeBase]]
239
+ ]
240
+ AggFuncType = Union[
241
+ AggFuncTypeBase,
242
+ list[AggFuncTypeBase],
243
+ AggFuncTypeDict,
244
+ ]
245
+ AggObjType = Union[
246
+ "Series",
247
+ "DataFrame",
248
+ "GroupBy",
249
+ "SeriesGroupBy",
250
+ "DataFrameGroupBy",
251
+ "BaseWindow",
252
+ "Resampler",
253
+ ]
254
+
255
+ PythonFuncType = Callable[[Any], Any]
256
+
257
+ # filenames and file-like-objects
258
+ AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True)
259
+ AnyStr_contra = TypeVar("AnyStr_contra", str, bytes, contravariant=True)
260
+
261
+
262
+ class BaseBuffer(Protocol):
263
+ @property
264
+ def mode(self) -> str:
265
+ # for _get_filepath_or_buffer
266
+ ...
267
+
268
+ def seek(self, __offset: int, __whence: int = ...) -> int:
269
+ # with one argument: gzip.GzipFile, bz2.BZ2File
270
+ # with two arguments: zip.ZipFile, read_sas
271
+ ...
272
+
273
+ def seekable(self) -> bool:
274
+ # for bz2.BZ2File
275
+ ...
276
+
277
+ def tell(self) -> int:
278
+ # for zip.ZipFile, read_stata, to_stata
279
+ ...
280
+
281
+
282
+ class ReadBuffer(BaseBuffer, Protocol[AnyStr_co]):
283
+ def read(self, __n: int = ...) -> AnyStr_co:
284
+ # for BytesIOWrapper, gzip.GzipFile, bz2.BZ2File
285
+ ...
286
+
287
+
288
+ class WriteBuffer(BaseBuffer, Protocol[AnyStr_contra]):
289
+ def write(self, __b: AnyStr_contra) -> Any:
290
+ # for gzip.GzipFile, bz2.BZ2File
291
+ ...
292
+
293
+ def flush(self) -> Any:
294
+ # for gzip.GzipFile, bz2.BZ2File
295
+ ...
296
+
297
+
298
+ class ReadPickleBuffer(ReadBuffer[bytes], Protocol):
299
+ def readline(self) -> bytes:
300
+ ...
301
+
302
+
303
+ class WriteExcelBuffer(WriteBuffer[bytes], Protocol):
304
+ def truncate(self, size: int | None = ...) -> int:
305
+ ...
306
+
307
+
308
+ class ReadCsvBuffer(ReadBuffer[AnyStr_co], Protocol):
309
+ def __iter__(self) -> Iterator[AnyStr_co]:
310
+ # for engine=python
311
+ ...
312
+
313
+ def fileno(self) -> int:
314
+ # for _MMapWrapper
315
+ ...
316
+
317
+ def readline(self) -> AnyStr_co:
318
+ # for engine=python
319
+ ...
320
+
321
+ @property
322
+ def closed(self) -> bool:
323
+ # for enine=pyarrow
324
+ ...
325
+
326
+
327
+ FilePath = Union[str, "PathLike[str]"]
328
+
329
+ # for arbitrary kwargs passed during reading/writing files
330
+ StorageOptions = Optional[dict[str, Any]]
331
+
332
+
333
+ # compression keywords and compression
334
+ CompressionDict = dict[str, Any]
335
+ CompressionOptions = Optional[
336
+ Union[Literal["infer", "gzip", "bz2", "zip", "xz", "zstd", "tar"], CompressionDict]
337
+ ]
338
+
339
+ # types in DataFrameFormatter
340
+ FormattersType = Union[
341
+ list[Callable], tuple[Callable, ...], Mapping[Union[str, int], Callable]
342
+ ]
343
+ ColspaceType = Mapping[Hashable, Union[str, int]]
344
+ FloatFormatType = Union[str, Callable, "EngFormatter"]
345
+ ColspaceArgType = Union[
346
+ str, int, Sequence[Union[str, int]], Mapping[Hashable, Union[str, int]]
347
+ ]
348
+
349
+ # Arguments for fillna()
350
+ FillnaOptions = Literal["backfill", "bfill", "ffill", "pad"]
351
+ InterpolateOptions = Literal[
352
+ "linear",
353
+ "time",
354
+ "index",
355
+ "values",
356
+ "nearest",
357
+ "zero",
358
+ "slinear",
359
+ "quadratic",
360
+ "cubic",
361
+ "barycentric",
362
+ "polynomial",
363
+ "krogh",
364
+ "piecewise_polynomial",
365
+ "spline",
366
+ "pchip",
367
+ "akima",
368
+ "cubicspline",
369
+ "from_derivatives",
370
+ ]
371
+
372
+ # internals
373
+ Manager = Union[
374
+ "ArrayManager", "SingleArrayManager", "BlockManager", "SingleBlockManager"
375
+ ]
376
+ SingleManager = Union["SingleArrayManager", "SingleBlockManager"]
377
+ Manager2D = Union["ArrayManager", "BlockManager"]
378
+
379
+ # indexing
380
+ # PositionalIndexer -> valid 1D positional indexer, e.g. can pass
381
+ # to ndarray.__getitem__
382
+ # ScalarIndexer is for a single value as the index
383
+ # SequenceIndexer is for list like or slices (but not tuples)
384
+ # PositionalIndexerTuple is extends the PositionalIndexer for 2D arrays
385
+ # These are used in various __getitem__ overloads
386
+ # TODO(typing#684): add Ellipsis, see
387
+ # https://github.com/python/typing/issues/684#issuecomment-548203158
388
+ # https://bugs.python.org/issue41810
389
+ # Using List[int] here rather than Sequence[int] to disallow tuples.
390
+ ScalarIndexer = Union[int, np.integer]
391
+ SequenceIndexer = Union[slice, list[int], np.ndarray]
392
+ PositionalIndexer = Union[ScalarIndexer, SequenceIndexer]
393
+ PositionalIndexerTuple = tuple[PositionalIndexer, PositionalIndexer]
394
+ PositionalIndexer2D = Union[PositionalIndexer, PositionalIndexerTuple]
395
+ if TYPE_CHECKING:
396
+ TakeIndexer = Union[Sequence[int], Sequence[np.integer], npt.NDArray[np.integer]]
397
+ else:
398
+ TakeIndexer = Any
399
+
400
+ # Shared by functions such as drop and astype
401
+ IgnoreRaise = Literal["ignore", "raise"]
402
+
403
+ # Windowing rank methods
404
+ WindowingRankType = Literal["average", "min", "max"]
405
+
406
+ # read_csv engines
407
+ CSVEngine = Literal["c", "python", "pyarrow", "python-fwf"]
408
+
409
+ # read_json engines
410
+ JSONEngine = Literal["ujson", "pyarrow"]
411
+
412
+ # read_xml parsers
413
+ XMLParsers = Literal["lxml", "etree"]
414
+
415
+ # read_html flavors
416
+ HTMLFlavors = Literal["lxml", "html5lib", "bs4"]
417
+
418
+ # Interval closed type
419
+ IntervalLeftRight = Literal["left", "right"]
420
+ IntervalClosedType = Union[IntervalLeftRight, Literal["both", "neither"]]
421
+
422
+ # datetime and NaTType
423
+ DatetimeNaTType = Union[datetime, "NaTType"]
424
+ DateTimeErrorChoices = Union[IgnoreRaise, Literal["coerce"]]
425
+
426
+ # sort_index
427
+ SortKind = Literal["quicksort", "mergesort", "heapsort", "stable"]
428
+ NaPosition = Literal["first", "last"]
429
+
430
+ # Arguments for nsmalles and n_largest
431
+ NsmallestNlargestKeep = Literal["first", "last", "all"]
432
+
433
+ # quantile interpolation
434
+ QuantileInterpolation = Literal["linear", "lower", "higher", "midpoint", "nearest"]
435
+
436
+ # plotting
437
+ PlottingOrientation = Literal["horizontal", "vertical"]
438
+
439
+ # dropna
440
+ AnyAll = Literal["any", "all"]
441
+
442
+ # merge
443
+ MergeHow = Literal["left", "right", "inner", "outer", "cross"]
444
+ MergeValidate = Literal[
445
+ "one_to_one",
446
+ "1:1",
447
+ "one_to_many",
448
+ "1:m",
449
+ "many_to_one",
450
+ "m:1",
451
+ "many_to_many",
452
+ "m:m",
453
+ ]
454
+
455
+ # join
456
+ JoinHow = Literal["left", "right", "inner", "outer"]
457
+ JoinValidate = Literal[
458
+ "one_to_one",
459
+ "1:1",
460
+ "one_to_many",
461
+ "1:m",
462
+ "many_to_one",
463
+ "m:1",
464
+ "many_to_many",
465
+ "m:m",
466
+ ]
467
+
468
+ # reindex
469
+ ReindexMethod = Union[FillnaOptions, Literal["nearest"]]
470
+
471
+ MatplotlibColor = Union[str, Sequence[float]]
472
+ TimeGrouperOrigin = Union[
473
+ "Timestamp", Literal["epoch", "start", "start_day", "end", "end_day"]
474
+ ]
475
+ TimeAmbiguous = Union[Literal["infer", "NaT", "raise"], "npt.NDArray[np.bool_]"]
476
+ TimeNonexistent = Union[
477
+ Literal["shift_forward", "shift_backward", "NaT", "raise"], timedelta
478
+ ]
479
+ DropKeep = Literal["first", "last", False]
480
+ CorrelationMethod = Union[
481
+ Literal["pearson", "kendall", "spearman"], Callable[[np.ndarray, np.ndarray], float]
482
+ ]
483
+ AlignJoin = Literal["outer", "inner", "left", "right"]
484
+ DtypeBackend = Literal["pyarrow", "numpy_nullable"]
485
+
486
+ TimeUnit = Literal["s", "ms", "us", "ns"]
487
+ OpenFileErrors = Literal[
488
+ "strict",
489
+ "ignore",
490
+ "replace",
491
+ "surrogateescape",
492
+ "xmlcharrefreplace",
493
+ "backslashreplace",
494
+ "namereplace",
495
+ ]
496
+
497
+ # update
498
+ UpdateJoin = Literal["left"]
499
+
500
+ # applymap
501
+ NaAction = Literal["ignore"]
502
+
503
+ # from_dict
504
+ FromDictOrient = Literal["columns", "index", "tight"]
505
+
506
+ # to_gbc
507
+ ToGbqIfexist = Literal["fail", "replace", "append"]
508
+
509
+ # to_stata
510
+ ToStataByteorder = Literal[">", "<", "little", "big"]
511
+
512
+ # ExcelWriter
513
+ ExcelWriterIfSheetExists = Literal["error", "new", "replace", "overlay"]
514
+
515
+ # Offsets
516
+ OffsetCalendar = Union[np.busdaycalendar, "AbstractHolidayCalendar"]
517
+
518
+ # read_csv: usecols
519
+ UsecolsArgType = Union[
520
+ SequenceNotStr[Hashable],
521
+ range,
522
+ AnyArrayLike,
523
+ Callable[[HashableT], bool],
524
+ None,
525
+ ]
venv/lib/python3.10/site-packages/pandas/_version.py ADDED
@@ -0,0 +1,692 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file helps to compute a version number in source trees obtained from
2
+ # git-archive tarball (such as those provided by githubs download-from-tag
3
+ # feature). Distribution tarballs (built by setup.py sdist) and build
4
+ # directories (produced by setup.py build) will contain a much shorter file
5
+ # that just contains the computed version number.
6
+
7
+ # This file is released into the public domain.
8
+ # Generated by versioneer-0.28
9
+ # https://github.com/python-versioneer/python-versioneer
10
+
11
+ """Git implementation of _version.py."""
12
+
13
+ import errno
14
+ import functools
15
+ import os
16
+ import re
17
+ import subprocess
18
+ import sys
19
+ from typing import Callable
20
+
21
+
22
+ def get_keywords():
23
+ """Get the keywords needed to look up the version information."""
24
+ # these strings will be replaced by git during git-archive.
25
+ # setup.py/versioneer.py will grep for the variable names, so they must
26
+ # each be defined on a line of their own. _version.py will just call
27
+ # get_keywords().
28
+ git_refnames = "$Format:%d$"
29
+ git_full = "$Format:%H$"
30
+ git_date = "$Format:%ci$"
31
+ keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
32
+ return keywords
33
+
34
+
35
+ class VersioneerConfig:
36
+ """Container for Versioneer configuration parameters."""
37
+
38
+
39
+ def get_config():
40
+ """Create, populate and return the VersioneerConfig() object."""
41
+ # these strings are filled in when 'setup.py versioneer' creates
42
+ # _version.py
43
+ cfg = VersioneerConfig()
44
+ cfg.VCS = "git"
45
+ cfg.style = "pep440"
46
+ cfg.tag_prefix = "v"
47
+ cfg.parentdir_prefix = "pandas-"
48
+ cfg.versionfile_source = "pandas/_version.py"
49
+ cfg.verbose = False
50
+ return cfg
51
+
52
+
53
+ class NotThisMethod(Exception):
54
+ """Exception raised if a method is not valid for the current scenario."""
55
+
56
+
57
+ LONG_VERSION_PY: dict[str, str] = {}
58
+ HANDLERS: dict[str, dict[str, Callable]] = {}
59
+
60
+
61
+ def register_vcs_handler(vcs, method): # decorator
62
+ """Create decorator to mark a method as the handler of a VCS."""
63
+
64
+ def decorate(f):
65
+ """Store f in HANDLERS[vcs][method]."""
66
+ if vcs not in HANDLERS:
67
+ HANDLERS[vcs] = {}
68
+ HANDLERS[vcs][method] = f
69
+ return f
70
+
71
+ return decorate
72
+
73
+
74
+ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
75
+ """Call the given command(s)."""
76
+ assert isinstance(commands, list)
77
+ process = None
78
+
79
+ popen_kwargs = {}
80
+ if sys.platform == "win32":
81
+ # This hides the console window if pythonw.exe is used
82
+ startupinfo = subprocess.STARTUPINFO()
83
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
84
+ popen_kwargs["startupinfo"] = startupinfo
85
+
86
+ for command in commands:
87
+ dispcmd = str([command] + args)
88
+ try:
89
+ # remember shell=False, so use git.cmd on windows, not just git
90
+ process = subprocess.Popen(
91
+ [command] + args,
92
+ cwd=cwd,
93
+ env=env,
94
+ stdout=subprocess.PIPE,
95
+ stderr=(subprocess.PIPE if hide_stderr else None),
96
+ **popen_kwargs,
97
+ )
98
+ break
99
+ except OSError:
100
+ e = sys.exc_info()[1]
101
+ if e.errno == errno.ENOENT:
102
+ continue
103
+ if verbose:
104
+ print(f"unable to run {dispcmd}")
105
+ print(e)
106
+ return None, None
107
+ else:
108
+ if verbose:
109
+ print(f"unable to find command, tried {commands}")
110
+ return None, None
111
+ stdout = process.communicate()[0].strip().decode()
112
+ if process.returncode != 0:
113
+ if verbose:
114
+ print(f"unable to run {dispcmd} (error)")
115
+ print(f"stdout was {stdout}")
116
+ return None, process.returncode
117
+ return stdout, process.returncode
118
+
119
+
120
+ def versions_from_parentdir(parentdir_prefix, root, verbose):
121
+ """Try to determine the version from the parent directory name.
122
+
123
+ Source tarballs conventionally unpack into a directory that includes both
124
+ the project name and a version string. We will also support searching up
125
+ two directory levels for an appropriately named parent directory
126
+ """
127
+ rootdirs = []
128
+
129
+ for _ in range(3):
130
+ dirname = os.path.basename(root)
131
+ if dirname.startswith(parentdir_prefix):
132
+ return {
133
+ "version": dirname[len(parentdir_prefix) :],
134
+ "full-revisionid": None,
135
+ "dirty": False,
136
+ "error": None,
137
+ "date": None,
138
+ }
139
+ rootdirs.append(root)
140
+ root = os.path.dirname(root) # up a level
141
+
142
+ if verbose:
143
+ print(
144
+ f"Tried directories {str(rootdirs)} \
145
+ but none started with prefix {parentdir_prefix}"
146
+ )
147
+ raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
148
+
149
+
150
+ @register_vcs_handler("git", "get_keywords")
151
+ def git_get_keywords(versionfile_abs):
152
+ """Extract version information from the given file."""
153
+ # the code embedded in _version.py can just fetch the value of these
154
+ # keywords. When used from setup.py, we don't want to import _version.py,
155
+ # so we do it with a regexp instead. This function is not used from
156
+ # _version.py.
157
+ keywords = {}
158
+ try:
159
+ with open(versionfile_abs, encoding="utf-8") as fobj:
160
+ for line in fobj:
161
+ if line.strip().startswith("git_refnames ="):
162
+ mo = re.search(r'=\s*"(.*)"', line)
163
+ if mo:
164
+ keywords["refnames"] = mo.group(1)
165
+ if line.strip().startswith("git_full ="):
166
+ mo = re.search(r'=\s*"(.*)"', line)
167
+ if mo:
168
+ keywords["full"] = mo.group(1)
169
+ if line.strip().startswith("git_date ="):
170
+ mo = re.search(r'=\s*"(.*)"', line)
171
+ if mo:
172
+ keywords["date"] = mo.group(1)
173
+ except OSError:
174
+ pass
175
+ return keywords
176
+
177
+
178
+ @register_vcs_handler("git", "keywords")
179
+ def git_versions_from_keywords(keywords, tag_prefix, verbose):
180
+ """Get version information from git keywords."""
181
+ if "refnames" not in keywords:
182
+ raise NotThisMethod("Short version file found")
183
+ date = keywords.get("date")
184
+ if date is not None:
185
+ # Use only the last line. Previous lines may contain GPG signature
186
+ # information.
187
+ date = date.splitlines()[-1]
188
+
189
+ # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
190
+ # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
191
+ # -like" string, which we must then edit to make compliant), because
192
+ # it's been around since git-1.5.3, and it's too difficult to
193
+ # discover which version we're using, or to work around using an
194
+ # older one.
195
+ date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
196
+ refnames = keywords["refnames"].strip()
197
+ if refnames.startswith("$Format"):
198
+ if verbose:
199
+ print("keywords are unexpanded, not using")
200
+ raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
201
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
202
+ # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
203
+ # just "foo-1.0". If we see a "tag: " prefix, prefer those.
204
+ TAG = "tag: "
205
+ tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
206
+ if not tags:
207
+ # Either we're using git < 1.8.3, or there really are no tags. We use
208
+ # a heuristic: assume all version tags have a digit. The old git %d
209
+ # expansion behaves like git log --decorate=short and strips out the
210
+ # refs/heads/ and refs/tags/ prefixes that would let us distinguish
211
+ # between branches and tags. By ignoring refnames without digits, we
212
+ # filter out many common branch names like "release" and
213
+ # "stabilization", as well as "HEAD" and "master".
214
+ tags = {r for r in refs if re.search(r"\d", r)}
215
+ if verbose:
216
+ print(f"discarding '{','.join(refs - tags)}', no digits")
217
+ if verbose:
218
+ print(f"likely tags: {','.join(sorted(tags))}")
219
+ for ref in sorted(tags):
220
+ # sorting will prefer e.g. "2.0" over "2.0rc1"
221
+ if ref.startswith(tag_prefix):
222
+ r = ref[len(tag_prefix) :]
223
+ # Filter out refs that exactly match prefix or that don't start
224
+ # with a number once the prefix is stripped (mostly a concern
225
+ # when prefix is '')
226
+ if not re.match(r"\d", r):
227
+ continue
228
+ if verbose:
229
+ print(f"picking {r}")
230
+ return {
231
+ "version": r,
232
+ "full-revisionid": keywords["full"].strip(),
233
+ "dirty": False,
234
+ "error": None,
235
+ "date": date,
236
+ }
237
+ # no suitable tags, so version is "0+unknown", but full hex is still there
238
+ if verbose:
239
+ print("no suitable tags, using unknown + full revision id")
240
+ return {
241
+ "version": "0+unknown",
242
+ "full-revisionid": keywords["full"].strip(),
243
+ "dirty": False,
244
+ "error": "no suitable tags",
245
+ "date": None,
246
+ }
247
+
248
+
249
+ @register_vcs_handler("git", "pieces_from_vcs")
250
+ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
251
+ """Get version from 'git describe' in the root of the source tree.
252
+
253
+ This only gets called if the git-archive 'subst' keywords were *not*
254
+ expanded, and _version.py hasn't already been rewritten with a short
255
+ version string, meaning we're inside a checked out source tree.
256
+ """
257
+ GITS = ["git"]
258
+ if sys.platform == "win32":
259
+ GITS = ["git.cmd", "git.exe"]
260
+
261
+ # GIT_DIR can interfere with correct operation of Versioneer.
262
+ # It may be intended to be passed to the Versioneer-versioned project,
263
+ # but that should not change where we get our version from.
264
+ env = os.environ.copy()
265
+ env.pop("GIT_DIR", None)
266
+ runner = functools.partial(runner, env=env)
267
+
268
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose)
269
+ if rc != 0:
270
+ if verbose:
271
+ print(f"Directory {root} not under git control")
272
+ raise NotThisMethod("'git rev-parse --git-dir' returned error")
273
+
274
+ # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
275
+ # if there isn't one, this yields HEX[-dirty] (no NUM)
276
+ describe_out, rc = runner(
277
+ GITS,
278
+ [
279
+ "describe",
280
+ "--tags",
281
+ "--dirty",
282
+ "--always",
283
+ "--long",
284
+ "--match",
285
+ f"{tag_prefix}[[:digit:]]*",
286
+ ],
287
+ cwd=root,
288
+ )
289
+ # --long was added in git-1.5.5
290
+ if describe_out is None:
291
+ raise NotThisMethod("'git describe' failed")
292
+ describe_out = describe_out.strip()
293
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
294
+ if full_out is None:
295
+ raise NotThisMethod("'git rev-parse' failed")
296
+ full_out = full_out.strip()
297
+
298
+ pieces = {}
299
+ pieces["long"] = full_out
300
+ pieces["short"] = full_out[:7] # maybe improved later
301
+ pieces["error"] = None
302
+
303
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
304
+ # --abbrev-ref was added in git-1.6.3
305
+ if rc != 0 or branch_name is None:
306
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
307
+ branch_name = branch_name.strip()
308
+
309
+ if branch_name == "HEAD":
310
+ # If we aren't exactly on a branch, pick a branch which represents
311
+ # the current commit. If all else fails, we are on a branchless
312
+ # commit.
313
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
314
+ # --contains was added in git-1.5.4
315
+ if rc != 0 or branches is None:
316
+ raise NotThisMethod("'git branch --contains' returned error")
317
+ branches = branches.split("\n")
318
+
319
+ # Remove the first line if we're running detached
320
+ if "(" in branches[0]:
321
+ branches.pop(0)
322
+
323
+ # Strip off the leading "* " from the list of branches.
324
+ branches = [branch[2:] for branch in branches]
325
+ if "master" in branches:
326
+ branch_name = "master"
327
+ elif not branches:
328
+ branch_name = None
329
+ else:
330
+ # Pick the first branch that is returned. Good or bad.
331
+ branch_name = branches[0]
332
+
333
+ pieces["branch"] = branch_name
334
+
335
+ # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
336
+ # TAG might have hyphens.
337
+ git_describe = describe_out
338
+
339
+ # look for -dirty suffix
340
+ dirty = git_describe.endswith("-dirty")
341
+ pieces["dirty"] = dirty
342
+ if dirty:
343
+ git_describe = git_describe[: git_describe.rindex("-dirty")]
344
+
345
+ # now we have TAG-NUM-gHEX or HEX
346
+
347
+ if "-" in git_describe:
348
+ # TAG-NUM-gHEX
349
+ mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
350
+ if not mo:
351
+ # unparsable. Maybe git-describe is misbehaving?
352
+ pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
353
+ return pieces
354
+
355
+ # tag
356
+ full_tag = mo.group(1)
357
+ if not full_tag.startswith(tag_prefix):
358
+ if verbose:
359
+ fmt = "tag '%s' doesn't start with prefix '%s'"
360
+ print(fmt % (full_tag, tag_prefix))
361
+ pieces[
362
+ "error"
363
+ ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
364
+ return pieces
365
+ pieces["closest-tag"] = full_tag[len(tag_prefix) :]
366
+
367
+ # distance: number of commits since tag
368
+ pieces["distance"] = int(mo.group(2))
369
+
370
+ # commit: short hex revision ID
371
+ pieces["short"] = mo.group(3)
372
+
373
+ else:
374
+ # HEX: no tags
375
+ pieces["closest-tag"] = None
376
+ out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
377
+ pieces["distance"] = len(out.split()) # total number of commits
378
+
379
+ # commit date: see ISO-8601 comment in git_versions_from_keywords()
380
+ date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
381
+ # Use only the last line. Previous lines may contain GPG signature
382
+ # information.
383
+ date = date.splitlines()[-1]
384
+ pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
385
+
386
+ return pieces
387
+
388
+
389
+ def plus_or_dot(pieces) -> str:
390
+ """Return a + if we don't already have one, else return a ."""
391
+ if "+" in pieces.get("closest-tag", ""):
392
+ return "."
393
+ return "+"
394
+
395
+
396
+ def render_pep440(pieces):
397
+ """Build up version string, with post-release "local version identifier".
398
+
399
+ Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
400
+ get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
401
+
402
+ Exceptions:
403
+ 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
404
+ """
405
+ if pieces["closest-tag"]:
406
+ rendered = pieces["closest-tag"]
407
+ if pieces["distance"] or pieces["dirty"]:
408
+ rendered += plus_or_dot(pieces)
409
+ rendered += f"{pieces['distance']}.g{pieces['short']}"
410
+ if pieces["dirty"]:
411
+ rendered += ".dirty"
412
+ else:
413
+ # exception #1
414
+ rendered = f"0+untagged.{pieces['distance']}.g{pieces['short']}"
415
+ if pieces["dirty"]:
416
+ rendered += ".dirty"
417
+ return rendered
418
+
419
+
420
+ def render_pep440_branch(pieces):
421
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
422
+
423
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
424
+ (a feature branch will appear "older" than the master branch).
425
+
426
+ Exceptions:
427
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
428
+ """
429
+ if pieces["closest-tag"]:
430
+ rendered = pieces["closest-tag"]
431
+ if pieces["distance"] or pieces["dirty"]:
432
+ if pieces["branch"] != "master":
433
+ rendered += ".dev0"
434
+ rendered += plus_or_dot(pieces)
435
+ rendered += f"{pieces['distance']}.g{pieces['short']}"
436
+ if pieces["dirty"]:
437
+ rendered += ".dirty"
438
+ else:
439
+ # exception #1
440
+ rendered = "0"
441
+ if pieces["branch"] != "master":
442
+ rendered += ".dev0"
443
+ rendered += f"+untagged.{pieces['distance']}.g{pieces['short']}"
444
+ if pieces["dirty"]:
445
+ rendered += ".dirty"
446
+ return rendered
447
+
448
+
449
+ def pep440_split_post(ver):
450
+ """Split pep440 version string at the post-release segment.
451
+
452
+ Returns the release segments before the post-release and the
453
+ post-release version number (or None if no post-release segment is present).
454
+ """
455
+ vc = ver.split(".post")
456
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
457
+
458
+
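Worked examples (not from the source) of the helper's three cases:

```python
print(pep440_split_post("1.2.3.post2"))  # ('1.2.3', 2)
print(pep440_split_post("1.2.3.post"))   # ('1.2.3', 0)    -- empty segment counts as 0
print(pep440_split_post("1.2.3"))        # ('1.2.3', None) -- no post-release segment
```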
459
+ def render_pep440_pre(pieces):
460
+ """TAG[.postN.devDISTANCE] -- No -dirty.
461
+
462
+ Exceptions:
463
+ 1: no tags. 0.post0.devDISTANCE
464
+ """
465
+ if pieces["closest-tag"]:
466
+ if pieces["distance"]:
467
+ # update the post release segment
468
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
469
+ rendered = tag_version
470
+ if post_version is not None:
471
+ rendered += f".post{post_version + 1}.dev{pieces['distance']}"
472
+ else:
473
+ rendered += f".post0.dev{pieces['distance']}"
474
+ else:
475
+ # no commits, use the tag as the version
476
+ rendered = pieces["closest-tag"]
477
+ else:
478
+ # exception #1
479
+ rendered = f"0.post0.dev{pieces['distance']}"
480
+ return rendered
481
+
482
+
483
+ def render_pep440_post(pieces):
484
+ """TAG[.postDISTANCE[.dev0]+gHEX] .
485
+
486
+ The ".dev0" means dirty. Note that .dev0 sorts backwards
487
+ (a dirty tree will appear "older" than the corresponding clean one),
488
+ but you shouldn't be releasing software with -dirty anyways.
489
+
490
+ Exceptions:
491
+ 1: no tags. 0.postDISTANCE[.dev0]
492
+ """
493
+ if pieces["closest-tag"]:
494
+ rendered = pieces["closest-tag"]
495
+ if pieces["distance"] or pieces["dirty"]:
496
+ rendered += f".post{pieces['distance']}"
497
+ if pieces["dirty"]:
498
+ rendered += ".dev0"
499
+ rendered += plus_or_dot(pieces)
500
+ rendered += f"g{pieces['short']}"
501
+ else:
502
+ # exception #1
503
+ rendered = f"0.post{pieces['distance']}"
504
+ if pieces["dirty"]:
505
+ rendered += ".dev0"
506
+ rendered += f"+g{pieces['short']}"
507
+ return rendered
508
+
509
+
510
+ def render_pep440_post_branch(pieces):
511
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
512
+
513
+ The ".dev0" means not master branch.
514
+
515
+ Exceptions:
516
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
517
+ """
518
+ if pieces["closest-tag"]:
519
+ rendered = pieces["closest-tag"]
520
+ if pieces["distance"] or pieces["dirty"]:
521
+ rendered += f".post{pieces['distance']}"
522
+ if pieces["branch"] != "master":
523
+ rendered += ".dev0"
524
+ rendered += plus_or_dot(pieces)
525
+ rendered += f"g{pieces['short']}"
526
+ if pieces["dirty"]:
527
+ rendered += ".dirty"
528
+ else:
529
+ # exception #1
530
+ rendered = f"0.post{pieces['distance']}"
531
+ if pieces["branch"] != "master":
532
+ rendered += ".dev0"
533
+ rendered += f"+g{pieces['short']}"
534
+ if pieces["dirty"]:
535
+ rendered += ".dirty"
536
+ return rendered
537
+
538
+
539
+ def render_pep440_old(pieces):
540
+ """TAG[.postDISTANCE[.dev0]] .
541
+
542
+ The ".dev0" means dirty.
543
+
544
+ Exceptions:
545
+ 1: no tags. 0.postDISTANCE[.dev0]
546
+ """
547
+ if pieces["closest-tag"]:
548
+ rendered = pieces["closest-tag"]
549
+ if pieces["distance"] or pieces["dirty"]:
550
+ rendered += f"0.post{pieces['distance']}"
551
+ if pieces["dirty"]:
552
+ rendered += ".dev0"
553
+ else:
554
+ # exception #1
555
+ rendered = f"0.post{pieces['distance']}"
556
+ if pieces["dirty"]:
557
+ rendered += ".dev0"
558
+ return rendered
559
+
560
+
561
+ def render_git_describe(pieces):
562
+ """TAG[-DISTANCE-gHEX][-dirty].
563
+
564
+ Like 'git describe --tags --dirty --always'.
565
+
566
+ Exceptions:
567
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
568
+ """
569
+ if pieces["closest-tag"]:
570
+ rendered = pieces["closest-tag"]
571
+ if pieces["distance"]:
572
+ rendered += f"-{pieces['distance']}-g{pieces['short']}"
573
+ else:
574
+ # exception #1
575
+ rendered = pieces["short"]
576
+ if pieces["dirty"]:
577
+ rendered += "-dirty"
578
+ return rendered
579
+
580
+
581
+ def render_git_describe_long(pieces):
582
+ """TAG-DISTANCE-gHEX[-dirty].
583
+
584
+ Like 'git describe --tags --dirty --always --long'.
585
+ The distance/hash is unconditional.
586
+
587
+ Exceptions:
588
+ 1: no tags. HEX[-dirty] (note: no 'g' prefix)
589
+ """
590
+ if pieces["closest-tag"]:
591
+ rendered = pieces["closest-tag"]
592
+ rendered += f"-{pieces['distance']}-g{pieces['short']}"
593
+ else:
594
+ # exception #1
595
+ rendered = pieces["short"]
596
+ if pieces["dirty"]:
597
+ rendered += "-dirty"
598
+ return rendered
599
+
600
+
601
+ def render(pieces, style):
602
+ """Render the given version pieces into the requested style."""
603
+ if pieces["error"]:
604
+ return {
605
+ "version": "unknown",
606
+ "full-revisionid": pieces.get("long"),
607
+ "dirty": None,
608
+ "error": pieces["error"],
609
+ "date": None,
610
+ }
611
+
612
+ if not style or style == "default":
613
+ style = "pep440" # the default
614
+
615
+ if style == "pep440":
616
+ rendered = render_pep440(pieces)
617
+ elif style == "pep440-branch":
618
+ rendered = render_pep440_branch(pieces)
619
+ elif style == "pep440-pre":
620
+ rendered = render_pep440_pre(pieces)
621
+ elif style == "pep440-post":
622
+ rendered = render_pep440_post(pieces)
623
+ elif style == "pep440-post-branch":
624
+ rendered = render_pep440_post_branch(pieces)
625
+ elif style == "pep440-old":
626
+ rendered = render_pep440_old(pieces)
627
+ elif style == "git-describe":
628
+ rendered = render_git_describe(pieces)
629
+ elif style == "git-describe-long":
630
+ rendered = render_git_describe_long(pieces)
631
+ else:
632
+ raise ValueError(f"unknown style '{style}'")
633
+
634
+ return {
635
+ "version": rendered,
636
+ "full-revisionid": pieces["long"],
637
+ "dirty": pieces["dirty"],
638
+ "error": None,
639
+ "date": pieces.get("date"),
640
+ }
641
+
642
+
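To see the dispatch end to end, here is a hypothetical `pieces` dict (the hash values are placeholders) pushed through two styles:

```python
pieces = {"closest-tag": "1.2.3", "distance": 2, "dirty": False,
          "short": "abc1234", "long": "abc1234" + "0" * 33,  # placeholder 40-char sha
          "error": None}
print(render(pieces, "pep440")["version"])        # 1.2.3+2.gabc1234
print(render(pieces, "git-describe")["version"])  # 1.2.3-2-gabc1234
```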
643
+ def get_versions():
644
+ """Get version information or return default if unable to do so."""
645
+ # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
646
+ # __file__, we can work backwards from there to the root. Some
647
+ # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
648
+ # case we can only use expanded keywords.
649
+
650
+ cfg = get_config()
651
+ verbose = cfg.verbose
652
+
653
+ try:
654
+ return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
655
+ except NotThisMethod:
656
+ pass
657
+
658
+ try:
659
+ root = os.path.realpath(__file__)
660
+ # versionfile_source is the relative path from the top of the source
661
+ # tree (where the .git directory might live) to this file. Invert
662
+ # this to find the root from __file__.
663
+ for _ in cfg.versionfile_source.split("/"):
664
+ root = os.path.dirname(root)
665
+ except NameError:
666
+ return {
667
+ "version": "0+unknown",
668
+ "full-revisionid": None,
669
+ "dirty": None,
670
+ "error": "unable to find root of source tree",
671
+ "date": None,
672
+ }
673
+
674
+ try:
675
+ pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
676
+ return render(pieces, cfg.style)
677
+ except NotThisMethod:
678
+ pass
679
+
680
+ try:
681
+ if cfg.parentdir_prefix:
682
+ return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
683
+ except NotThisMethod:
684
+ pass
685
+
686
+ return {
687
+ "version": "0+unknown",
688
+ "full-revisionid": None,
689
+ "dirty": None,
690
+ "error": "unable to compute version",
691
+ "date": None,
692
+ }
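In use, a package imports the generated module and calls this single entry point; the import path below assumes the file was written as `pandas/_version.py`:

```python
from pandas._version import get_versions  # assumed location of this generated file

info = get_versions()
print(info["version"], info["full-revisionid"], info["dirty"])
```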
venv/lib/python3.10/site-packages/pandas/_version_meson.py ADDED
@@ -0,0 +1,2 @@
1
+ __version__="2.2.2"
2
+ __git_version__="d9cdd2ee5a58015ef6f4d15c7226110c9aab8140"
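These two constants are what a built wheel exposes; a quick check, assuming pandas re-exports them at the top level as usual:

```python
import pandas as pd

print(pd.__version__)      # "2.2.2" for this build
print(pd.__git_version__)  # the commit hash baked in above
```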
venv/lib/python3.10/site-packages/pandas/api/__init__.py ADDED
@@ -0,0 +1,16 @@
1
+ """ public toolkit API """
2
+ from pandas.api import (
3
+ extensions,
4
+ indexers,
5
+ interchange,
6
+ types,
7
+ typing,
8
+ )
9
+
10
+ __all__ = [
11
+ "interchange",
12
+ "extensions",
13
+ "indexers",
14
+ "types",
15
+ "typing",
16
+ ]
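The subpackages listed here are reachable through `pd.api`, e.g.:

```python
import pandas as pd

print(pd.api.types.is_integer_dtype("int64"))  # True
```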
venv/lib/python3.10/site-packages/pandas/api/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (380 Bytes).
venv/lib/python3.10/site-packages/pandas/api/extensions/__init__.py ADDED
@@ -0,0 +1,33 @@
1
+ """
2
+ Public API for extending pandas objects.
3
+ """
4
+
5
+ from pandas._libs.lib import no_default
6
+
7
+ from pandas.core.dtypes.base import (
8
+ ExtensionDtype,
9
+ register_extension_dtype,
10
+ )
11
+
12
+ from pandas.core.accessor import (
13
+ register_dataframe_accessor,
14
+ register_index_accessor,
15
+ register_series_accessor,
16
+ )
17
+ from pandas.core.algorithms import take
18
+ from pandas.core.arrays import (
19
+ ExtensionArray,
20
+ ExtensionScalarOpsMixin,
21
+ )
22
+
23
+ __all__ = [
24
+ "no_default",
25
+ "ExtensionDtype",
26
+ "register_extension_dtype",
27
+ "register_dataframe_accessor",
28
+ "register_index_accessor",
29
+ "register_series_accessor",
30
+ "take",
31
+ "ExtensionArray",
32
+ "ExtensionScalarOpsMixin",
33
+ ]
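A minimal custom accessor built on the names re-exported above (`geo`, `lat`, and `lon` are made-up for illustration):

```python
import pandas as pd

@pd.api.extensions.register_dataframe_accessor("geo")
class GeoAccessor:
    def __init__(self, obj: pd.DataFrame) -> None:
        self._obj = obj

    @property
    def center(self):
        # mean latitude/longitude of the frame
        return self._obj["lat"].mean(), self._obj["lon"].mean()

df = pd.DataFrame({"lat": [0.0, 10.0], "lon": [0.0, 20.0]})
print(df.geo.center)  # (5.0, 10.0)
```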
venv/lib/python3.10/site-packages/pandas/api/extensions/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (745 Bytes).
venv/lib/python3.10/site-packages/pandas/api/indexers/__init__.py ADDED
@@ -0,0 +1,17 @@
1
+ """
2
+ Public API for Rolling Window Indexers.
3
+ """
4
+
5
+ from pandas.core.indexers import check_array_indexer
6
+ from pandas.core.indexers.objects import (
7
+ BaseIndexer,
8
+ FixedForwardWindowIndexer,
9
+ VariableOffsetWindowIndexer,
10
+ )
11
+
12
+ __all__ = [
13
+ "check_array_indexer",
14
+ "BaseIndexer",
15
+ "FixedForwardWindowIndexer",
16
+ "VariableOffsetWindowIndexer",
17
+ ]
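For instance, `FixedForwardWindowIndexer` makes `rolling` look ahead instead of behind:

```python
import pandas as pd
from pandas.api.indexers import FixedForwardWindowIndexer

s = pd.Series([1, 2, 3, 4, 5])
indexer = FixedForwardWindowIndexer(window_size=2)
# windows are [1,2], [2,3], [3,4], [4,5], [5]
print(s.rolling(indexer, min_periods=1).sum().tolist())  # [3.0, 5.0, 7.0, 9.0, 5.0]
```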
venv/lib/python3.10/site-packages/pandas/api/indexers/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (488 Bytes).
venv/lib/python3.10/site-packages/pandas/api/interchange/__init__.py ADDED
@@ -0,0 +1,8 @@
1
+ """
2
+ Public API for DataFrame interchange protocol.
3
+ """
4
+
5
+ from pandas.core.interchange.dataframe_protocol import DataFrame
6
+ from pandas.core.interchange.from_dataframe import from_dataframe
7
+
8
+ __all__ = ["from_dataframe", "DataFrame"]
venv/lib/python3.10/site-packages/pandas/api/interchange/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (437 Bytes).
venv/lib/python3.10/site-packages/pandas/api/types/__init__.py ADDED
@@ -0,0 +1,23 @@
1
+ """
2
+ Public toolkit API.
3
+ """
4
+
5
+ from pandas._libs.lib import infer_dtype
6
+
7
+ from pandas.core.dtypes.api import * # noqa: F403
8
+ from pandas.core.dtypes.concat import union_categoricals
9
+ from pandas.core.dtypes.dtypes import (
10
+ CategoricalDtype,
11
+ DatetimeTZDtype,
12
+ IntervalDtype,
13
+ PeriodDtype,
14
+ )
15
+
16
+ __all__ = [
17
+ "infer_dtype",
18
+ "union_categoricals",
19
+ "CategoricalDtype",
20
+ "DatetimeTZDtype",
21
+ "IntervalDtype",
22
+ "PeriodDtype",
23
+ ]
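Two of the exported names in action:

```python
import pandas as pd
from pandas.api.types import infer_dtype, union_categoricals

print(infer_dtype(["a", "b"]))  # 'string'
combined = union_categoricals([pd.Categorical(["a", "b"]), pd.Categorical(["b", "c"])])
print(combined.categories.tolist())  # ['a', 'b', 'c']
```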
venv/lib/python3.10/site-packages/pandas/api/types/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (570 Bytes).
venv/lib/python3.10/site-packages/pandas/api/typing/__init__.py ADDED
@@ -0,0 +1,55 @@
1
+ """
2
+ Public API classes that store intermediate results useful for type-hinting.
3
+ """
4
+
5
+ from pandas._libs import NaTType
6
+ from pandas._libs.missing import NAType
7
+
8
+ from pandas.core.groupby import (
9
+ DataFrameGroupBy,
10
+ SeriesGroupBy,
11
+ )
12
+ from pandas.core.resample import (
13
+ DatetimeIndexResamplerGroupby,
14
+ PeriodIndexResamplerGroupby,
15
+ Resampler,
16
+ TimedeltaIndexResamplerGroupby,
17
+ TimeGrouper,
18
+ )
19
+ from pandas.core.window import (
20
+ Expanding,
21
+ ExpandingGroupby,
22
+ ExponentialMovingWindow,
23
+ ExponentialMovingWindowGroupby,
24
+ Rolling,
25
+ RollingGroupby,
26
+ Window,
27
+ )
28
+
29
+ # TODO: Can't import Styler without importing jinja2
30
+ # from pandas.io.formats.style import Styler
31
+ from pandas.io.json._json import JsonReader
32
+ from pandas.io.stata import StataReader
33
+
34
+ __all__ = [
35
+ "DataFrameGroupBy",
36
+ "DatetimeIndexResamplerGroupby",
37
+ "Expanding",
38
+ "ExpandingGroupby",
39
+ "ExponentialMovingWindow",
40
+ "ExponentialMovingWindowGroupby",
41
+ "JsonReader",
42
+ "NaTType",
43
+ "NAType",
44
+ "PeriodIndexResamplerGroupby",
45
+ "Resampler",
46
+ "Rolling",
47
+ "RollingGroupby",
48
+ "SeriesGroupBy",
49
+ "StataReader",
50
+ # See TODO above
51
+ # "Styler",
52
+ "TimedeltaIndexResamplerGroupby",
53
+ "TimeGrouper",
54
+ "Window",
55
+ ]
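These names exist purely for annotations; a typical (illustrative) use:

```python
import pandas as pd
from pandas.api.typing import DataFrameGroupBy

def summarize(gb: DataFrameGroupBy) -> pd.DataFrame:
    # annotate the intermediate groupby object without private imports
    return gb.mean()

df = pd.DataFrame({"k": ["a", "a", "b"], "v": [1.0, 2.0, 3.0]})
print(summarize(df.groupby("k")))
```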
venv/lib/python3.10/site-packages/pandas/api/typing/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.08 kB).
venv/lib/python3.10/site-packages/pandas/conftest.py ADDED
@@ -0,0 +1,1965 @@
1
+ """
2
+ This file is very long and growing, but it was decided not to split it yet, as
3
+ it's still manageable (2020-03-17, ~1.1k LoC). See gh-31989
4
+
5
+ Instead of splitting it was decided to define sections here:
6
+ - Configuration / Settings
7
+ - Autouse fixtures
8
+ - Common arguments
9
+ - Missing values & co.
10
+ - Classes
11
+ - Indices
12
+ - Series'
13
+ - DataFrames
14
+ - Operators & Operations
15
+ - Data sets/files
16
+ - Time zones
17
+ - Dtypes
18
+ - Misc
19
+ """
20
+ from __future__ import annotations
21
+
22
+ from collections import abc
23
+ from datetime import (
24
+ date,
25
+ datetime,
26
+ time,
27
+ timedelta,
28
+ timezone,
29
+ )
30
+ from decimal import Decimal
31
+ import operator
32
+ import os
33
+ from typing import (
34
+ TYPE_CHECKING,
35
+ Callable,
36
+ )
37
+
38
+ from dateutil.tz import (
39
+ tzlocal,
40
+ tzutc,
41
+ )
42
+ import hypothesis
43
+ from hypothesis import strategies as st
44
+ import numpy as np
45
+ import pytest
46
+ from pytz import (
47
+ FixedOffset,
48
+ utc,
49
+ )
50
+
51
+ from pandas._config.config import _get_option
52
+
53
+ import pandas.util._test_decorators as td
54
+
55
+ from pandas.core.dtypes.dtypes import (
56
+ DatetimeTZDtype,
57
+ IntervalDtype,
58
+ )
59
+
60
+ import pandas as pd
61
+ from pandas import (
62
+ CategoricalIndex,
63
+ DataFrame,
64
+ Interval,
65
+ IntervalIndex,
66
+ Period,
67
+ RangeIndex,
68
+ Series,
69
+ Timedelta,
70
+ Timestamp,
71
+ date_range,
72
+ period_range,
73
+ timedelta_range,
74
+ )
75
+ import pandas._testing as tm
76
+ from pandas.core import ops
77
+ from pandas.core.indexes.api import (
78
+ Index,
79
+ MultiIndex,
80
+ )
81
+ from pandas.util.version import Version
82
+
83
+ if TYPE_CHECKING:
84
+ from collections.abc import (
85
+ Hashable,
86
+ Iterator,
87
+ )
88
+
89
+ try:
90
+ import pyarrow as pa
91
+ except ImportError:
92
+ has_pyarrow = False
93
+ else:
94
+ del pa
95
+ has_pyarrow = True
96
+
97
+ import zoneinfo
98
+
99
+ try:
100
+ zoneinfo.ZoneInfo("UTC")
101
+ except zoneinfo.ZoneInfoNotFoundError:
102
+ zoneinfo = None # type: ignore[assignment]
103
+
104
+
105
+ # ----------------------------------------------------------------
106
+ # Configuration / Settings
107
+ # ----------------------------------------------------------------
108
+ # pytest
109
+
110
+
111
+ def pytest_addoption(parser) -> None:
112
+ parser.addoption(
113
+ "--no-strict-data-files",
114
+ action="store_false",
115
+ help="Don't fail if a test is skipped for missing data file.",
116
+ )
117
+
118
+
119
+ def ignore_doctest_warning(item: pytest.Item, path: str, message: str) -> None:
120
+ """Ignore doctest warning.
121
+
122
+ Parameters
123
+ ----------
124
+ item : pytest.Item
125
+ pytest test item.
126
+ path : str
127
+ Module path to Python object, e.g. "pandas.core.frame.DataFrame.append". A
128
+ warning will be filtered when item.name ends with in given path. So it is
129
+ sufficient to specify e.g. "DataFrame.append".
130
+ message : str
131
+ Message to be filtered.
132
+ """
133
+ if item.name.endswith(path):
134
+ item.add_marker(pytest.mark.filterwarnings(f"ignore:{message}"))
135
+
136
+
137
+ def pytest_collection_modifyitems(items, config) -> None:
138
+ is_doctest = config.getoption("--doctest-modules") or config.getoption(
139
+ "--doctest-cython", default=False
140
+ )
141
+
142
+ # Warnings from doctests that can be ignored; place reason in comment above.
143
+ # Each entry specifies (path, message) - see the ignore_doctest_warning function
144
+ ignored_doctest_warnings = [
145
+ ("is_int64_dtype", "is_int64_dtype is deprecated"),
146
+ ("is_interval_dtype", "is_interval_dtype is deprecated"),
147
+ ("is_period_dtype", "is_period_dtype is deprecated"),
148
+ ("is_datetime64tz_dtype", "is_datetime64tz_dtype is deprecated"),
149
+ ("is_categorical_dtype", "is_categorical_dtype is deprecated"),
150
+ ("is_sparse", "is_sparse is deprecated"),
151
+ ("DataFrameGroupBy.fillna", "DataFrameGroupBy.fillna is deprecated"),
152
+ ("NDFrame.replace", "The 'method' keyword"),
153
+ ("NDFrame.replace", "Series.replace without 'value'"),
154
+ ("NDFrame.clip", "Downcasting behavior in Series and DataFrame methods"),
155
+ ("Series.idxmin", "The behavior of Series.idxmin"),
156
+ ("Series.idxmax", "The behavior of Series.idxmax"),
157
+ ("SeriesGroupBy.fillna", "SeriesGroupBy.fillna is deprecated"),
158
+ ("SeriesGroupBy.idxmin", "The behavior of Series.idxmin"),
159
+ ("SeriesGroupBy.idxmax", "The behavior of Series.idxmax"),
160
+ # Docstring divides by zero to show behavior difference
161
+ ("missing.mask_zero_div_zero", "divide by zero encountered"),
162
+ (
163
+ "to_pydatetime",
164
+ "The behavior of DatetimeProperties.to_pydatetime is deprecated",
165
+ ),
166
+ (
167
+ "pandas.core.generic.NDFrame.bool",
168
+ "(Series|DataFrame).bool is now deprecated and will be removed "
169
+ "in future version of pandas",
170
+ ),
171
+ (
172
+ "pandas.core.generic.NDFrame.first",
173
+ "first is deprecated and will be removed in a future version. "
174
+ "Please create a mask and filter using `.loc` instead",
175
+ ),
176
+ (
177
+ "Resampler.fillna",
178
+ "DatetimeIndexResampler.fillna is deprecated",
179
+ ),
180
+ (
181
+ "DataFrameGroupBy.fillna",
182
+ "DataFrameGroupBy.fillna with 'method' is deprecated",
183
+ ),
184
+ (
185
+ "DataFrameGroupBy.fillna",
186
+ "DataFrame.fillna with 'method' is deprecated",
187
+ ),
188
+ ("read_parquet", "Passing a BlockManager to DataFrame is deprecated"),
189
+ ]
190
+
191
+ if is_doctest:
192
+ for item in items:
193
+ for path, message in ignored_doctest_warnings:
194
+ ignore_doctest_warning(item, path, message)
195
+
196
+
197
+ hypothesis_health_checks = [hypothesis.HealthCheck.too_slow]
198
+ if Version(hypothesis.__version__) >= Version("6.83.2"):
199
+ hypothesis_health_checks.append(hypothesis.HealthCheck.differing_executors)
200
+
201
+ # Hypothesis
202
+ hypothesis.settings.register_profile(
203
+ "ci",
204
+ # Hypothesis timing checks are tuned for scalars by default, so we bump
205
+ # them from 200ms to 500ms per test case as the global default. If this
206
+ # is too short for a specific test, (a) try to make it faster, and (b)
207
+ # if it really is slow add `@settings(deadline=...)` with a working value,
208
+ # or `deadline=None` to entirely disable timeouts for that test.
209
+ # 2022-02-09: Changed deadline from 500 -> None. Deadline leads to
210
+ # non-actionable, flaky CI failures (GH 24641, 44969, 45118)
211
+ deadline=None,
212
+ suppress_health_check=tuple(hypothesis_health_checks),
213
+ )
214
+ hypothesis.settings.load_profile("ci")
215
+
216
+ # Registering these strategies makes them globally available via st.from_type,
217
+ # which is used for offsets in tests/tseries/offsets/test_offsets_properties.py
218
+ for name in "MonthBegin MonthEnd BMonthBegin BMonthEnd".split():
219
+ cls = getattr(pd.tseries.offsets, name)
220
+ st.register_type_strategy(
221
+ cls, st.builds(cls, n=st.integers(-99, 99), normalize=st.booleans())
222
+ )
223
+
224
+ for name in "YearBegin YearEnd BYearBegin BYearEnd".split():
225
+ cls = getattr(pd.tseries.offsets, name)
226
+ st.register_type_strategy(
227
+ cls,
228
+ st.builds(
229
+ cls,
230
+ n=st.integers(-5, 5),
231
+ normalize=st.booleans(),
232
+ month=st.integers(min_value=1, max_value=12),
233
+ ),
234
+ )
235
+
236
+ for name in "QuarterBegin QuarterEnd BQuarterBegin BQuarterEnd".split():
237
+ cls = getattr(pd.tseries.offsets, name)
238
+ st.register_type_strategy(
239
+ cls,
240
+ st.builds(
241
+ cls,
242
+ n=st.integers(-24, 24),
243
+ normalize=st.booleans(),
244
+ startingMonth=st.integers(min_value=1, max_value=12),
245
+ ),
246
+ )
247
+
248
+
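Once registered, Hypothesis can synthesize these offsets straight from the type; a sketch of a property test that relies on the registration above:

```python
from hypothesis import given, strategies as st
import pandas as pd

@given(offset=st.from_type(pd.tseries.offsets.MonthEnd))
def test_month_end_bounds(offset):
    # n was drawn from integers(-99, 99) by the registered strategy
    assert -99 <= offset.n <= 99
```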
249
+ # ----------------------------------------------------------------
250
+ # Autouse fixtures
251
+ # ----------------------------------------------------------------
252
+
253
+
254
+ # https://github.com/pytest-dev/pytest/issues/11873
255
+ # Would like to avoid autouse=True, but cannot as of pytest 8.0.0
256
+ @pytest.fixture(autouse=True)
257
+ def add_doctest_imports(doctest_namespace) -> None:
258
+ """
259
+ Make `np` and `pd` names available for doctests.
260
+ """
261
+ doctest_namespace["np"] = np
262
+ doctest_namespace["pd"] = pd
263
+
264
+
265
+ @pytest.fixture(autouse=True)
266
+ def configure_tests() -> None:
267
+ """
268
+ Configure settings for all tests and test modules.
269
+ """
270
+ pd.set_option("chained_assignment", "raise")
271
+
272
+
273
+ # ----------------------------------------------------------------
274
+ # Common arguments
275
+ # ----------------------------------------------------------------
276
+ @pytest.fixture(params=[0, 1, "index", "columns"], ids=lambda x: f"axis={repr(x)}")
277
+ def axis(request):
278
+ """
279
+ Fixture for returning the axis numbers or names of a DataFrame.
280
+ """
281
+ return request.param
282
+
283
+
284
+ axis_frame = axis
285
+
286
+
287
+ @pytest.fixture(params=[1, "columns"], ids=lambda x: f"axis={repr(x)}")
288
+ def axis_1(request):
289
+ """
290
+ Fixture for returning aliases of axis 1 of a DataFrame.
291
+ """
292
+ return request.param
293
+
294
+
295
+ @pytest.fixture(params=[True, False, None])
296
+ def observed(request):
297
+ """
298
+ Pass in the observed keyword to groupby for [True, False].
299
+ This indicates whether categoricals should return values for
300
+ values which are not in the grouper [False / None], or only values which
301
+ appear in the grouper [True]. [None] is supported for future compatibility
302
+ if we decide to change the default (and would need to warn if this
303
+ parameter is not passed).
304
+ """
305
+ return request.param
306
+
307
+
308
+ @pytest.fixture(params=[True, False, None])
309
+ def ordered(request):
310
+ """
311
+ Boolean 'ordered' parameter for Categorical.
312
+ """
313
+ return request.param
314
+
315
+
316
+ @pytest.fixture(params=[True, False])
317
+ def skipna(request):
318
+ """
319
+ Boolean 'skipna' parameter.
320
+ """
321
+ return request.param
322
+
323
+
324
+ @pytest.fixture(params=["first", "last", False])
325
+ def keep(request):
326
+ """
327
+ Valid values for the 'keep' parameter used in
328
+ .duplicated or .drop_duplicates
329
+ """
330
+ return request.param
331
+
332
+
333
+ @pytest.fixture(params=["both", "neither", "left", "right"])
334
+ def inclusive_endpoints_fixture(request):
335
+ """
336
+ Fixture for trying all interval 'inclusive' parameters.
337
+ """
338
+ return request.param
339
+
340
+
341
+ @pytest.fixture(params=["left", "right", "both", "neither"])
342
+ def closed(request):
343
+ """
344
+ Fixture for trying all interval closed parameters.
345
+ """
346
+ return request.param
347
+
348
+
349
+ @pytest.fixture(params=["left", "right", "both", "neither"])
350
+ def other_closed(request):
351
+ """
352
+ Secondary closed fixture to allow parametrizing over all pairs of closed.
353
+ """
354
+ return request.param
355
+
356
+
357
+ @pytest.fixture(
358
+ params=[
359
+ None,
360
+ "gzip",
361
+ "bz2",
362
+ "zip",
363
+ "xz",
364
+ "tar",
365
+ pytest.param("zstd", marks=td.skip_if_no("zstandard")),
366
+ ]
367
+ )
368
+ def compression(request):
369
+ """
370
+ Fixture for trying common compression types in compression tests.
371
+ """
372
+ return request.param
373
+
374
+
375
+ @pytest.fixture(
376
+ params=[
377
+ "gzip",
378
+ "bz2",
379
+ "zip",
380
+ "xz",
381
+ "tar",
382
+ pytest.param("zstd", marks=td.skip_if_no("zstandard")),
383
+ ]
384
+ )
385
+ def compression_only(request):
386
+ """
387
+ Fixture for trying common compression types in compression tests excluding
388
+ uncompressed case.
389
+ """
390
+ return request.param
391
+
392
+
393
+ @pytest.fixture(params=[True, False])
394
+ def writable(request):
395
+ """
396
+ Boolean fixture for whether an array is writable.
397
+ """
398
+ return request.param
399
+
400
+
401
+ @pytest.fixture(params=["inner", "outer", "left", "right"])
402
+ def join_type(request):
403
+ """
404
+ Fixture for trying all types of join operations.
405
+ """
406
+ return request.param
407
+
408
+
409
+ @pytest.fixture(params=["nlargest", "nsmallest"])
410
+ def nselect_method(request):
411
+ """
412
+ Fixture for trying all nselect methods.
413
+ """
414
+ return request.param
415
+
416
+
417
+ # ----------------------------------------------------------------
418
+ # Missing values & co.
419
+ # ----------------------------------------------------------------
420
+ @pytest.fixture(params=tm.NULL_OBJECTS, ids=lambda x: type(x).__name__)
421
+ def nulls_fixture(request):
422
+ """
423
+ Fixture for each null type in pandas.
424
+ """
425
+ return request.param
426
+
427
+
428
+ nulls_fixture2 = nulls_fixture # Generate cartesian product of nulls_fixture
429
+
430
+
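The aliasing trick works because pytest parametrizes each fixture name independently, so a test requesting both names runs once per ordered pair (illustrative test):

```python
def test_null_pair(nulls_fixture, nulls_fixture2):
    # with N null objects registered this executes N * N times,
    # covering every combination of two nulls
    pass
```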
431
+ @pytest.fixture(params=[None, np.nan, pd.NaT])
432
+ def unique_nulls_fixture(request):
433
+ """
434
+ Fixture for each null type in pandas, each null type exactly once.
435
+ """
436
+ return request.param
437
+
438
+
439
+ # Generate cartesian product of unique_nulls_fixture:
440
+ unique_nulls_fixture2 = unique_nulls_fixture
441
+
442
+
443
+ @pytest.fixture(params=tm.NP_NAT_OBJECTS, ids=lambda x: type(x).__name__)
444
+ def np_nat_fixture(request):
445
+ """
446
+ Fixture for each NaT type in numpy.
447
+ """
448
+ return request.param
449
+
450
+
451
+ # Generate cartesian product of np_nat_fixture:
452
+ np_nat_fixture2 = np_nat_fixture
453
+
454
+
455
+ # ----------------------------------------------------------------
456
+ # Classes
457
+ # ----------------------------------------------------------------
458
+
459
+
460
+ @pytest.fixture(params=[DataFrame, Series])
461
+ def frame_or_series(request):
462
+ """
463
+ Fixture to parametrize over DataFrame and Series.
464
+ """
465
+ return request.param
466
+
467
+
468
+ @pytest.fixture(params=[Index, Series], ids=["index", "series"])
469
+ def index_or_series(request):
470
+ """
471
+ Fixture to parametrize over Index and Series, made necessary by a mypy
472
+ bug, giving an error:
473
+
474
+ List item 0 has incompatible type "Type[Series]"; expected "Type[PandasObject]"
475
+
476
+ See GH#29725
477
+ """
478
+ return request.param
479
+
480
+
481
+ # Generate cartesian product of index_or_series fixture:
482
+ index_or_series2 = index_or_series
483
+
484
+
485
+ @pytest.fixture(params=[Index, Series, pd.array], ids=["index", "series", "array"])
486
+ def index_or_series_or_array(request):
487
+ """
488
+ Fixture to parametrize over Index, Series, and ExtensionArray
489
+ """
490
+ return request.param
491
+
492
+
493
+ @pytest.fixture(params=[Index, Series, DataFrame, pd.array], ids=lambda x: x.__name__)
494
+ def box_with_array(request):
495
+ """
496
+ Fixture to test behavior for Index, Series, DataFrame, and pandas Array
497
+ classes
498
+ """
499
+ return request.param
500
+
501
+
502
+ box_with_array2 = box_with_array
503
+
504
+
505
+ @pytest.fixture
506
+ def dict_subclass() -> type[dict]:
507
+ """
508
+ Fixture for a dictionary subclass.
509
+ """
510
+
511
+ class TestSubDict(dict):
512
+ def __init__(self, *args, **kwargs) -> None:
513
+ dict.__init__(self, *args, **kwargs)
514
+
515
+ return TestSubDict
516
+
517
+
518
+ @pytest.fixture
519
+ def non_dict_mapping_subclass() -> type[abc.Mapping]:
520
+ """
521
+ Fixture for a non-mapping dictionary subclass.
522
+ """
523
+
524
+ class TestNonDictMapping(abc.Mapping):
525
+ def __init__(self, underlying_dict) -> None:
526
+ self._data = underlying_dict
527
+
528
+ def __getitem__(self, key):
529
+ return self._data.__getitem__(key)
530
+
531
+ def __iter__(self) -> Iterator:
532
+ return self._data.__iter__()
533
+
534
+ def __len__(self) -> int:
535
+ return self._data.__len__()
536
+
537
+ return TestNonDictMapping
538
+
539
+
540
+ # ----------------------------------------------------------------
541
+ # Indices
542
+ # ----------------------------------------------------------------
543
+ @pytest.fixture
544
+ def multiindex_year_month_day_dataframe_random_data():
545
+ """
546
+ DataFrame with 3 level MultiIndex (year, month, day) covering
547
+ first 100 business days from 2000-01-01 with random data
548
+ """
549
+ tdf = DataFrame(
550
+ np.random.default_rng(2).standard_normal((100, 4)),
551
+ columns=Index(list("ABCD"), dtype=object),
552
+ index=date_range("2000-01-01", periods=100, freq="B"),
553
+ )
554
+ ymd = tdf.groupby([lambda x: x.year, lambda x: x.month, lambda x: x.day]).sum()
555
+ # use int64 Index, to make sure things work
556
+ ymd.index = ymd.index.set_levels([lev.astype("i8") for lev in ymd.index.levels])
557
+ ymd.index.set_names(["year", "month", "day"], inplace=True)
558
+ return ymd
559
+
560
+
561
+ @pytest.fixture
562
+ def lexsorted_two_level_string_multiindex() -> MultiIndex:
563
+ """
564
+ 2-level MultiIndex, lexsorted, with string names.
565
+ """
566
+ return MultiIndex(
567
+ levels=[["foo", "bar", "baz", "qux"], ["one", "two", "three"]],
568
+ codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3], [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
569
+ names=["first", "second"],
570
+ )
571
+
572
+
573
+ @pytest.fixture
574
+ def multiindex_dataframe_random_data(
575
+ lexsorted_two_level_string_multiindex,
576
+ ) -> DataFrame:
577
+ """DataFrame with 2 level MultiIndex with random data"""
578
+ index = lexsorted_two_level_string_multiindex
579
+ return DataFrame(
580
+ np.random.default_rng(2).standard_normal((10, 3)),
581
+ index=index,
582
+ columns=Index(["A", "B", "C"], name="exp"),
583
+ )
584
+
585
+
586
+ def _create_multiindex():
587
+ """
588
+ MultiIndex used to test the general functionality of this object
589
+ """
590
+
591
+ # See Also: tests.multi.conftest.idx
592
+ major_axis = Index(["foo", "bar", "baz", "qux"])
593
+ minor_axis = Index(["one", "two"])
594
+
595
+ major_codes = np.array([0, 0, 1, 2, 3, 3])
596
+ minor_codes = np.array([0, 1, 0, 1, 0, 1])
597
+ index_names = ["first", "second"]
598
+ return MultiIndex(
599
+ levels=[major_axis, minor_axis],
600
+ codes=[major_codes, minor_codes],
601
+ names=index_names,
602
+ verify_integrity=False,
603
+ )
604
+
605
+
606
+ def _create_mi_with_dt64tz_level():
607
+ """
608
+ MultiIndex with a level that is a tzaware DatetimeIndex.
609
+ """
610
+ # GH#8367 round trip with pickle
611
+ return MultiIndex.from_product(
612
+ [[1, 2], ["a", "b"], date_range("20130101", periods=3, tz="US/Eastern")],
613
+ names=["one", "two", "three"],
614
+ )
615
+
616
+
617
+ indices_dict = {
618
+ "string": Index([f"pandas_{i}" for i in range(100)]),
619
+ "datetime": date_range("2020-01-01", periods=100),
620
+ "datetime-tz": date_range("2020-01-01", periods=100, tz="US/Pacific"),
621
+ "period": period_range("2020-01-01", periods=100, freq="D"),
622
+ "timedelta": timedelta_range(start="1 day", periods=100, freq="D"),
623
+ "range": RangeIndex(100),
624
+ "int8": Index(np.arange(100), dtype="int8"),
625
+ "int16": Index(np.arange(100), dtype="int16"),
626
+ "int32": Index(np.arange(100), dtype="int32"),
627
+ "int64": Index(np.arange(100), dtype="int64"),
628
+ "uint8": Index(np.arange(100), dtype="uint8"),
629
+ "uint16": Index(np.arange(100), dtype="uint16"),
630
+ "uint32": Index(np.arange(100), dtype="uint32"),
631
+ "uint64": Index(np.arange(100), dtype="uint64"),
632
+ "float32": Index(np.arange(100), dtype="float32"),
633
+ "float64": Index(np.arange(100), dtype="float64"),
634
+ "bool-object": Index([True, False] * 5, dtype=object),
635
+ "bool-dtype": Index([True, False] * 5, dtype=bool),
636
+ "complex64": Index(
637
+ np.arange(100, dtype="complex64") + 1.0j * np.arange(100, dtype="complex64")
638
+ ),
639
+ "complex128": Index(
640
+ np.arange(100, dtype="complex128") + 1.0j * np.arange(100, dtype="complex128")
641
+ ),
642
+ "categorical": CategoricalIndex(list("abcd") * 25),
643
+ "interval": IntervalIndex.from_breaks(np.linspace(0, 100, num=101)),
644
+ "empty": Index([]),
645
+ "tuples": MultiIndex.from_tuples(zip(["foo", "bar", "baz"], [1, 2, 3])),
646
+ "mi-with-dt64tz-level": _create_mi_with_dt64tz_level(),
647
+ "multi": _create_multiindex(),
648
+ "repeats": Index([0, 0, 1, 1, 2, 2]),
649
+ "nullable_int": Index(np.arange(100), dtype="Int64"),
650
+ "nullable_uint": Index(np.arange(100), dtype="UInt16"),
651
+ "nullable_float": Index(np.arange(100), dtype="Float32"),
652
+ "nullable_bool": Index(np.arange(100).astype(bool), dtype="boolean"),
653
+ "string-python": Index(
654
+ pd.array([f"pandas_{i}" for i in range(100)], dtype="string[python]")
655
+ ),
656
+ }
657
+ if has_pyarrow:
658
+ idx = Index(pd.array([f"pandas_{i}" for i in range(100)], dtype="string[pyarrow]"))
659
+ indices_dict["string-pyarrow"] = idx
660
+
661
+
662
+ @pytest.fixture(params=indices_dict.keys())
663
+ def index(request):
664
+ """
665
+ Fixture for many "simple" kinds of indices.
666
+
667
+ These indices are unlikely to cover corner cases, e.g.
668
+ - no names
669
+ - no NaTs/NaNs
670
+ - no values near implementation bounds
671
+ - ...
672
+ """
673
+ # copy to avoid mutation, e.g. setting .name
674
+ return indices_dict[request.param].copy()
675
+
676
+
677
+ # Needed to generate cartesian product of indices
678
+ index_fixture2 = index
679
+
680
+
681
+ @pytest.fixture(
682
+ params=[
683
+ key for key, value in indices_dict.items() if not isinstance(value, MultiIndex)
684
+ ]
685
+ )
686
+ def index_flat(request):
687
+ """
688
+ index fixture, but excluding MultiIndex cases.
689
+ """
690
+ key = request.param
691
+ return indices_dict[key].copy()
692
+
693
+
694
+ # Alias so we can test with cartesian product of index_flat
695
+ index_flat2 = index_flat
696
+
697
+
698
+ @pytest.fixture(
699
+ params=[
700
+ key
701
+ for key, value in indices_dict.items()
702
+ if not (
703
+ key.startswith(("int", "uint", "float"))
704
+ or key in ["range", "empty", "repeats", "bool-dtype"]
705
+ )
706
+ and not isinstance(value, MultiIndex)
707
+ ]
708
+ )
709
+ def index_with_missing(request):
710
+ """
711
+ Fixture for indices with missing values.
712
+
713
+ Integer-dtype and empty cases are excluded because they cannot hold missing
714
+ values.
715
+
716
+ MultiIndex is excluded because isna() is not defined for MultiIndex.
717
+ """
718
+
719
+ # GH 35538. Use deep copy to avoid elusive bug on np-dev
720
+ # GHA pipeline that writes into indices_dict despite copy
721
+ ind = indices_dict[request.param].copy(deep=True)
722
+ vals = ind.values.copy()
723
+ if request.param in ["tuples", "mi-with-dt64tz-level", "multi"]:
724
+ # For setting missing values in the top level of MultiIndex
725
+ vals = ind.tolist()
726
+ vals[0] = (None,) + vals[0][1:]
727
+ vals[-1] = (None,) + vals[-1][1:]
728
+ return MultiIndex.from_tuples(vals)
729
+ else:
730
+ vals[0] = None
731
+ vals[-1] = None
732
+ return type(ind)(vals)
733
+
734
+
735
+ # ----------------------------------------------------------------
736
+ # Series'
737
+ # ----------------------------------------------------------------
738
+ @pytest.fixture
739
+ def string_series() -> Series:
740
+ """
741
+ Fixture for Series of floats with Index of unique strings
742
+ """
743
+ return Series(
744
+ np.arange(30, dtype=np.float64) * 1.1,
745
+ index=Index([f"i_{i}" for i in range(30)], dtype=object),
746
+ name="series",
747
+ )
748
+
749
+
750
+ @pytest.fixture
751
+ def object_series() -> Series:
752
+ """
753
+ Fixture for Series of dtype object with Index of unique strings
754
+ """
755
+ data = [f"foo_{i}" for i in range(30)]
756
+ index = Index([f"bar_{i}" for i in range(30)], dtype=object)
757
+ return Series(data, index=index, name="objects", dtype=object)
758
+
759
+
760
+ @pytest.fixture
761
+ def datetime_series() -> Series:
762
+ """
763
+ Fixture for Series of floats with DatetimeIndex
764
+ """
765
+ return Series(
766
+ np.random.default_rng(2).standard_normal(30),
767
+ index=date_range("2000-01-01", periods=30, freq="B"),
768
+ name="ts",
769
+ )
770
+
771
+
772
+ def _create_series(index):
773
+ """Helper for the _series dict"""
774
+ size = len(index)
775
+ data = np.random.default_rng(2).standard_normal(size)
776
+ return Series(data, index=index, name="a", copy=False)
777
+
778
+
779
+ _series = {
780
+ f"series-with-{index_id}-index": _create_series(index)
781
+ for index_id, index in indices_dict.items()
782
+ }
783
+
784
+
785
+ @pytest.fixture
786
+ def series_with_simple_index(index) -> Series:
787
+ """
788
+ Fixture for tests on series with changing types of indices.
789
+ """
790
+ return _create_series(index)
791
+
792
+
793
+ _narrow_series = {
794
+ f"{dtype.__name__}-series": Series(
795
+ range(30), index=[f"i-{i}" for i in range(30)], name="a", dtype=dtype
796
+ )
797
+ for dtype in tm.NARROW_NP_DTYPES
798
+ }
799
+
800
+
801
+ _index_or_series_objs = {**indices_dict, **_series, **_narrow_series}
802
+
803
+
804
+ @pytest.fixture(params=_index_or_series_objs.keys())
805
+ def index_or_series_obj(request):
806
+ """
807
+ Fixture for tests on indexes, series and series with a narrow dtype.
808
+ Copied to avoid mutation, e.g. setting .name.
809
+ """
810
+ return _index_or_series_objs[request.param].copy(deep=True)
811
+
812
+
813
+ _typ_objects_series = {
814
+ f"{dtype.__name__}-series": Series(dtype) for dtype in tm.PYTHON_DATA_TYPES
815
+ }
816
+
817
+
818
+ _index_or_series_memory_objs = {
819
+ **indices_dict,
820
+ **_series,
821
+ **_narrow_series,
822
+ **_typ_objects_series,
823
+ }
824
+
825
+
826
+ @pytest.fixture(params=_index_or_series_memory_objs.keys())
827
+ def index_or_series_memory_obj(request):
828
+ """
829
+ Fixture for tests on indexes, series, series with a narrow dtype and
830
+ series with an empty object dtype.
831
+ Copied to avoid mutation, e.g. setting .name.
832
+ """
833
+ return _index_or_series_memory_objs[request.param].copy(deep=True)
834
+
835
+
836
+ # ----------------------------------------------------------------
837
+ # DataFrames
838
+ # ----------------------------------------------------------------
839
+ @pytest.fixture
840
+ def int_frame() -> DataFrame:
841
+ """
842
+ Fixture for DataFrame of ints with index of unique strings
843
+
844
+ Columns are ['A', 'B', 'C', 'D']
845
+ """
846
+ return DataFrame(
847
+ np.ones((30, 4), dtype=np.int64),
848
+ index=Index([f"foo_{i}" for i in range(30)], dtype=object),
849
+ columns=Index(list("ABCD"), dtype=object),
850
+ )
851
+
852
+
853
+ @pytest.fixture
854
+ def float_frame() -> DataFrame:
855
+ """
856
+ Fixture for DataFrame of floats with index of unique strings
857
+
858
+ Columns are ['A', 'B', 'C', 'D'].
859
+ """
860
+ return DataFrame(
861
+ np.random.default_rng(2).standard_normal((30, 4)),
862
+ index=Index([f"foo_{i}" for i in range(30)]),
863
+ columns=Index(list("ABCD")),
864
+ )
865
+
866
+
867
+ @pytest.fixture
868
+ def rand_series_with_duplicate_datetimeindex() -> Series:
869
+ """
870
+ Fixture for Series with a DatetimeIndex that has duplicates.
871
+ """
872
+ dates = [
873
+ datetime(2000, 1, 2),
874
+ datetime(2000, 1, 2),
875
+ datetime(2000, 1, 2),
876
+ datetime(2000, 1, 3),
877
+ datetime(2000, 1, 3),
878
+ datetime(2000, 1, 3),
879
+ datetime(2000, 1, 4),
880
+ datetime(2000, 1, 4),
881
+ datetime(2000, 1, 4),
882
+ datetime(2000, 1, 5),
883
+ ]
884
+
885
+ return Series(np.random.default_rng(2).standard_normal(len(dates)), index=dates)
886
+
887
+
888
+ # ----------------------------------------------------------------
889
+ # Scalars
890
+ # ----------------------------------------------------------------
891
+ @pytest.fixture(
892
+ params=[
893
+ (Interval(left=0, right=5), IntervalDtype("int64", "right")),
894
+ (Interval(left=0.1, right=0.5), IntervalDtype("float64", "right")),
895
+ (Period("2012-01", freq="M"), "period[M]"),
896
+ (Period("2012-02-01", freq="D"), "period[D]"),
897
+ (
898
+ Timestamp("2011-01-01", tz="US/Eastern"),
899
+ DatetimeTZDtype(unit="s", tz="US/Eastern"),
900
+ ),
901
+ (Timedelta(seconds=500), "timedelta64[ns]"),
902
+ ]
903
+ )
904
+ def ea_scalar_and_dtype(request):
905
+ return request.param
906
+
907
+
908
+ # ----------------------------------------------------------------
909
+ # Operators & Operations
910
+ # ----------------------------------------------------------------
911
+
912
+
913
+ @pytest.fixture(params=tm.arithmetic_dunder_methods)
914
+ def all_arithmetic_operators(request):
915
+ """
916
+ Fixture for dunder names for common arithmetic operations.
917
+ """
918
+ return request.param
919
+
920
+
921
+ @pytest.fixture(
922
+ params=[
923
+ operator.add,
924
+ ops.radd,
925
+ operator.sub,
926
+ ops.rsub,
927
+ operator.mul,
928
+ ops.rmul,
929
+ operator.truediv,
930
+ ops.rtruediv,
931
+ operator.floordiv,
932
+ ops.rfloordiv,
933
+ operator.mod,
934
+ ops.rmod,
935
+ operator.pow,
936
+ ops.rpow,
937
+ operator.eq,
938
+ operator.ne,
939
+ operator.lt,
940
+ operator.le,
941
+ operator.gt,
942
+ operator.ge,
943
+ operator.and_,
944
+ ops.rand_,
945
+ operator.xor,
946
+ ops.rxor,
947
+ operator.or_,
948
+ ops.ror_,
949
+ ]
950
+ )
951
+ def all_binary_operators(request):
952
+ """
953
+ Fixture for operator and roperator arithmetic, comparison, and logical ops.
954
+ """
955
+ return request.param
956
+
957
+
958
+ @pytest.fixture(
959
+ params=[
960
+ operator.add,
961
+ ops.radd,
962
+ operator.sub,
963
+ ops.rsub,
964
+ operator.mul,
965
+ ops.rmul,
966
+ operator.truediv,
967
+ ops.rtruediv,
968
+ operator.floordiv,
969
+ ops.rfloordiv,
970
+ operator.mod,
971
+ ops.rmod,
972
+ operator.pow,
973
+ ops.rpow,
974
+ ]
975
+ )
976
+ def all_arithmetic_functions(request):
977
+ """
978
+ Fixture for operator and roperator arithmetic functions.
979
+
980
+ Notes
981
+ -----
982
+ This includes divmod and rdivmod, whereas all_arithmetic_operators
983
+ does not.
984
+ """
985
+ return request.param
986
+
987
+
988
+ _all_numeric_reductions = [
989
+ "count",
990
+ "sum",
991
+ "max",
992
+ "min",
993
+ "mean",
994
+ "prod",
995
+ "std",
996
+ "var",
997
+ "median",
998
+ "kurt",
999
+ "skew",
1000
+ "sem",
1001
+ ]
1002
+
1003
+
1004
+ @pytest.fixture(params=_all_numeric_reductions)
1005
+ def all_numeric_reductions(request):
1006
+ """
1007
+ Fixture for numeric reduction names.
1008
+ """
1009
+ return request.param
1010
+
1011
+
1012
+ _all_boolean_reductions = ["all", "any"]
1013
+
1014
+
1015
+ @pytest.fixture(params=_all_boolean_reductions)
1016
+ def all_boolean_reductions(request):
1017
+ """
1018
+ Fixture for boolean reduction names.
1019
+ """
1020
+ return request.param
1021
+
1022
+
1023
+ _all_reductions = _all_numeric_reductions + _all_boolean_reductions
1024
+
1025
+
1026
+ @pytest.fixture(params=_all_reductions)
1027
+ def all_reductions(request):
1028
+ """
1029
+ Fixture for all (boolean + numeric) reduction names.
1030
+ """
1031
+ return request.param
1032
+
1033
+
1034
+ @pytest.fixture(
1035
+ params=[
1036
+ operator.eq,
1037
+ operator.ne,
1038
+ operator.gt,
1039
+ operator.ge,
1040
+ operator.lt,
1041
+ operator.le,
1042
+ ]
1043
+ )
1044
+ def comparison_op(request):
1045
+ """
1046
+ Fixture for operator module comparison functions.
1047
+ """
1048
+ return request.param
1049
+
1050
+
1051
+ @pytest.fixture(params=["__le__", "__lt__", "__ge__", "__gt__"])
1052
+ def compare_operators_no_eq_ne(request):
1053
+ """
1054
+ Fixture for dunder names for compare operations except == and !=
1055
+
1056
+ * >=
1057
+ * >
1058
+ * <
1059
+ * <=
1060
+ """
1061
+ return request.param
1062
+
1063
+
1064
+ @pytest.fixture(
1065
+ params=["__and__", "__rand__", "__or__", "__ror__", "__xor__", "__rxor__"]
1066
+ )
1067
+ def all_logical_operators(request):
1068
+ """
1069
+ Fixture for dunder names for common logical operations
1070
+
1071
+ * |
1072
+ * &
1073
+ * ^
1074
+ """
1075
+ return request.param
1076
+
1077
+
1078
+ _all_numeric_accumulations = ["cumsum", "cumprod", "cummin", "cummax"]
1079
+
1080
+
1081
+ @pytest.fixture(params=_all_numeric_accumulations)
1082
+ def all_numeric_accumulations(request):
1083
+ """
1084
+ Fixture for numeric accumulation names
1085
+ """
1086
+ return request.param
1087
+
1088
+
1089
+ # ----------------------------------------------------------------
1090
+ # Data sets/files
1091
+ # ----------------------------------------------------------------
1092
+ @pytest.fixture
1093
+ def strict_data_files(pytestconfig):
1094
+ """
1095
+ Returns the configuration for the test setting `--no-strict-data-files`.
1096
+ """
1097
+ return pytestconfig.getoption("--no-strict-data-files")
1098
+
1099
+
1100
+ @pytest.fixture
1101
+ def datapath(strict_data_files: bool) -> Callable[..., str]:
1102
+ """
1103
+ Get the path to a data file.
1104
+
1105
+ Parameters
1106
+ ----------
1107
+ path : str
1108
+ Path to the file, relative to ``pandas/tests/``
1109
+
1110
+ Returns
1111
+ -------
1112
+ path including ``pandas/tests``.
1113
+
1114
+ Raises
1115
+ ------
1116
+ ValueError
1117
+ If the path doesn't exist and the --no-strict-data-files option is not set.
1118
+ """
1119
+ BASE_PATH = os.path.join(os.path.dirname(__file__), "tests")
1120
+
1121
+ def deco(*args):
1122
+ path = os.path.join(BASE_PATH, *args)
1123
+ if not os.path.exists(path):
1124
+ if strict_data_files:
1125
+ raise ValueError(
1126
+ f"Could not find file {path} and --no-strict-data-files is not set."
1127
+ )
1128
+ pytest.skip(f"Could not find {path}.")
1129
+ return path
1130
+
1131
+ return deco
1132
+
1133
+
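A test consumes the fixture by joining path parts under `pandas/tests/`; the file named here is one that ships with the suite:

```python
def test_iris_path(datapath):
    path = datapath("io", "data", "csv", "iris.csv")
    # resolves to .../pandas/tests/io/data/csv/iris.csv, or skips if absent
    assert path.endswith("iris.csv")
```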
1134
+ # ----------------------------------------------------------------
1135
+ # Time zones
1136
+ # ----------------------------------------------------------------
1137
+ TIMEZONES = [
1138
+ None,
1139
+ "UTC",
1140
+ "US/Eastern",
1141
+ "Asia/Tokyo",
1142
+ "dateutil/US/Pacific",
1143
+ "dateutil/Asia/Singapore",
1144
+ "+01:15",
1145
+ "-02:15",
1146
+ "UTC+01:15",
1147
+ "UTC-02:15",
1148
+ tzutc(),
1149
+ tzlocal(),
1150
+ FixedOffset(300),
1151
+ FixedOffset(0),
1152
+ FixedOffset(-300),
1153
+ timezone.utc,
1154
+ timezone(timedelta(hours=1)),
1155
+ timezone(timedelta(hours=-1), name="foo"),
1156
+ ]
1157
+ if zoneinfo is not None:
1158
+ TIMEZONES.extend(
1159
+ [
1160
+ zoneinfo.ZoneInfo("US/Pacific"), # type: ignore[list-item]
1161
+ zoneinfo.ZoneInfo("UTC"), # type: ignore[list-item]
1162
+ ]
1163
+ )
1164
+ TIMEZONE_IDS = [repr(i) for i in TIMEZONES]
1165
+
1166
+
1167
+ @td.parametrize_fixture_doc(str(TIMEZONE_IDS))
1168
+ @pytest.fixture(params=TIMEZONES, ids=TIMEZONE_IDS)
1169
+ def tz_naive_fixture(request):
1170
+ """
1171
+ Fixture for trying timezones including default (None): {0}
1172
+ """
1173
+ return request.param
1174
+
1175
+
1176
+ @td.parametrize_fixture_doc(str(TIMEZONE_IDS[1:]))
1177
+ @pytest.fixture(params=TIMEZONES[1:], ids=TIMEZONE_IDS[1:])
1178
+ def tz_aware_fixture(request):
1179
+ """
1180
+ Fixture for trying explicit timezones: {0}
1181
+ """
1182
+ return request.param
1183
+
1184
+
1185
+ # Generate cartesian product of tz_aware_fixture:
1186
+ tz_aware_fixture2 = tz_aware_fixture
1187
+
1188
+
1189
+ _UTCS = ["utc", "dateutil/UTC", utc, tzutc(), timezone.utc]
1190
+ if zoneinfo is not None:
1191
+ _UTCS.append(zoneinfo.ZoneInfo("UTC"))
1192
+
1193
+
1194
+ @pytest.fixture(params=_UTCS)
1195
+ def utc_fixture(request):
1196
+ """
1197
+ Fixture to provide variants of UTC timezone strings and tzinfo objects.
1198
+ """
1199
+ return request.param
1200
+
1201
+
1202
+ utc_fixture2 = utc_fixture
1203
+
1204
+
1205
+ @pytest.fixture(params=["s", "ms", "us", "ns"])
1206
+ def unit(request):
1207
+ """
1208
+ datetime64 units we support.
1209
+ """
1210
+ return request.param
1211
+
1212
+
1213
+ unit2 = unit
1214
+
1215
+
1216
+ # ----------------------------------------------------------------
1217
+ # Dtypes
1218
+ # ----------------------------------------------------------------
1219
+ @pytest.fixture(params=tm.STRING_DTYPES)
1220
+ def string_dtype(request):
1221
+ """
1222
+ Parametrized fixture for string dtypes.
1223
+
1224
+ * str
1225
+ * 'str'
1226
+ * 'U'
1227
+ """
1228
+ return request.param
1229
+
1230
+
1231
+ @pytest.fixture(
1232
+ params=[
1233
+ "string[python]",
1234
+ pytest.param("string[pyarrow]", marks=td.skip_if_no("pyarrow")),
1235
+ ]
1236
+ )
1237
+ def nullable_string_dtype(request):
1238
+ """
1239
+ Parametrized fixture for string dtypes.
1240
+
1241
+ * 'string[python]'
1242
+ * 'string[pyarrow]'
1243
+ """
1244
+ return request.param
1245
+
1246
+
1247
+ @pytest.fixture(
1248
+ params=[
1249
+ "python",
1250
+ pytest.param("pyarrow", marks=td.skip_if_no("pyarrow")),
1251
+ pytest.param("pyarrow_numpy", marks=td.skip_if_no("pyarrow")),
1252
+ ]
1253
+ )
1254
+ def string_storage(request):
1255
+ """
1256
+ Parametrized fixture for pd.options.mode.string_storage.
1257
+
1258
+ * 'python'
1259
+ * 'pyarrow'
1260
+ * 'pyarrow_numpy'
1261
+ """
1262
+ return request.param
1263
+
1264
+
1265
+ @pytest.fixture(
1266
+ params=[
1267
+ "numpy_nullable",
1268
+ pytest.param("pyarrow", marks=td.skip_if_no("pyarrow")),
1269
+ ]
1270
+ )
1271
+ def dtype_backend(request):
1272
+ """
1273
+ Parametrized fixture for the dtype_backend keyword argument.
1274
+
1275
+ * 'numpy_nullable'
1276
+ * 'pyarrow'
1277
+ """
1278
+ return request.param
1279
+
1280
+
1281
+ # Alias so we can test with cartesian product of string_storage
1282
+ string_storage2 = string_storage
1283
+
1284
+
1285
+ @pytest.fixture(params=tm.BYTES_DTYPES)
1286
+ def bytes_dtype(request):
1287
+ """
1288
+ Parametrized fixture for bytes dtypes.
1289
+
1290
+ * bytes
1291
+ * 'bytes'
1292
+ """
1293
+ return request.param
1294
+
1295
+
1296
+ @pytest.fixture(params=tm.OBJECT_DTYPES)
1297
+ def object_dtype(request):
1298
+ """
1299
+ Parametrized fixture for object dtypes.
1300
+
1301
+ * object
1302
+ * 'object'
1303
+ """
1304
+ return request.param
1305
+
1306
+
1307
+ @pytest.fixture(
1308
+ params=[
1309
+ "object",
1310
+ "string[python]",
1311
+ pytest.param("string[pyarrow]", marks=td.skip_if_no("pyarrow")),
1312
+ pytest.param("string[pyarrow_numpy]", marks=td.skip_if_no("pyarrow")),
1313
+ ]
1314
+ )
1315
+ def any_string_dtype(request):
1316
+ """
1317
+ Parametrized fixture for string dtypes.
1318
+ * 'object'
1319
+ * 'string[python]'
1320
+ * 'string[pyarrow]'
+ * 'string[pyarrow_numpy]'
1321
+ """
1322
+ return request.param
1323
+
1324
+
1325
+ @pytest.fixture(params=tm.DATETIME64_DTYPES)
1326
+ def datetime64_dtype(request):
1327
+ """
1328
+ Parametrized fixture for datetime64 dtypes.
1329
+
1330
+ * 'datetime64[ns]'
1331
+ * 'M8[ns]'
1332
+ """
1333
+ return request.param
1334
+
1335
+
1336
+ @pytest.fixture(params=tm.TIMEDELTA64_DTYPES)
1337
+ def timedelta64_dtype(request):
1338
+ """
1339
+ Parametrized fixture for timedelta64 dtypes.
1340
+
1341
+ * 'timedelta64[ns]'
1342
+ * 'm8[ns]'
1343
+ """
1344
+ return request.param
1345
+
1346
+
1347
+ @pytest.fixture
1348
+ def fixed_now_ts() -> Timestamp:
1349
+ """
1350
+ Fixture emits fixed Timestamp.now()
1351
+ """
1352
+ return Timestamp( # pyright: ignore[reportGeneralTypeIssues]
1353
+ year=2021, month=1, day=1, hour=12, minute=4, second=13, microsecond=22
1354
+ )
1355
+
1356
+
1357
+ @pytest.fixture(params=tm.FLOAT_NUMPY_DTYPES)
1358
+ def float_numpy_dtype(request):
1359
+ """
1360
+ Parameterized fixture for float dtypes.
1361
+
1362
+ * float
1363
+ * 'float32'
1364
+ * 'float64'
1365
+ """
1366
+ return request.param
1367
+
1368
+
1369
+ @pytest.fixture(params=tm.FLOAT_EA_DTYPES)
1370
+ def float_ea_dtype(request):
1371
+ """
1372
+ Parameterized fixture for float dtypes.
1373
+
1374
+ * 'Float32'
1375
+ * 'Float64'
1376
+ """
1377
+ return request.param
1378
+
1379
+
1380
+ @pytest.fixture(params=tm.ALL_FLOAT_DTYPES)
1381
+ def any_float_dtype(request):
1382
+ """
1383
+ Parameterized fixture for float dtypes.
1384
+
1385
+ * float
1386
+ * 'float32'
1387
+ * 'float64'
1388
+ * 'Float32'
1389
+ * 'Float64'
1390
+ """
1391
+ return request.param
1392
+
1393
+
1394
+ @pytest.fixture(params=tm.COMPLEX_DTYPES)
1395
+ def complex_dtype(request):
1396
+ """
1397
+ Parameterized fixture for complex dtypes.
1398
+
1399
+ * complex
1400
+ * 'complex64'
1401
+ * 'complex128'
1402
+ """
1403
+ return request.param
1404
+
1405
+
1406
+ @pytest.fixture(params=tm.SIGNED_INT_NUMPY_DTYPES)
1407
+ def any_signed_int_numpy_dtype(request):
1408
+ """
1409
+ Parameterized fixture for signed integer dtypes.
1410
+
1411
+ * int
1412
+ * 'int8'
1413
+ * 'int16'
1414
+ * 'int32'
1415
+ * 'int64'
1416
+ """
1417
+ return request.param
1418
+
1419
+
1420
+ @pytest.fixture(params=tm.UNSIGNED_INT_NUMPY_DTYPES)
1421
+ def any_unsigned_int_numpy_dtype(request):
1422
+ """
1423
+ Parameterized fixture for unsigned integer dtypes.
1424
+
1425
+ * 'uint8'
1426
+ * 'uint16'
1427
+ * 'uint32'
1428
+ * 'uint64'
1429
+ """
1430
+ return request.param
1431
+
1432
+
1433
+ @pytest.fixture(params=tm.ALL_INT_NUMPY_DTYPES)
1434
+ def any_int_numpy_dtype(request):
1435
+ """
1436
+ Parameterized fixture for any integer dtype.
1437
+
1438
+ * int
1439
+ * 'int8'
1440
+ * 'uint8'
1441
+ * 'int16'
1442
+ * 'uint16'
1443
+ * 'int32'
1444
+ * 'uint32'
1445
+ * 'int64'
1446
+ * 'uint64'
1447
+ """
1448
+ return request.param
1449
+
1450
+
1451
+ @pytest.fixture(params=tm.ALL_INT_EA_DTYPES)
1452
+ def any_int_ea_dtype(request):
1453
+ """
1454
+ Parameterized fixture for any nullable integer dtype.
1455
+
1456
+ * 'UInt8'
1457
+ * 'Int8'
1458
+ * 'UInt16'
1459
+ * 'Int16'
1460
+ * 'UInt32'
1461
+ * 'Int32'
1462
+ * 'UInt64'
1463
+ * 'Int64'
1464
+ """
1465
+ return request.param
1466
+
1467
+
1468
+ @pytest.fixture(params=tm.ALL_INT_DTYPES)
1469
+ def any_int_dtype(request):
1470
+ """
1471
+ Parameterized fixture for any nullable integer dtype.
1472
+
1473
+ * int
1474
+ * 'int8'
1475
+ * 'uint8'
1476
+ * 'int16'
1477
+ * 'uint16'
1478
+ * 'int32'
1479
+ * 'uint32'
1480
+ * 'int64'
1481
+ * 'uint64'
1482
+ * 'UInt8'
1483
+ * 'Int8'
1484
+ * 'UInt16'
1485
+ * 'Int16'
1486
+ * 'UInt32'
1487
+ * 'Int32'
1488
+ * 'UInt64'
1489
+ * 'Int64'
1490
+ """
1491
+ return request.param
1492
+
1493
+
1494
+ @pytest.fixture(params=tm.ALL_INT_EA_DTYPES + tm.FLOAT_EA_DTYPES)
1495
+ def any_numeric_ea_dtype(request):
1496
+ """
1497
+ Parameterized fixture for any nullable integer dtype and
1498
+ any float ea dtypes.
1499
+
1500
+ * 'UInt8'
1501
+ * 'Int8'
1502
+ * 'UInt16'
1503
+ * 'Int16'
1504
+ * 'UInt32'
1505
+ * 'Int32'
1506
+ * 'UInt64'
1507
+ * 'Int64'
1508
+ * 'Float32'
1509
+ * 'Float64'
1510
+ """
1511
+ return request.param
1512
+
1513
+
1514
+ # Unsupported operand types for + ("List[Union[str, ExtensionDtype, dtype[Any],
1515
+ # Type[object]]]" and "List[str]")
1516
+ @pytest.fixture(
1517
+ params=tm.ALL_INT_EA_DTYPES
1518
+ + tm.FLOAT_EA_DTYPES
1519
+ + tm.ALL_INT_PYARROW_DTYPES_STR_REPR
1520
+ + tm.FLOAT_PYARROW_DTYPES_STR_REPR # type: ignore[operator]
1521
+ )
1522
+ def any_numeric_ea_and_arrow_dtype(request):
1523
+ """
1524
+ Parameterized fixture for any nullable integer dtype and
1525
+ any float ea dtypes.
1526
+
1527
+ * 'UInt8'
1528
+ * 'Int8'
1529
+ * 'UInt16'
1530
+ * 'Int16'
1531
+ * 'UInt32'
1532
+ * 'Int32'
1533
+ * 'UInt64'
1534
+ * 'Int64'
1535
+ * 'Float32'
1536
+ * 'Float64'
1537
+ * 'uint8[pyarrow]'
1538
+ * 'int8[pyarrow]'
1539
+ * 'uint16[pyarrow]'
1540
+ * 'int16[pyarrow]'
1541
+ * 'uint32[pyarrow]'
1542
+ * 'int32[pyarrow]'
1543
+ * 'uint64[pyarrow]'
1544
+ * 'int64[pyarrow]'
1545
+ * 'float32[pyarrow]'
1546
+ * 'float64[pyarrow]'
1547
+ """
1548
+ return request.param
1549
+
1550
+
1551
+ @pytest.fixture(params=tm.SIGNED_INT_EA_DTYPES)
1552
+ def any_signed_int_ea_dtype(request):
1553
+ """
1554
+ Parameterized fixture for any signed nullable integer dtype.
1555
+
1556
+ * 'Int8'
1557
+ * 'Int16'
1558
+ * 'Int32'
1559
+ * 'Int64'
1560
+ """
1561
+ return request.param
1562
+
1563
+
1564
+ @pytest.fixture(params=tm.ALL_REAL_NUMPY_DTYPES)
1565
+ def any_real_numpy_dtype(request):
1566
+ """
1567
+ Parameterized fixture for any (purely) real numeric dtype.
1568
+
1569
+ * int
1570
+ * 'int8'
1571
+ * 'uint8'
1572
+ * 'int16'
1573
+ * 'uint16'
1574
+ * 'int32'
1575
+ * 'uint32'
1576
+ * 'int64'
1577
+ * 'uint64'
1578
+ * float
1579
+ * 'float32'
1580
+ * 'float64'
1581
+ """
1582
+ return request.param
1583
+
1584
+
1585
+ @pytest.fixture(params=tm.ALL_REAL_DTYPES)
1586
+ def any_real_numeric_dtype(request):
1587
+ """
1588
+ Parameterized fixture for any (purely) real numeric dtype.
1589
+
1590
+ * int
1591
+ * 'int8'
1592
+ * 'uint8'
1593
+ * 'int16'
1594
+ * 'uint16'
1595
+ * 'int32'
1596
+ * 'uint32'
1597
+ * 'int64'
1598
+ * 'uint64'
1599
+ * float
1600
+ * 'float32'
1601
+ * 'float64'
1602
+
1603
+ and associated ea dtypes.
1604
+ """
1605
+ return request.param
1606
+
1607
+
1608
+ @pytest.fixture(params=tm.ALL_NUMPY_DTYPES)
1609
+ def any_numpy_dtype(request):
1610
+ """
1611
+ Parameterized fixture for all numpy dtypes.
1612
+
1613
+ * bool
1614
+ * 'bool'
1615
+ * int
1616
+ * 'int8'
1617
+ * 'uint8'
1618
+ * 'int16'
1619
+ * 'uint16'
1620
+ * 'int32'
1621
+ * 'uint32'
1622
+ * 'int64'
1623
+ * 'uint64'
1624
+ * float
1625
+ * 'float32'
1626
+ * 'float64'
1627
+ * complex
1628
+ * 'complex64'
1629
+ * 'complex128'
1630
+ * str
1631
+ * 'str'
1632
+ * 'U'
1633
+ * bytes
1634
+ * 'bytes'
1635
+ * 'datetime64[ns]'
1636
+ * 'M8[ns]'
1637
+ * 'timedelta64[ns]'
1638
+ * 'm8[ns]'
1639
+ * object
1640
+ * 'object'
1641
+ """
1642
+ return request.param
1643
+
1644
+
1645
+ @pytest.fixture(params=tm.ALL_REAL_NULLABLE_DTYPES)
1646
+ def any_real_nullable_dtype(request):
1647
+ """
1648
+ Parameterized fixture for all real dtypes that can hold NA.
1649
+
1650
+ * float
1651
+ * 'float32'
1652
+ * 'float64'
1653
+ * 'Float32'
1654
+ * 'Float64'
1655
+ * 'UInt8'
1656
+ * 'UInt16'
1657
+ * 'UInt32'
1658
+ * 'UInt64'
1659
+ * 'Int8'
1660
+ * 'Int16'
1661
+ * 'Int32'
1662
+ * 'Int64'
1663
+ * 'uint8[pyarrow]'
1664
+ * 'uint16[pyarrow]'
1665
+ * 'uint32[pyarrow]'
1666
+ * 'uint64[pyarrow]'
1667
+ * 'int8[pyarrow]'
1668
+ * 'int16[pyarrow]'
1669
+ * 'int32[pyarrow]'
1670
+ * 'int64[pyarrow]'
1671
+ * 'float[pyarrow]'
1672
+ * 'double[pyarrow]'
1673
+ """
1674
+ return request.param
1675
+
1676
+
1677
+ @pytest.fixture(params=tm.ALL_NUMERIC_DTYPES)
1678
+ def any_numeric_dtype(request):
1679
+ """
1680
+ Parameterized fixture for all numeric dtypes.
1681
+
1682
+ * int
1683
+ * 'int8'
1684
+ * 'uint8'
1685
+ * 'int16'
1686
+ * 'uint16'
1687
+ * 'int32'
1688
+ * 'uint32'
1689
+ * 'int64'
1690
+ * 'uint64'
1691
+ * float
1692
+ * 'float32'
1693
+ * 'float64'
1694
+ * complex
1695
+ * 'complex64'
1696
+ * 'complex128'
1697
+ * 'UInt8'
1698
+ * 'Int8'
1699
+ * 'UInt16'
1700
+ * 'Int16'
1701
+ * 'UInt32'
1702
+ * 'Int32'
1703
+ * 'UInt64'
1704
+ * 'Int64'
1705
+ * 'Float32'
1706
+ * 'Float64'
1707
+ """
1708
+ return request.param
1709
+
1710
+
1711
+ # categoricals are handled separately
1712
+ _any_skipna_inferred_dtype = [
1713
+ ("string", ["a", np.nan, "c"]),
1714
+ ("string", ["a", pd.NA, "c"]),
1715
+ ("mixed", ["a", pd.NaT, "c"]), # pd.NaT not considered valid by is_string_array
1716
+ ("bytes", [b"a", np.nan, b"c"]),
1717
+ ("empty", [np.nan, np.nan, np.nan]),
1718
+ ("empty", []),
1719
+ ("mixed-integer", ["a", np.nan, 2]),
1720
+ ("mixed", ["a", np.nan, 2.0]),
1721
+ ("floating", [1.0, np.nan, 2.0]),
1722
+ ("integer", [1, np.nan, 2]),
1723
+ ("mixed-integer-float", [1, np.nan, 2.0]),
1724
+ ("decimal", [Decimal(1), np.nan, Decimal(2)]),
1725
+ ("boolean", [True, np.nan, False]),
1726
+ ("boolean", [True, pd.NA, False]),
1727
+ ("datetime64", [np.datetime64("2013-01-01"), np.nan, np.datetime64("2018-01-01")]),
1728
+ ("datetime", [Timestamp("20130101"), np.nan, Timestamp("20180101")]),
1729
+ ("date", [date(2013, 1, 1), np.nan, date(2018, 1, 1)]),
1730
+ ("complex", [1 + 1j, np.nan, 2 + 2j]),
1731
+ # The following dtype is commented out due to GH 23554
1732
+ # ('timedelta64', [np.timedelta64(1, 'D'),
1733
+ # np.nan, np.timedelta64(2, 'D')]),
1734
+ ("timedelta", [timedelta(1), np.nan, timedelta(2)]),
1735
+ ("time", [time(1), np.nan, time(2)]),
1736
+ ("period", [Period(2013), pd.NaT, Period(2018)]),
1737
+ ("interval", [Interval(0, 1), np.nan, Interval(0, 2)]),
1738
+ ]
1739
+ ids, _ = zip(*_any_skipna_inferred_dtype) # use inferred type as fixture-id
1740
+
1741
+
1742
+ @pytest.fixture(params=_any_skipna_inferred_dtype, ids=ids)
1743
+ def any_skipna_inferred_dtype(request):
1744
+ """
1745
+ Fixture for all inferred dtypes from _libs.lib.infer_dtype
1746
+
1747
+ The covered (inferred) types are:
1748
+ * 'string'
1749
+ * 'empty'
1750
+ * 'bytes'
1751
+ * 'mixed'
1752
+ * 'mixed-integer'
1753
+ * 'mixed-integer-float'
1754
+ * 'floating'
1755
+ * 'integer'
1756
+ * 'decimal'
1757
+ * 'boolean'
1758
+ * 'datetime64'
1759
+ * 'datetime'
1760
+ * 'date'
1761
+ * 'timedelta'
1762
+ * 'time'
1763
+ * 'period'
1764
+ * 'interval'
1765
+
1766
+ Returns
1767
+ -------
1768
+ inferred_dtype : str
1769
+ The string for the inferred dtype from _libs.lib.infer_dtype
1770
+ values : np.ndarray
1771
+ An array of object dtype that will be inferred to have
1772
+ `inferred_dtype`
1773
+
1774
+ Examples
1775
+ --------
1776
+ >>> from pandas._libs import lib
1777
+ >>>
1778
+ >>> def test_something(any_skipna_inferred_dtype):
1779
+ ... inferred_dtype, values = any_skipna_inferred_dtype
1780
+ ... # will pass
1781
+ ... assert lib.infer_dtype(values, skipna=True) == inferred_dtype
1782
+ """
1783
+ inferred_dtype, values = request.param
1784
+ values = np.array(values, dtype=object) # object dtype to avoid casting
1785
+
1786
+ # correctness of inference tested in tests/dtypes/test_inference.py
1787
+ return inferred_dtype, values
1788
+
1789
+
1790
+ # ----------------------------------------------------------------
1791
+ # Misc
1792
+ # ----------------------------------------------------------------
1793
+ @pytest.fixture
1794
+ def ip():
1795
+ """
1796
+ Get an instance of IPython.InteractiveShell.
1797
+
1798
+ Will raise a skip if IPython is not installed.
1799
+ """
1800
+ pytest.importorskip("IPython", minversion="6.0.0")
1801
+ from IPython.core.interactiveshell import InteractiveShell
1802
+
1803
+ # GH#35711 make sure sqlite history file handle is not leaked
1804
+ from traitlets.config import Config # isort:skip
1805
+
1806
+ c = Config()
1807
+ c.HistoryManager.hist_file = ":memory:"
1808
+
1809
+ return InteractiveShell(config=c)
1810
+
1811
+
1812
+ @pytest.fixture(params=["bsr", "coo", "csc", "csr", "dia", "dok", "lil"])
1813
+ def spmatrix(request):
1814
+ """
1815
+ Yields scipy sparse matrix classes.
1816
+ """
1817
+ sparse = pytest.importorskip("scipy.sparse")
1818
+
1819
+ return getattr(sparse, request.param + "_matrix")
1820
+
1821
+
1822
+ @pytest.fixture(
1823
+ params=[
1824
+ getattr(pd.offsets, o)
1825
+ for o in pd.offsets.__all__
1826
+ if issubclass(getattr(pd.offsets, o), pd.offsets.Tick) and o != "Tick"
1827
+ ]
1828
+ )
1829
+ def tick_classes(request):
1830
+ """
1831
+ Fixture for Tick based datetime offsets available for a time series.
1832
+ """
1833
+ return request.param
1834
+
1835
+
1836
+ @pytest.fixture(params=[None, lambda x: x])
1837
+ def sort_by_key(request):
1838
+ """
1839
+ Simple fixture for testing keys in sorting methods.
1840
+ Tests None (no key) and the identity key.
1841
+ """
1842
+ return request.param
1843
+
1844
+
1845
+ @pytest.fixture(
1846
+ params=[
1847
+ ("foo", None, None),
1848
+ ("Egon", "Venkman", None),
1849
+ ("NCC1701D", "NCC1701D", "NCC1701D"),
1850
+ # possibly-matching NAs
1851
+ (np.nan, np.nan, np.nan),
1852
+ (np.nan, pd.NaT, None),
1853
+ (np.nan, pd.NA, None),
1854
+ (pd.NA, pd.NA, pd.NA),
1855
+ ]
1856
+ )
1857
+ def names(request) -> tuple[Hashable, Hashable, Hashable]:
1858
+ """
1859
+ A 3-tuple of names, the first two for operands, the last for a result.
1860
+ """
1861
+ return request.param
1862
+
1863
+
1864
+ @pytest.fixture(params=[tm.setitem, tm.loc, tm.iloc])
1865
+ def indexer_sli(request):
1866
+ """
1867
+ Parametrize over __setitem__, loc.__setitem__, iloc.__setitem__
1868
+ """
1869
+ return request.param
1870
+
1871
+
1872
+ @pytest.fixture(params=[tm.loc, tm.iloc])
1873
+ def indexer_li(request):
1874
+ """
1875
+ Parametrize over loc.__getitem__, iloc.__getitem__
1876
+ """
1877
+ return request.param
1878
+
1879
+
1880
+ @pytest.fixture(params=[tm.setitem, tm.iloc])
1881
+ def indexer_si(request):
1882
+ """
1883
+ Parametrize over __setitem__, iloc.__setitem__
1884
+ """
1885
+ return request.param
1886
+
1887
+
1888
+ @pytest.fixture(params=[tm.setitem, tm.loc])
1889
+ def indexer_sl(request):
1890
+ """
1891
+ Parametrize over __setitem__, loc.__setitem__
1892
+ """
1893
+ return request.param
1894
+
1895
+
1896
+ @pytest.fixture(params=[tm.at, tm.loc])
1897
+ def indexer_al(request):
1898
+ """
1899
+ Parametrize over at.__setitem__, loc.__setitem__
1900
+ """
1901
+ return request.param
1902
+
1903
+
1904
+ @pytest.fixture(params=[tm.iat, tm.iloc])
1905
+ def indexer_ial(request):
1906
+ """
1907
+ Parametrize over iat.__setitem__, iloc.__setitem__
1908
+ """
1909
+ return request.param
1910
+
1911
+
1912
+ @pytest.fixture
1913
+ def using_array_manager() -> bool:
1914
+ """
1915
+ Fixture to check if the array manager is being used.
1916
+ """
1917
+ return _get_option("mode.data_manager", silent=True) == "array"
1918
+
1919
+
1920
+ @pytest.fixture
1921
+ def using_copy_on_write() -> bool:
1922
+ """
1923
+ Fixture to check if Copy-on-Write is enabled.
1924
+ """
1925
+ return (
1926
+ pd.options.mode.copy_on_write is True
1927
+ and _get_option("mode.data_manager", silent=True) == "block"
1928
+ )
1929
+
1930
+
1931
+ @pytest.fixture
1932
+ def warn_copy_on_write() -> bool:
1933
+ """
1934
+ Fixture to check if Copy-on-Write is in warning mode.
1935
+ """
1936
+ return (
1937
+ pd.options.mode.copy_on_write == "warn"
1938
+ and _get_option("mode.data_manager", silent=True) == "block"
1939
+ )
1940
+
1941
+
1942
+ @pytest.fixture
1943
+ def using_infer_string() -> bool:
1944
+ """
1945
+ Fixture to check if infer string option is enabled.
1946
+ """
1947
+ return pd.options.future.infer_string is True
1948
+
1949
+
1950
+ warsaws = ["Europe/Warsaw", "dateutil/Europe/Warsaw"]
1951
+ if zoneinfo is not None:
1952
+ warsaws.append(zoneinfo.ZoneInfo("Europe/Warsaw")) # type: ignore[arg-type]
1953
+
1954
+
1955
+ @pytest.fixture(params=warsaws)
1956
+ def warsaw(request) -> str:
1957
+ """
1958
+ tzinfo for Europe/Warsaw using pytz, dateutil, or zoneinfo.
1959
+ """
1960
+ return request.param
1961
+
1962
+
1963
+ @pytest.fixture()
1964
+ def arrow_string_storage():
1965
+ return ("pyarrow", "pyarrow_numpy")
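The fixtures above all follow one pattern: `@pytest.fixture(params=...)` plus `return request.param`, so pytest re-runs every consuming test once per listed dtype. A minimal, self-contained sketch of that mechanism (the fixture below mirrors `any_int_ea_dtype`, but it and the test are hypothetical names defined locally, not part of this diff):

import pandas as pd
import pytest


@pytest.fixture(params=["Int8", "Int16", "Int32", "Int64"])
def nullable_int_dtype(request):
    # each param becomes one test invocation, e.g. ...[Int8], ...[Int16]
    return request.param


def test_na_survives_construction(nullable_int_dtype):
    # runs four times, once per dtype above; pd.NA is preserved
    ser = pd.Series([1, None, 3], dtype=nullable_int_dtype)
    assert ser.isna().sum() == 1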
venv/lib/python3.10/site-packages/pandas/pyproject.toml ADDED
@@ -0,0 +1,801 @@
+ [build-system]
+ # Minimum requirements for the build system to execute.
+ # See https://github.com/scipy/scipy/pull/12940 for the AIX issue.
+ requires = [
+     "meson-python==0.13.1",
+     "meson==1.2.1",
+     "wheel",
+     "Cython==3.0.5",  # Note: sync with setup.py, environment.yml and asv.conf.json
+     # Force numpy higher than 2.0rc1, so that built wheels are compatible
+     # with both numpy 1 and 2
+     "numpy>=2.0.0rc1",
+     "versioneer[toml]"
+ ]
+
+ build-backend = "mesonpy"
+
+ [project]
+ name = 'pandas'
+ dynamic = [
+     'version'
+ ]
+ description = 'Powerful data structures for data analysis, time series, and statistics'
+ readme = 'README.md'
+ authors = [
+     { name = 'The Pandas Development Team', email='[email protected]' },
+ ]
+ license = {file = 'LICENSE'}
+ requires-python = '>=3.9'
+ dependencies = [
+     "numpy>=1.22.4; python_version<'3.11'",
+     "numpy>=1.23.2; python_version=='3.11'",
+     "numpy>=1.26.0; python_version>='3.12'",
+     "python-dateutil>=2.8.2",
+     "pytz>=2020.1",
+     "tzdata>=2022.7"
+ ]
+ classifiers = [
+     'Development Status :: 5 - Production/Stable',
+     'Environment :: Console',
+     'Intended Audience :: Science/Research',
+     'License :: OSI Approved :: BSD License',
+     'Operating System :: OS Independent',
+     'Programming Language :: Cython',
+     'Programming Language :: Python',
+     'Programming Language :: Python :: 3',
+     'Programming Language :: Python :: 3 :: Only',
+     'Programming Language :: Python :: 3.9',
+     'Programming Language :: Python :: 3.10',
+     'Programming Language :: Python :: 3.11',
+     'Programming Language :: Python :: 3.12',
+     'Topic :: Scientific/Engineering'
+ ]
+
+ [project.urls]
+ homepage = 'https://pandas.pydata.org'
+ documentation = 'https://pandas.pydata.org/docs/'
+ repository = 'https://github.com/pandas-dev/pandas'
+
+ [project.entry-points."pandas_plotting_backends"]
+ matplotlib = "pandas:plotting._matplotlib"
+
+ [project.optional-dependencies]
+ test = ['hypothesis>=6.46.1', 'pytest>=7.3.2', 'pytest-xdist>=2.2.0']
+ pyarrow = ['pyarrow>=10.0.1']
+ performance = ['bottleneck>=1.3.6', 'numba>=0.56.4', 'numexpr>=2.8.4']
+ computation = ['scipy>=1.10.0', 'xarray>=2022.12.0']
+ fss = ['fsspec>=2022.11.0']
+ aws = ['s3fs>=2022.11.0']
+ gcp = ['gcsfs>=2022.11.0', 'pandas-gbq>=0.19.0']
+ excel = ['odfpy>=1.4.1', 'openpyxl>=3.1.0', 'python-calamine>=0.1.7', 'pyxlsb>=1.0.10', 'xlrd>=2.0.1', 'xlsxwriter>=3.0.5']
+ parquet = ['pyarrow>=10.0.1']
+ feather = ['pyarrow>=10.0.1']
+ hdf5 = [# blosc only available on conda (https://github.com/Blosc/python-blosc/issues/297)
+         #'blosc>=1.20.1',
+         'tables>=3.8.0']
+ spss = ['pyreadstat>=1.2.0']
+ postgresql = ['SQLAlchemy>=2.0.0', 'psycopg2>=2.9.6', 'adbc-driver-postgresql>=0.8.0']
+ mysql = ['SQLAlchemy>=2.0.0', 'pymysql>=1.0.2']
+ sql-other = ['SQLAlchemy>=2.0.0', 'adbc-driver-postgresql>=0.8.0', 'adbc-driver-sqlite>=0.8.0']
+ html = ['beautifulsoup4>=4.11.2', 'html5lib>=1.1', 'lxml>=4.9.2']
+ xml = ['lxml>=4.9.2']
+ plot = ['matplotlib>=3.6.3']
+ output-formatting = ['jinja2>=3.1.2', 'tabulate>=0.9.0']
+ clipboard = ['PyQt5>=5.15.9', 'qtpy>=2.3.0']
+ compression = ['zstandard>=0.19.0']
+ consortium-standard = ['dataframe-api-compat>=0.1.7']
+ all = ['adbc-driver-postgresql>=0.8.0',
+        'adbc-driver-sqlite>=0.8.0',
+        'beautifulsoup4>=4.11.2',
+        # blosc only available on conda (https://github.com/Blosc/python-blosc/issues/297)
+        #'blosc>=1.21.3',
+        'bottleneck>=1.3.6',
+        'dataframe-api-compat>=0.1.7',
+        'fastparquet>=2022.12.0',
+        'fsspec>=2022.11.0',
+        'gcsfs>=2022.11.0',
+        'html5lib>=1.1',
+        'hypothesis>=6.46.1',
+        'jinja2>=3.1.2',
+        'lxml>=4.9.2',
+        'matplotlib>=3.6.3',
+        'numba>=0.56.4',
+        'numexpr>=2.8.4',
+        'odfpy>=1.4.1',
+        'openpyxl>=3.1.0',
+        'pandas-gbq>=0.19.0',
+        'psycopg2>=2.9.6',
+        'pyarrow>=10.0.1',
+        'pymysql>=1.0.2',
+        'PyQt5>=5.15.9',
+        'pyreadstat>=1.2.0',
+        'pytest>=7.3.2',
+        'pytest-xdist>=2.2.0',
+        'python-calamine>=0.1.7',
+        'pyxlsb>=1.0.10',
+        'qtpy>=2.3.0',
+        'scipy>=1.10.0',
+        's3fs>=2022.11.0',
+        'SQLAlchemy>=2.0.0',
+        'tables>=3.8.0',
+        'tabulate>=0.9.0',
+        'xarray>=2022.12.0',
+        'xlrd>=2.0.1',
+        'xlsxwriter>=3.0.5',
+        'zstandard>=0.19.0']
+
+ # TODO: Remove after setuptools support is dropped.
+ [tool.setuptools]
+ include-package-data = true
+
+ [tool.setuptools.packages.find]
+ include = ["pandas", "pandas.*"]
+ namespaces = false
+
+ [tool.setuptools.exclude-package-data]
+ "*" = ["*.c", "*.h"]
+
+ # See the docstring in versioneer.py for instructions. Note that you must
+ # re-run 'versioneer.py setup' after changing this section, and commit the
+ # resulting files.
+ [tool.versioneer]
+ VCS = "git"
+ style = "pep440"
+ versionfile_source = "pandas/_version.py"
+ versionfile_build = "pandas/_version.py"
+ tag_prefix = "v"
+ parentdir_prefix = "pandas-"
+
+ [tool.meson-python.args]
+ setup = ['--vsenv']  # For Windows
+
+ [tool.cibuildwheel]
+ skip = "cp36-* cp37-* cp38-* pp* *_i686 *_ppc64le *_s390x"
+ build-verbosity = "3"
+ environment = {LDFLAGS="-Wl,--strip-all"}
+ # TODO: remove this once numpy 2.0 proper releases
+ # and specify numpy 2.0 as a dependency in [build-system] requires in pyproject.toml
+ before-build = "pip install numpy==2.0.0rc1"
+ test-requires = "hypothesis>=6.46.1 pytest>=7.3.2 pytest-xdist>=2.2.0"
+ test-command = """
+   PANDAS_CI='1' python -c 'import pandas as pd; \
+   pd.test(extra_args=["-m not clipboard and not single_cpu and not slow and not network and not db", "-n 2", "--no-strict-data-files"]); \
+   pd.test(extra_args=["-m not clipboard and single_cpu and not slow and not network and not db", "--no-strict-data-files"]);' \
+   """
+
+ [tool.cibuildwheel.windows]
+ # TODO: remove this once numpy 2.0 proper releases
+ # and specify numpy 2.0 as a dependency in [build-system] requires in pyproject.toml
+ before-build = "pip install delvewheel numpy==2.0.0rc1"
+ repair-wheel-command = "delvewheel repair -w {dest_dir} {wheel}"
+
+ [[tool.cibuildwheel.overrides]]
+ select = "*-musllinux*"
+ before-test = "apk update && apk add musl-locales"
+
+ [[tool.cibuildwheel.overrides]]
+ select = "*-win*"
+ # We test separately for Windows, since we use
+ # the windowsservercore docker image to check if any dlls are
+ # missing from the wheel
+ test-command = ""
+
+ [[tool.cibuildwheel.overrides]]
+ # Don't strip wheels on macOS.
+ # macOS doesn't support stripping wheels with linker
+ # https://github.com/MacPython/numpy-wheels/pull/87#issuecomment-624878264
+ select = "*-macosx*"
+ environment = {CFLAGS="-g0"}
+
+ [tool.black]
+ target-version = ['py39', 'py310']
+ required-version = '23.11.0'
+ exclude = '''
+ (
+     asv_bench/env
+   | \.egg
+   | \.git
+   | \.hg
+   | \.mypy_cache
+   | \.nox
+   | \.tox
+   | \.venv
+   | _build
+   | buck-out
+   | build
+   | dist
+   | setup.py
+ )
+ '''
+
+ [tool.ruff]
+ line-length = 88
+ target-version = "py310"
+ fix = true
+ unfixable = []
+ typing-modules = ["pandas._typing"]
+
+ select = [
+   # pyflakes
+   "F",
+   # pycodestyle
+   "E", "W",
+   # flake8-2020
+   "YTT",
+   # flake8-bugbear
+   "B",
+   # flake8-quotes
+   "Q",
+   # flake8-debugger
+   "T10",
+   # flake8-gettext
+   "INT",
+   # pylint
+   "PL",
+   # misc lints
+   "PIE",
+   # flake8-pyi
+   "PYI",
+   # tidy imports
+   "TID",
+   # implicit string concatenation
+   "ISC",
+   # type-checking imports
+   "TCH",
+   # comprehensions
+   "C4",
+   # pygrep-hooks
+   "PGH",
+   # Ruff-specific rules
+   "RUF",
+   # flake8-bandit: exec-builtin
+   "S102",
+   # numpy-legacy-random
+   "NPY002",
+   # Perflint
+   "PERF",
+   # flynt
+   "FLY",
+   # flake8-logging-format
+   "G",
+   # flake8-future-annotations
+   "FA",
+ ]
+
+ ignore = [
+   ### Intentionally disabled
+   # space before : (needed for how black formats slicing)
+   "E203",
+   # module level import not at top of file
+   "E402",
+   # do not assign a lambda expression, use a def
+   "E731",
+   # line break before binary operator
+   # "W503",  # not yet implemented
+   # line break after binary operator
+   # "W504",  # not yet implemented
+   # controversial
+   "B006",
+   # controversial
+   "B007",
+   # controversial
+   "B008",
+   # setattr is used to side-step mypy
+   "B009",
+   # getattr is used to side-step mypy
+   "B010",
+   # tests use assert False
+   "B011",
+   # tests use comparisons but not their returned value
+   "B015",
+   # false positives
+   "B019",
+   # Loop control variable overrides iterable it iterates
+   "B020",
+   # Function definition does not bind loop variable
+   "B023",
+   # Functions defined inside a loop must not use variables redefined in the loop
+   # "B301",  # not yet implemented
+   # Only works with python >=3.10
+   "B905",
+   # Too many arguments to function call
+   "PLR0913",
+   # Too many returns
+   "PLR0911",
+   # Too many branches
+   "PLR0912",
+   # Too many statements
+   "PLR0915",
+   # Redefined loop name
+   "PLW2901",
+   # Global statements are discouraged
+   "PLW0603",
+   # Docstrings should not be included in stubs
+   "PYI021",
+   # Use `typing.NamedTuple` instead of `collections.namedtuple`
+   "PYI024",
+   # No builtin `eval()` allowed
+   "PGH001",
+   # compare-to-empty-string
+   "PLC1901",
+   # while int | float can be shortened to float, the former is more explicit
+   "PYI041",
+   # incorrect-dict-iterator, flags valid Series.items usage
+   "PERF102",
+   # try-except-in-loop, becomes useless in Python 3.11
+   "PERF203",
+
+
+   ### TODO: Enable gradually
+   # Useless statement
+   "B018",
+   # Within an except clause, raise exceptions with ...
+   "B904",
+   # Magic number
+   "PLR2004",
+   # comparison-with-itself
+   "PLR0124",
+   # Consider `elif` instead of `else` then `if` to remove indentation level
+   "PLR5501",
+   # collection-literal-concatenation
+   "RUF005",
+   # pairwise-over-zipped (>=PY310 only)
+   "RUF007",
+   # explicit-f-string-type-conversion
+   "RUF010",
+   # mutable-class-default
+   "RUF012"
+ ]
+
+ exclude = [
+   "doc/sphinxext/*.py",
+   "doc/build/*.py",
+   "doc/temp/*.py",
+   ".eggs/*.py",
+   # vendored files
+   "pandas/util/version/*",
+   "pandas/io/clipboard/__init__.py",
+   # exclude asv benchmark environments from linting
+   "env",
+ ]
+
+ [tool.ruff.per-file-ignores]
+ # relative imports allowed for asv_bench
+ "asv_bench/*" = ["TID", "NPY002"]
+ # to be enabled gradually
+ "pandas/core/*" = ["PLR5501"]
+ "pandas/tests/*" = ["B028", "FLY"]
+ "scripts/*" = ["B028"]
+ # Keep this one enabled
+ "pandas/_typing.py" = ["TCH"]
+
+ [tool.pylint.messages_control]
+ max-line-length = 88
+ disable = [
+   # intentionally turned off
+   "bad-mcs-classmethod-argument",
+   "broad-except",
+   "c-extension-no-member",
+   "comparison-with-itself",
+   "consider-using-enumerate",
+   "import-error",
+   "import-outside-toplevel",
+   "invalid-name",
+   "invalid-unary-operand-type",
+   "line-too-long",
+   "no-else-continue",
+   "no-else-raise",
+   "no-else-return",
+   "no-member",
+   "no-name-in-module",
+   "not-an-iterable",
+   "overridden-final-method",
+   "pointless-statement",
+   "redundant-keyword-arg",
+   "singleton-comparison",
+   "too-many-ancestors",
+   "too-many-arguments",
+   "too-many-boolean-expressions",
+   "too-many-branches",
+   "too-many-function-args",
+   "too-many-instance-attributes",
+   "too-many-locals",
+   "too-many-nested-blocks",
+   "too-many-public-methods",
+   "too-many-return-statements",
+   "too-many-statements",
+   "unexpected-keyword-arg",
+   "ungrouped-imports",
+   "unsubscriptable-object",
+   "unsupported-assignment-operation",
+   "unsupported-membership-test",
+   "unused-import",
+   "use-dict-literal",
+   "use-implicit-booleaness-not-comparison",
+   "use-implicit-booleaness-not-len",
+   "wrong-import-order",
+   "wrong-import-position",
+   "redefined-loop-name",
+
+   # misc
+   "abstract-class-instantiated",
+   "no-value-for-parameter",
+   "undefined-variable",
+   "unpacking-non-sequence",
+   "used-before-assignment",
+
+   # pylint type "C": convention, for programming standard violation
+   "missing-class-docstring",
+   "missing-function-docstring",
+   "missing-module-docstring",
+   "superfluous-parens",
+   "too-many-lines",
+   "unidiomatic-typecheck",
+   "unnecessary-dunder-call",
+   "unnecessary-lambda-assignment",
+
+   # pylint type "R": refactor, for bad code smell
+   "consider-using-with",
+   "cyclic-import",
+   "duplicate-code",
+   "inconsistent-return-statements",
+   "redefined-argument-from-local",
+   "too-few-public-methods",
+
+   # pylint type "W": warning, for python specific problems
+   "abstract-method",
+   "arguments-differ",
+   "arguments-out-of-order",
+   "arguments-renamed",
+   "attribute-defined-outside-init",
+   "broad-exception-raised",
+   "comparison-with-callable",
+   "dangerous-default-value",
+   "deprecated-module",
+   "eval-used",
+   "expression-not-assigned",
+   "fixme",
+   "global-statement",
+   "invalid-overridden-method",
+   "keyword-arg-before-vararg",
+   "possibly-unused-variable",
+   "protected-access",
+   "raise-missing-from",
+   "redefined-builtin",
+   "redefined-outer-name",
+   "self-cls-assignment",
+   "signature-differs",
+   "super-init-not-called",
+   "try-except-raise",
+   "unnecessary-lambda",
+   "unused-argument",
+   "unused-variable",
+   "using-constant-test"
+ ]
+
+ [tool.pytest.ini_options]
+ # sync minversion with pyproject.toml & install.rst
+ minversion = "7.3.2"
+ addopts = "--strict-markers --strict-config --capture=no --durations=30 --junitxml=test-data.xml"
+ empty_parameter_set_mark = "fail_at_collect"
+ xfail_strict = true
+ testpaths = "pandas"
+ doctest_optionflags = [
+   "NORMALIZE_WHITESPACE",
+   "IGNORE_EXCEPTION_DETAIL",
+   "ELLIPSIS",
+ ]
+ filterwarnings = [
+   "error:::pandas",
+   "error::ResourceWarning",
+   "error::pytest.PytestUnraisableExceptionWarning",
+   # TODO(PY311-minimum): Specify EncodingWarning
+   # Ignore 3rd party EncodingWarning but raise on pandas'
+   "ignore:.*encoding.* argument not specified",
+   "error:.*encoding.* argument not specified::pandas",
+   "ignore:.*ssl.SSLSocket:pytest.PytestUnraisableExceptionWarning",
+   "ignore:.*ssl.SSLSocket:ResourceWarning",
+   # GH 44844: Can remove once minimum matplotlib version >= 3.7
+   "ignore:.*FileIO:pytest.PytestUnraisableExceptionWarning",
+   "ignore:.*BufferedRandom:ResourceWarning",
+   "ignore::ResourceWarning:asyncio",
+   # From plotting doctests
+   "ignore:More than 20 figures have been opened:RuntimeWarning",
+   # Will be fixed in numba 0.56: https://github.com/numba/numba/issues/7758
+   "ignore:`np.MachAr` is deprecated:DeprecationWarning:numba",
+   "ignore:.*urllib3:DeprecationWarning:botocore",
+   "ignore:Setuptools is replacing distutils.:UserWarning:_distutils_hack",
+   # https://github.com/PyTables/PyTables/issues/822
+   "ignore:a closed node found in the registry:UserWarning:tables",
+   "ignore:`np.object` is a deprecated:DeprecationWarning:tables",
+   "ignore:tostring:DeprecationWarning:tables",
+   "ignore:distutils Version classes are deprecated:DeprecationWarning:pandas_datareader",
+   "ignore:distutils Version classes are deprecated:DeprecationWarning:numexpr",
+   "ignore:distutils Version classes are deprecated:DeprecationWarning:fastparquet",
+   "ignore:distutils Version classes are deprecated:DeprecationWarning:fsspec",
+   # Can be removed once https://github.com/numpy/numpy/pull/24794 is merged
+   "ignore:.*In the future `np.long` will be defined as.*:FutureWarning",
+ ]
+ junit_family = "xunit2"
+ markers = [
+   "single_cpu: tests that should run on a single cpu only",
+   "slow: mark a test as slow",
+   "network: mark a test as network",
+   "db: tests requiring a database (mysql or postgres)",
+   "clipboard: mark a pd.read_clipboard test",
+   "arm_slow: mark a test as slow for arm64 architecture",
+   "skip_ubsan: Tests known to fail UBSAN check",
+ ]
+
+ [tool.mypy]
+ # Import discovery
+ mypy_path = "typings"
+ files = ["pandas", "typings"]
+ namespace_packages = false
+ explicit_package_bases = false
+ ignore_missing_imports = true
+ follow_imports = "normal"
+ follow_imports_for_stubs = false
+ no_site_packages = false
+ no_silence_site_packages = false
+ # Platform configuration
+ python_version = "3.11"
+ platform = "linux-64"
+ # Disallow dynamic typing
+ disallow_any_unimported = false  # TODO
+ disallow_any_expr = false  # TODO
+ disallow_any_decorated = false  # TODO
+ disallow_any_explicit = false  # TODO
+ disallow_any_generics = false  # TODO
+ disallow_subclassing_any = false  # TODO
+ # Untyped definitions and calls
+ disallow_untyped_calls = true
+ disallow_untyped_defs = true
+ disallow_incomplete_defs = true
+ check_untyped_defs = true
+ disallow_untyped_decorators = true
+ # None and Optional handling
+ no_implicit_optional = true
+ strict_optional = true
+ # Configuring warnings
+ warn_redundant_casts = true
+ warn_unused_ignores = true
+ warn_no_return = true
+ warn_return_any = false  # TODO
+ warn_unreachable = false  # GH#27396
+ # Suppressing errors
+ ignore_errors = false
+ enable_error_code = "ignore-without-code"
+ # Miscellaneous strictness flags
+ allow_untyped_globals = false
+ allow_redefinition = false
+ local_partial_types = false
+ implicit_reexport = true
+ strict_equality = true
+ # Configuring error messages
+ show_error_context = false
+ show_column_numbers = false
+ show_error_codes = true
+
+ [[tool.mypy.overrides]]
+ module = [
+   "pandas._config.config",  # TODO
+   "pandas._libs.*",
+   "pandas._testing.*",  # TODO
+   "pandas.arrays",  # TODO
+   "pandas.compat.numpy.function",  # TODO
+   "pandas.compat._optional",  # TODO
+   "pandas.compat.compressors",  # TODO
+   "pandas.compat.pickle_compat",  # TODO
+   "pandas.core._numba.executor",  # TODO
+   "pandas.core.array_algos.datetimelike_accumulations",  # TODO
+   "pandas.core.array_algos.masked_accumulations",  # TODO
+   "pandas.core.array_algos.masked_reductions",  # TODO
+   "pandas.core.array_algos.putmask",  # TODO
+   "pandas.core.array_algos.quantile",  # TODO
+   "pandas.core.array_algos.replace",  # TODO
+   "pandas.core.array_algos.take",  # TODO
+   "pandas.core.arrays.*",  # TODO
+   "pandas.core.computation.*",  # TODO
+   "pandas.core.dtypes.astype",  # TODO
+   "pandas.core.dtypes.cast",  # TODO
+   "pandas.core.dtypes.common",  # TODO
+   "pandas.core.dtypes.concat",  # TODO
+   "pandas.core.dtypes.dtypes",  # TODO
+   "pandas.core.dtypes.generic",  # TODO
+   "pandas.core.dtypes.inference",  # TODO
+   "pandas.core.dtypes.missing",  # TODO
+   "pandas.core.groupby.categorical",  # TODO
+   "pandas.core.groupby.generic",  # TODO
+   "pandas.core.groupby.grouper",  # TODO
+   "pandas.core.groupby.groupby",  # TODO
+   "pandas.core.groupby.ops",  # TODO
+   "pandas.core.indexers.*",  # TODO
+   "pandas.core.indexes.*",  # TODO
+   "pandas.core.interchange.column",  # TODO
+   "pandas.core.interchange.dataframe_protocol",  # TODO
+   "pandas.core.interchange.from_dataframe",  # TODO
+   "pandas.core.internals.*",  # TODO
+   "pandas.core.methods.*",  # TODO
+   "pandas.core.ops.array_ops",  # TODO
+   "pandas.core.ops.common",  # TODO
+   "pandas.core.ops.invalid",  # TODO
+   "pandas.core.ops.mask_ops",  # TODO
+   "pandas.core.ops.missing",  # TODO
+   "pandas.core.reshape.*",  # TODO
+   "pandas.core.strings.*",  # TODO
+   "pandas.core.tools.*",  # TODO
+   "pandas.core.window.common",  # TODO
+   "pandas.core.window.ewm",  # TODO
+   "pandas.core.window.expanding",  # TODO
+   "pandas.core.window.numba_",  # TODO
+   "pandas.core.window.online",  # TODO
+   "pandas.core.window.rolling",  # TODO
+   "pandas.core.accessor",  # TODO
+   "pandas.core.algorithms",  # TODO
+   "pandas.core.apply",  # TODO
+   "pandas.core.arraylike",  # TODO
+   "pandas.core.base",  # TODO
+   "pandas.core.common",  # TODO
+   "pandas.core.config_init",  # TODO
+   "pandas.core.construction",  # TODO
+   "pandas.core.flags",  # TODO
+   "pandas.core.frame",  # TODO
+   "pandas.core.generic",  # TODO
+   "pandas.core.indexing",  # TODO
+   "pandas.core.missing",  # TODO
+   "pandas.core.nanops",  # TODO
+   "pandas.core.resample",  # TODO
+   "pandas.core.roperator",  # TODO
+   "pandas.core.sample",  # TODO
+   "pandas.core.series",  # TODO
+   "pandas.core.sorting",  # TODO
+   "pandas.errors",  # TODO
+   "pandas.io.clipboard",  # TODO
+   "pandas.io.excel._base",  # TODO
+   "pandas.io.excel._odfreader",  # TODO
+   "pandas.io.excel._odswriter",  # TODO
+   "pandas.io.excel._openpyxl",  # TODO
+   "pandas.io.excel._pyxlsb",  # TODO
+   "pandas.io.excel._xlrd",  # TODO
+   "pandas.io.excel._xlsxwriter",  # TODO
+   "pandas.io.formats.console",  # TODO
+   "pandas.io.formats.css",  # TODO
+   "pandas.io.formats.excel",  # TODO
+   "pandas.io.formats.format",  # TODO
+   "pandas.io.formats.info",  # TODO
+   "pandas.io.formats.printing",  # TODO
+   "pandas.io.formats.style",  # TODO
+   "pandas.io.formats.style_render",  # TODO
+   "pandas.io.formats.xml",  # TODO
+   "pandas.io.json.*",  # TODO
+   "pandas.io.parsers.*",  # TODO
+   "pandas.io.sas.sas_xport",  # TODO
+   "pandas.io.sas.sas7bdat",  # TODO
+   "pandas.io.clipboards",  # TODO
+   "pandas.io.common",  # TODO
+   "pandas.io.gbq",  # TODO
+   "pandas.io.html",  # TODO
+   "pandas.io.parquet",  # TODO
+   "pandas.io.pytables",  # TODO
+   "pandas.io.sql",  # TODO
+   "pandas.io.stata",  # TODO
+   "pandas.io.xml",  # TODO
+   "pandas.plotting.*",  # TODO
+   "pandas.tests.*",
+   "pandas.tseries.frequencies",  # TODO
+   "pandas.tseries.holiday",  # TODO
+   "pandas.util._decorators",  # TODO
+   "pandas.util._doctools",  # TODO
+   "pandas.util._print_versions",  # TODO
+   "pandas.util._test_decorators",  # TODO
+   "pandas.util._validators",  # TODO
+   "pandas.util",  # TODO
+   "pandas._version",
+   "pandas.conftest",
+   "pandas"
+ ]
+ disallow_untyped_calls = false
+ disallow_untyped_defs = false
+ disallow_incomplete_defs = false
+
+ [[tool.mypy.overrides]]
+ module = [
+   "pandas.tests.*",
+   "pandas._version",
+   "pandas.io.clipboard",
+ ]
+ check_untyped_defs = false
+
+ [[tool.mypy.overrides]]
+ module = [
+   "pandas.tests.apply.test_series_apply",
+   "pandas.tests.arithmetic.conftest",
+   "pandas.tests.arrays.sparse.test_combine_concat",
+   "pandas.tests.dtypes.test_common",
+   "pandas.tests.frame.methods.test_to_records",
+   "pandas.tests.groupby.test_rank",
+   "pandas.tests.groupby.transform.test_transform",
+   "pandas.tests.indexes.interval.test_interval",
+   "pandas.tests.indexing.test_categorical",
+   "pandas.tests.io.excel.test_writers",
+   "pandas.tests.reductions.test_reductions",
+   "pandas.tests.test_expressions",
+ ]
+ ignore_errors = true
+
+ # To be kept consistent with "Import Formatting" section in contributing.rst
+ [tool.isort]
+ known_pre_libs = "pandas._config"
+ known_pre_core = ["pandas._libs", "pandas._typing", "pandas.util._*", "pandas.compat", "pandas.errors"]
+ known_dtypes = "pandas.core.dtypes"
+ known_post_core = ["pandas.tseries", "pandas.io", "pandas.plotting"]
+ sections = ["FUTURE", "STDLIB", "THIRDPARTY", "PRE_LIBS", "PRE_CORE", "DTYPES", "FIRSTPARTY", "POST_CORE", "LOCALFOLDER"]
+ profile = "black"
+ combine_as_imports = true
+ force_grid_wrap = 2
+ force_sort_within_sections = true
+ skip_glob = "env"
+ skip = "pandas/__init__.py"
+
+ [tool.pyright]
+ pythonVersion = "3.11"
+ typeCheckingMode = "basic"
+ useLibraryCodeForTypes = false
+ include = ["pandas", "typings"]
+ exclude = ["pandas/tests", "pandas/io/clipboard", "pandas/util/version", "pandas/core/_numba/extensions.py"]
+ # enable subset of "strict"
+ reportDuplicateImport = true
+ reportInconsistentConstructor = true
+ reportInvalidStubStatement = true
+ reportOverlappingOverload = true
+ reportPropertyTypeMismatch = true
+ reportUntypedClassDecorator = true
+ reportUntypedFunctionDecorator = true
+ reportUntypedNamedTuple = true
+ reportUnusedImport = true
+ disableBytesTypePromotions = true
+ # disable subset of "basic"
+ reportGeneralTypeIssues = false
+ reportMissingModuleSource = false
+ reportOptionalCall = false
+ reportOptionalIterable = false
+ reportOptionalMemberAccess = false
+ reportOptionalOperand = false
+ reportOptionalSubscript = false
+ reportPrivateImportUsage = false
+ reportUnboundVariable = false
+
+ [tool.coverage.run]
+ branch = true
+ omit = ["pandas/_typing.py", "pandas/_version.py"]
+ plugins = ["Cython.Coverage"]
+ source = ["pandas"]
+
+ [tool.coverage.report]
+ ignore_errors = false
+ show_missing = true
+ omit = ["pandas/_version.py"]
+ exclude_lines = [
+   # Have to re-enable the standard pragma
+   "pragma: no cover",
+   # Don't complain about missing debug-only code:
+   "def __repr__",
+   "if self.debug",
+   # Don't complain if tests don't hit defensive assertion code:
+   "raise AssertionError",
+   "raise NotImplementedError",
+   "AbstractMethodError",
+   # Don't complain if non-runnable code isn't run:
+   "if 0:",
+   "if __name__ == .__main__.:",
+   "if TYPE_CHECKING:",
+ ]
+
+ [tool.coverage.html]
+ directory = "coverage_html_report"
+
+ [tool.codespell]
+ ignore-words-list = "blocs, coo, hist, nd, sav, ser, recuse, nin, timere, expec, expecs"
+ ignore-regex = 'https://([\w/\.])+'
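Most of the `filterwarnings` entries above either escalate warnings that originate in pandas to errors (`error:::pandas`) or silence known third-party noise. The escalation uses the same mechanism the stdlib exposes; a minimal sketch of that behaviour (not part of this diff):

import warnings

# equivalent in spirit to a pytest "error::FutureWarning" filter line:
# matching warnings are raised as exceptions instead of printed
warnings.filterwarnings("error", category=FutureWarning)

try:
    warnings.warn("behaviour will change", FutureWarning)
except FutureWarning as exc:
    print(f"escalated to an exception: {exc}")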
venv/lib/python3.10/site-packages/pandas/testing.py ADDED
@@ -0,0 +1,18 @@
+ """
+ Public testing utility functions.
+ """
+
+
+ from pandas._testing import (
+     assert_extension_array_equal,
+     assert_frame_equal,
+     assert_index_equal,
+     assert_series_equal,
+ )
+
+ __all__ = [
+     "assert_extension_array_equal",
+     "assert_frame_equal",
+     "assert_series_equal",
+     "assert_index_equal",
+ ]
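These four re-exports are the public faces of the richer `pandas._testing` internals. A quick usage sketch of `assert_frame_equal`, which raises `AssertionError` with a readable diff on mismatch (not part of this diff):

import pandas as pd
from pandas.testing import assert_frame_equal

left = pd.DataFrame({"a": [1, 2]})
right = pd.DataFrame({"a": [1.0, 2.0]})

# passes: values match and dtype checking is relaxed; with the default
# check_dtype=True this would raise, since int64 != float64
assert_frame_equal(left, right, check_dtype=False)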
venv/lib/python3.10/site-packages/pandas/tests/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pandas/tests/frame/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pandas/tests/frame/common.py ADDED
@@ -0,0 +1,63 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING
+
+ from pandas import (
+     DataFrame,
+     concat,
+ )
+
+ if TYPE_CHECKING:
+     from pandas._typing import AxisInt
+
+
+ def _check_mixed_float(df, dtype=None):
+     # float16 is most likely to be upcast to float32
+     dtypes = {"A": "float32", "B": "float32", "C": "float16", "D": "float64"}
+     if isinstance(dtype, str):
+         dtypes = {k: dtype for k in dtypes}
+     elif isinstance(dtype, dict):
+         dtypes.update(dtype)
+     if dtypes.get("A"):
+         assert df.dtypes["A"] == dtypes["A"]
+     if dtypes.get("B"):
+         assert df.dtypes["B"] == dtypes["B"]
+     if dtypes.get("C"):
+         assert df.dtypes["C"] == dtypes["C"]
+     if dtypes.get("D"):
+         assert df.dtypes["D"] == dtypes["D"]
+
+
+ def _check_mixed_int(df, dtype=None):
+     dtypes = {"A": "int32", "B": "uint64", "C": "uint8", "D": "int64"}
+     if isinstance(dtype, str):
+         dtypes = {k: dtype for k in dtypes}
+     elif isinstance(dtype, dict):
+         dtypes.update(dtype)
+     if dtypes.get("A"):
+         assert df.dtypes["A"] == dtypes["A"]
+     if dtypes.get("B"):
+         assert df.dtypes["B"] == dtypes["B"]
+     if dtypes.get("C"):
+         assert df.dtypes["C"] == dtypes["C"]
+     if dtypes.get("D"):
+         assert df.dtypes["D"] == dtypes["D"]
+
+
+ def zip_frames(frames: list[DataFrame], axis: AxisInt = 1) -> DataFrame:
+     """
+     Take a list of frames and zip them together, under the
+     assumption that they all share the first frame's index/columns.
+
+     Returns
+     -------
+     new_frame : DataFrame
+     """
+     if axis == 1:
+         columns = frames[0].columns
+         zipped = [f.loc[:, c] for c in columns for f in frames]
+         return concat(zipped, axis=1)
+     else:
+         index = frames[0].index
+         zipped = [f.loc[i, :] for i in index for f in frames]
+         return DataFrame(zipped)
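With `axis=1`, `zip_frames` interleaves the columns of its inputs rather than concatenating them block-wise. A usage sketch, assuming the module is importable as `pandas.tests.frame.common` (as it is in this vendored install):

import pandas as pd
from pandas.tests.frame.common import zip_frames

df1 = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
df2 = df1 * 10

zipped = zip_frames([df1, df2], axis=1)
print(list(zipped.columns))  # ['a', 'a', 'b', 'b'] -- columns interleaved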
venv/lib/python3.10/site-packages/pandas/tests/frame/conftest.py ADDED
@@ -0,0 +1,100 @@
+ import numpy as np
+ import pytest
+
+ from pandas import (
+     DataFrame,
+     Index,
+     NaT,
+     date_range,
+ )
+
+
+ @pytest.fixture
+ def datetime_frame() -> DataFrame:
+     """
+     Fixture for DataFrame of floats with DatetimeIndex
+
+     Columns are ['A', 'B', 'C', 'D']
+     """
+     return DataFrame(
+         np.random.default_rng(2).standard_normal((100, 4)),
+         columns=Index(list("ABCD"), dtype=object),
+         index=date_range("2000-01-01", periods=100, freq="B"),
+     )
+
+
+ @pytest.fixture
+ def float_string_frame():
+     """
+     Fixture for DataFrame of floats and strings with index of unique strings
+
+     Columns are ['A', 'B', 'C', 'D', 'foo'].
+     """
+     df = DataFrame(
+         np.random.default_rng(2).standard_normal((30, 4)),
+         index=Index([f"foo_{i}" for i in range(30)], dtype=object),
+         columns=Index(list("ABCD"), dtype=object),
+     )
+     df["foo"] = "bar"
+     return df
+
+
+ @pytest.fixture
+ def mixed_float_frame():
+     """
+     Fixture for DataFrame of different float types with index of unique strings
+
+     Columns are ['A', 'B', 'C', 'D'].
+     """
+     df = DataFrame(
+         {
+             col: np.random.default_rng(2).random(30, dtype=dtype)
+             for col, dtype in zip(
+                 list("ABCD"), ["float32", "float32", "float32", "float64"]
+             )
+         },
+         index=Index([f"foo_{i}" for i in range(30)], dtype=object),
+     )
+     # float16 is not supported by numpy random, so cast afterwards
+     df["C"] = df["C"].astype("float16")
+     return df
+
+
+ @pytest.fixture
+ def mixed_int_frame():
+     """
+     Fixture for DataFrame of different int types with index of unique strings
+
+     Columns are ['A', 'B', 'C', 'D'].
+     """
+     return DataFrame(
+         {
+             col: np.ones(30, dtype=dtype)
+             for col, dtype in zip(list("ABCD"), ["int32", "uint64", "uint8", "int64"])
+         },
+         index=Index([f"foo_{i}" for i in range(30)], dtype=object),
+     )
+
+
+ @pytest.fixture
+ def timezone_frame():
+     """
+     Fixture for DataFrame of date_range Series with different time zones
+
+     Columns are ['A', 'B', 'C']; some entries are missing
+
+                A                         B                         C
+     0 2013-01-01 2013-01-01 00:00:00-05:00 2013-01-01 00:00:00+01:00
+     1 2013-01-02                       NaT                       NaT
+     2 2013-01-03 2013-01-03 00:00:00-05:00 2013-01-03 00:00:00+01:00
+     """
+     df = DataFrame(
+         {
+             "A": date_range("20130101", periods=3),
+             "B": date_range("20130101", periods=3, tz="US/Eastern"),
+             "C": date_range("20130101", periods=3, tz="CET"),
+         }
+     )
+     df.iloc[1, 1] = NaT
+     df.iloc[1, 2] = NaT
+     return df
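Tests anywhere under pandas/tests/frame/ can request these fixtures by name; pytest resolves them through this conftest.py. A hypothetical consuming test (not part of this diff):

def test_timezone_frame_has_two_missing_entries(timezone_frame):
    # row 1 of columns 'B' and 'C' was set to NaT in the fixture above
    assert timezone_frame.isna().sum().sum() == 2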
venv/lib/python3.10/site-packages/pandas/tests/frame/test_alter_axes.py ADDED
@@ -0,0 +1,30 @@
+ from datetime import datetime
+
+ import pytz
+
+ from pandas import DataFrame
+ import pandas._testing as tm
+
+
+ class TestDataFrameAlterAxes:
+     # Tests for setting index/columns attributes directly (i.e. __setattr__)
+
+     def test_set_axis_setattr_index(self):
+         # GH 6785
+         # set the index manually
+
+         df = DataFrame([{"ts": datetime(2014, 4, 1, tzinfo=pytz.utc), "foo": 1}])
+         expected = df.set_index("ts")
+         df.index = df["ts"]
+         df.pop("ts")
+         tm.assert_frame_equal(df, expected)
+
+     # Renaming
+
+     def test_assign_columns(self, float_frame):
+         float_frame["hi"] = "there"
+
+         df = float_frame.copy()
+         df.columns = ["foo", "bar", "baz", "quux", "foo2"]
+         tm.assert_series_equal(float_frame["C"], df["baz"], check_names=False)
+         tm.assert_series_equal(float_frame["hi"], df["foo2"], check_names=False)
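The second test above relies on assigning a list to `df.columns` relabeling in place without copying data; a standalone sketch of that behaviour (not part of this diff):

import pandas as pd

df = pd.DataFrame({"A": [1], "B": [2]})
df.columns = ["foo", "bar"]  # pure relabel; values are untouched
print(df.columns.tolist())   # ['foo', 'bar']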
venv/lib/python3.10/site-packages/pandas/tests/frame/test_arithmetic.py ADDED
@@ -0,0 +1,2136 @@
1
+ from collections import deque
2
+ from datetime import (
3
+ datetime,
4
+ timezone,
5
+ )
6
+ from enum import Enum
7
+ import functools
8
+ import operator
9
+ import re
10
+
11
+ import numpy as np
12
+ import pytest
13
+
14
+ from pandas._config import using_pyarrow_string_dtype
15
+
16
+ import pandas.util._test_decorators as td
17
+
18
+ import pandas as pd
19
+ from pandas import (
20
+ DataFrame,
21
+ Index,
22
+ MultiIndex,
23
+ Series,
24
+ )
25
+ import pandas._testing as tm
26
+ from pandas.core.computation import expressions as expr
27
+ from pandas.tests.frame.common import (
28
+ _check_mixed_float,
29
+ _check_mixed_int,
30
+ )
31
+
32
+
33
+ @pytest.fixture
34
+ def simple_frame():
35
+ """
36
+ Fixture for simple 3x3 DataFrame
37
+
38
+ Columns are ['one', 'two', 'three'], index is ['a', 'b', 'c'].
39
+
40
+ one two three
41
+ a 1.0 2.0 3.0
42
+ b 4.0 5.0 6.0
43
+ c 7.0 8.0 9.0
44
+ """
45
+ arr = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]])
46
+
47
+ return DataFrame(arr, columns=["one", "two", "three"], index=["a", "b", "c"])
48
+
49
+
50
+ @pytest.fixture(autouse=True, params=[0, 100], ids=["numexpr", "python"])
51
+ def switch_numexpr_min_elements(request, monkeypatch):
52
+ with monkeypatch.context() as m:
53
+ m.setattr(expr, "_MIN_ELEMENTS", request.param)
54
+ yield request.param
55
+
56
+
class DummyElement:
    def __init__(self, value, dtype) -> None:
        self.value = value
        self.dtype = np.dtype(dtype)

    def __array__(self, dtype=None, copy=None):
        return np.array(self.value, dtype=self.dtype)

    def __str__(self) -> str:
        return f"DummyElement({self.value}, {self.dtype})"

    def __repr__(self) -> str:
        return str(self)

    def astype(self, dtype, copy=False):
        self.dtype = dtype
        return self

    def view(self, dtype):
        return type(self)(self.value.view(dtype), dtype)

    def any(self, axis=None):
        return bool(self.value)


# -------------------------------------------------------------------
# Comparisons


class TestFrameComparisons:
    # Specifically _not_ flex-comparisons

    def test_comparison_with_categorical_dtype(self):
        # GH#12564

        df = DataFrame({"A": ["foo", "bar", "baz"]})
        exp = DataFrame({"A": [True, False, False]})

        res = df == "foo"
        tm.assert_frame_equal(res, exp)

        # casting to categorical shouldn't affect the result
        df["A"] = df["A"].astype("category")

        res = df == "foo"
        tm.assert_frame_equal(res, exp)

    def test_frame_in_list(self):
        # GH#12689 this should raise at the DataFrame level, not blocks
        df = DataFrame(
            np.random.default_rng(2).standard_normal((6, 4)), columns=list("ABCD")
        )
        msg = "The truth value of a DataFrame is ambiguous"
        with pytest.raises(ValueError, match=msg):
            df in [None]

    @pytest.mark.parametrize(
        "arg, arg2",
        [
            [
                {
                    "a": np.random.default_rng(2).integers(10, size=10),
                    "b": pd.date_range("20010101", periods=10),
                },
                {
                    "a": np.random.default_rng(2).integers(10, size=10),
                    "b": np.random.default_rng(2).integers(10, size=10),
                },
            ],
            [
                {
                    "a": np.random.default_rng(2).integers(10, size=10),
                    "b": np.random.default_rng(2).integers(10, size=10),
                },
                {
                    "a": np.random.default_rng(2).integers(10, size=10),
                    "b": pd.date_range("20010101", periods=10),
                },
            ],
            [
                {
                    "a": pd.date_range("20010101", periods=10),
                    "b": pd.date_range("20010101", periods=10),
                },
                {
                    "a": np.random.default_rng(2).integers(10, size=10),
                    "b": np.random.default_rng(2).integers(10, size=10),
                },
            ],
            [
                {
                    "a": np.random.default_rng(2).integers(10, size=10),
                    "b": pd.date_range("20010101", periods=10),
                },
                {
                    "a": pd.date_range("20010101", periods=10),
                    "b": pd.date_range("20010101", periods=10),
                },
            ],
        ],
    )
    def test_comparison_invalid(self, arg, arg2):
        # GH4968
        # invalid date/int comparisons
        x = DataFrame(arg)
        y = DataFrame(arg2)
        # we expect the result to match Series comparisons for
        # == and !=, inequalities should raise
        result = x == y
        expected = DataFrame(
            {col: x[col] == y[col] for col in x.columns},
            index=x.index,
            columns=x.columns,
        )
        tm.assert_frame_equal(result, expected)

        result = x != y
        expected = DataFrame(
            {col: x[col] != y[col] for col in x.columns},
            index=x.index,
            columns=x.columns,
        )
        tm.assert_frame_equal(result, expected)

        msgs = [
            r"Invalid comparison between dtype=datetime64\[ns\] and ndarray",
            "invalid type promotion",
            (
                # npdev 1.20.0
                r"The DTypes <class 'numpy.dtype\[.*\]'> and "
                r"<class 'numpy.dtype\[.*\]'> do not have a common DType."
            ),
        ]
        msg = "|".join(msgs)
        with pytest.raises(TypeError, match=msg):
            x >= y
        with pytest.raises(TypeError, match=msg):
            x > y
        with pytest.raises(TypeError, match=msg):
            x < y
        with pytest.raises(TypeError, match=msg):
            x <= y

    @pytest.mark.parametrize(
        "left, right",
        [
            ("gt", "lt"),
            ("lt", "gt"),
            ("ge", "le"),
            ("le", "ge"),
            ("eq", "eq"),
            ("ne", "ne"),
        ],
    )
    def test_timestamp_compare(self, left, right):
        # make sure we can compare Timestamps on the right AND left hand side
        # GH#4982
        df = DataFrame(
            {
                "dates1": pd.date_range("20010101", periods=10),
                "dates2": pd.date_range("20010102", periods=10),
                "intcol": np.random.default_rng(2).integers(1000000000, size=10),
                "floatcol": np.random.default_rng(2).standard_normal(10),
                "stringcol": [chr(100 + i) for i in range(10)],
            }
        )
        df.loc[np.random.default_rng(2).random(len(df)) > 0.5, "dates2"] = pd.NaT
        left_f = getattr(operator, left)
        right_f = getattr(operator, right)

        # no nats
        if left in ["eq", "ne"]:
            expected = left_f(df, pd.Timestamp("20010109"))
            result = right_f(pd.Timestamp("20010109"), df)
            tm.assert_frame_equal(result, expected)
        else:
            msg = (
                "'(<|>)=?' not supported between "
                "instances of 'numpy.ndarray' and 'Timestamp'"
            )
            with pytest.raises(TypeError, match=msg):
                left_f(df, pd.Timestamp("20010109"))
            with pytest.raises(TypeError, match=msg):
                right_f(pd.Timestamp("20010109"), df)
        # nats
        if left in ["eq", "ne"]:
            expected = left_f(df, pd.Timestamp("nat"))
            result = right_f(pd.Timestamp("nat"), df)
            tm.assert_frame_equal(result, expected)
        else:
            msg = (
                "'(<|>)=?' not supported between "
                "instances of 'numpy.ndarray' and 'NaTType'"
            )
            with pytest.raises(TypeError, match=msg):
                left_f(df, pd.Timestamp("nat"))
            with pytest.raises(TypeError, match=msg):
                right_f(pd.Timestamp("nat"), df)

    @pytest.mark.xfail(
        using_pyarrow_string_dtype(), reason="can't compare string and int"
    )
    def test_mixed_comparison(self):
        # GH#13128, GH#22163 != datetime64 vs non-dt64 should be False,
        # not raise TypeError
        # (this appears to be fixed before GH#22163, not sure when)
        df = DataFrame([["1989-08-01", 1], ["1989-08-01", 2]])
        other = DataFrame([["a", "b"], ["c", "d"]])

        result = df == other
        assert not result.any().any()

        result = df != other
        assert result.all().all()

    def test_df_boolean_comparison_error(self):
        # GH#4576, GH#22880
        # comparing DataFrame against list/tuple with len(obj) matching
        # len(df.columns) is supported as of GH#22800
        df = DataFrame(np.arange(6).reshape((3, 2)))

        expected = DataFrame([[False, False], [True, False], [False, False]])

        result = df == (2, 2)
        tm.assert_frame_equal(result, expected)

        result = df == [2, 2]
        tm.assert_frame_equal(result, expected)

    def test_df_float_none_comparison(self):
        df = DataFrame(
            np.random.default_rng(2).standard_normal((8, 3)),
            index=range(8),
            columns=["A", "B", "C"],
        )

        result = df.__eq__(None)
        assert not result.any().any()

    def test_df_string_comparison(self):
        df = DataFrame([{"a": 1, "b": "foo"}, {"a": 2, "b": "bar"}])
        mask_a = df.a > 1
        tm.assert_frame_equal(df[mask_a], df.loc[1:1, :])
        tm.assert_frame_equal(df[-mask_a], df.loc[0:0, :])

        mask_b = df.b == "foo"
        tm.assert_frame_equal(df[mask_b], df.loc[0:0, :])
        tm.assert_frame_equal(df[-mask_b], df.loc[1:1, :])


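# "Flex" comparisons are the named methods (eq, ne, gt, lt, ge, le), which
# unlike the comparison operators take axis/level arguments and align
# unaligned operands instead of requiring identically-labeled frames.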
class TestFrameFlexComparisons:
    # TODO: test_bool_flex_frame needs a better name
    @pytest.mark.parametrize("op", ["eq", "ne", "gt", "lt", "ge", "le"])
    def test_bool_flex_frame(self, op):
        data = np.random.default_rng(2).standard_normal((5, 3))
        other_data = np.random.default_rng(2).standard_normal((5, 3))
        df = DataFrame(data)
        other = DataFrame(other_data)
        ndim_5 = np.ones(df.shape + (1, 3))

        # DataFrame
        assert df.eq(df).values.all()
        assert not df.ne(df).values.any()
        f = getattr(df, op)
        o = getattr(operator, op)
        # No NAs
        tm.assert_frame_equal(f(other), o(df, other))
        # Unaligned
        part_o = other.loc[3:, 1:].copy()
        rs = f(part_o)
        xp = o(df, part_o.reindex(index=df.index, columns=df.columns))
        tm.assert_frame_equal(rs, xp)
        # ndarray
        tm.assert_frame_equal(f(other.values), o(df, other.values))
        # scalar
        tm.assert_frame_equal(f(0), o(df, 0))
        # NAs
        msg = "Unable to coerce to Series/DataFrame"
        tm.assert_frame_equal(f(np.nan), o(df, np.nan))
        with pytest.raises(ValueError, match=msg):
            f(ndim_5)

    @pytest.mark.parametrize("box", [np.array, Series])
    def test_bool_flex_series(self, box):
        # Series
        # list/tuple
        data = np.random.default_rng(2).standard_normal((5, 3))
        df = DataFrame(data)
        idx_ser = box(np.random.default_rng(2).standard_normal(5))
        col_ser = box(np.random.default_rng(2).standard_normal(3))

        idx_eq = df.eq(idx_ser, axis=0)
        col_eq = df.eq(col_ser)
        idx_ne = df.ne(idx_ser, axis=0)
        col_ne = df.ne(col_ser)
        tm.assert_frame_equal(col_eq, df == Series(col_ser))
        tm.assert_frame_equal(col_eq, -col_ne)
        tm.assert_frame_equal(idx_eq, -idx_ne)
        tm.assert_frame_equal(idx_eq, df.T.eq(idx_ser).T)
        tm.assert_frame_equal(col_eq, df.eq(list(col_ser)))
        tm.assert_frame_equal(idx_eq, df.eq(Series(idx_ser), axis=0))
        tm.assert_frame_equal(idx_eq, df.eq(list(idx_ser), axis=0))

        idx_gt = df.gt(idx_ser, axis=0)
        col_gt = df.gt(col_ser)
        idx_le = df.le(idx_ser, axis=0)
        col_le = df.le(col_ser)

        tm.assert_frame_equal(col_gt, df > Series(col_ser))
        tm.assert_frame_equal(col_gt, -col_le)
        tm.assert_frame_equal(idx_gt, -idx_le)
        tm.assert_frame_equal(idx_gt, df.T.gt(idx_ser).T)

        idx_ge = df.ge(idx_ser, axis=0)
        col_ge = df.ge(col_ser)
        idx_lt = df.lt(idx_ser, axis=0)
        col_lt = df.lt(col_ser)
        tm.assert_frame_equal(col_ge, df >= Series(col_ser))
        tm.assert_frame_equal(col_ge, -col_lt)
        tm.assert_frame_equal(idx_ge, -idx_lt)
        tm.assert_frame_equal(idx_ge, df.T.ge(idx_ser).T)

        idx_ser = Series(np.random.default_rng(2).standard_normal(5))
        col_ser = Series(np.random.default_rng(2).standard_normal(3))

    def test_bool_flex_frame_na(self):
        df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
        # NA
        df.loc[0, 0] = np.nan
        rs = df.eq(df)
        assert not rs.loc[0, 0]
        rs = df.ne(df)
        assert rs.loc[0, 0]
        rs = df.gt(df)
        assert not rs.loc[0, 0]
        rs = df.lt(df)
        assert not rs.loc[0, 0]
        rs = df.ge(df)
        assert not rs.loc[0, 0]
        rs = df.le(df)
        assert not rs.loc[0, 0]

    def test_bool_flex_frame_complex_dtype(self):
        # complex
        arr = np.array([np.nan, 1, 6, np.nan])
        arr2 = np.array([2j, np.nan, 7, None])
        df = DataFrame({"a": arr})
        df2 = DataFrame({"a": arr2})

        msg = "|".join(
            [
                "'>' not supported between instances of '.*' and 'complex'",
                r"unorderable types: .*complex\(\)",  # PY35
            ]
        )
        with pytest.raises(TypeError, match=msg):
            # inequalities are not well-defined for complex numbers
            df.gt(df2)
        with pytest.raises(TypeError, match=msg):
            # regression test that we get the same behavior for Series
            df["a"].gt(df2["a"])
        with pytest.raises(TypeError, match=msg):
            # Check that we match numpy behavior here
            df.values > df2.values

        rs = df.ne(df2)
        assert rs.values.all()

        arr3 = np.array([2j, np.nan, None])
        df3 = DataFrame({"a": arr3})

        with pytest.raises(TypeError, match=msg):
            # inequalities are not well-defined for complex numbers
            df3.gt(2j)
        with pytest.raises(TypeError, match=msg):
            # regression test that we get the same behavior for Series
            df3["a"].gt(2j)
        with pytest.raises(TypeError, match=msg):
            # Check that we match numpy behavior here
            df3.values > 2j

    def test_bool_flex_frame_object_dtype(self):
        # corner, dtype=object
        df1 = DataFrame({"col": ["foo", np.nan, "bar"]}, dtype=object)
        df2 = DataFrame({"col": ["foo", datetime.now(), "bar"]}, dtype=object)
        result = df1.ne(df2)
        exp = DataFrame({"col": [False, True, False]})
        tm.assert_frame_equal(result, exp)

    def test_flex_comparison_nat(self):
        # GH 15697, GH 22163 df.eq(pd.NaT) should behave like df == pd.NaT,
        # and _definitely_ not be NaN
        df = DataFrame([pd.NaT])

        result = df == pd.NaT
        # result.iloc[0, 0] is a np.bool_ object
        assert result.iloc[0, 0].item() is False

        result = df.eq(pd.NaT)
        assert result.iloc[0, 0].item() is False

        result = df != pd.NaT
        assert result.iloc[0, 0].item() is True

        result = df.ne(pd.NaT)
        assert result.iloc[0, 0].item() is True

    @pytest.mark.parametrize("opname", ["eq", "ne", "gt", "lt", "ge", "le"])
    def test_df_flex_cmp_constant_return_types(self, opname):
        # GH 15077, non-empty DataFrame
        df = DataFrame({"x": [1, 2, 3], "y": [1.0, 2.0, 3.0]})
        const = 2

        result = getattr(df, opname)(const).dtypes.value_counts()
        tm.assert_series_equal(
            result, Series([2], index=[np.dtype(bool)], name="count")
        )

    @pytest.mark.parametrize("opname", ["eq", "ne", "gt", "lt", "ge", "le"])
    def test_df_flex_cmp_constant_return_types_empty(self, opname):
        # GH 15077 empty DataFrame
        df = DataFrame({"x": [1, 2, 3], "y": [1.0, 2.0, 3.0]})
        const = 2

        empty = df.iloc[:0]
        result = getattr(empty, opname)(const).dtypes.value_counts()
        tm.assert_series_equal(
            result, Series([2], index=[np.dtype(bool)], name="count")
        )

    def test_df_flex_cmp_ea_dtype_with_ndarray_series(self):
        ii = pd.IntervalIndex.from_breaks([1, 2, 3])
        df = DataFrame({"A": ii, "B": ii})

        ser = Series([0, 0])
        res = df.eq(ser, axis=0)

        expected = DataFrame({"A": [False, False], "B": [False, False]})
        tm.assert_frame_equal(res, expected)

        ser2 = Series([1, 2], index=["A", "B"])
        res2 = df.eq(ser2, axis=1)
        tm.assert_frame_equal(res2, expected)


# -------------------------------------------------------------------
# Arithmetic


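# The flex arithmetic methods (add, sub, mul, ...) mirror the binary
# operators but additionally accept axis/level for broadcasting and a
# fill_value; fill_value combined with a Series operand raises
# NotImplementedError, as several tests below assert.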
class TestFrameFlexArithmetic:
    def test_floordiv_axis0(self):
        # make sure df.floordiv(ser, axis=0) matches the column-wise result
        arr = np.arange(3)
        ser = Series(arr)
        df = DataFrame({"A": ser, "B": ser})

        result = df.floordiv(ser, axis=0)

        expected = DataFrame({col: df[col] // ser for col in df.columns})

        tm.assert_frame_equal(result, expected)

        result2 = df.floordiv(ser.values, axis=0)
        tm.assert_frame_equal(result2, expected)

    def test_df_add_td64_columnwise(self):
        # GH 22534 Check that column-wise addition broadcasts correctly
        dti = pd.date_range("2016-01-01", periods=10)
        tdi = pd.timedelta_range("1", periods=10)
        tser = Series(tdi)
        df = DataFrame({0: dti, 1: tdi})

        result = df.add(tser, axis=0)
        expected = DataFrame({0: dti + tdi, 1: tdi + tdi})
        tm.assert_frame_equal(result, expected)

    def test_df_add_flex_filled_mixed_dtypes(self):
        # GH 19611
        dti = pd.date_range("2016-01-01", periods=3)
        ser = Series(["1 Day", "NaT", "2 Days"], dtype="timedelta64[ns]")
        df = DataFrame({"A": dti, "B": ser})
        other = DataFrame({"A": ser, "B": ser})
        fill = pd.Timedelta(days=1).to_timedelta64()
        result = df.add(other, fill_value=fill)

        expected = DataFrame(
            {
                "A": Series(
                    ["2016-01-02", "2016-01-03", "2016-01-05"], dtype="datetime64[ns]"
                ),
                "B": ser * 2,
            }
        )
        tm.assert_frame_equal(result, expected)

    def test_arith_flex_frame(
        self, all_arithmetic_operators, float_frame, mixed_float_frame
    ):
        # one instance of parametrized fixture
        op = all_arithmetic_operators

        def f(x, y):
            # r-versions not in operator-stdlib; get op without "r" and invert
            if op.startswith("__r"):
                return getattr(operator, op.replace("__r", "__"))(y, x)
            return getattr(operator, op)(x, y)

        result = getattr(float_frame, op)(2 * float_frame)
        expected = f(float_frame, 2 * float_frame)
        tm.assert_frame_equal(result, expected)

        # vs mix float
        result = getattr(mixed_float_frame, op)(2 * mixed_float_frame)
        expected = f(mixed_float_frame, 2 * mixed_float_frame)
        tm.assert_frame_equal(result, expected)
        _check_mixed_float(result, dtype={"C": None})

    @pytest.mark.parametrize("op", ["__add__", "__sub__", "__mul__"])
    def test_arith_flex_frame_mixed(
        self,
        op,
        int_frame,
        mixed_int_frame,
        mixed_float_frame,
        switch_numexpr_min_elements,
    ):
        f = getattr(operator, op)

        # vs mix int
        result = getattr(mixed_int_frame, op)(2 + mixed_int_frame)
        expected = f(mixed_int_frame, 2 + mixed_int_frame)

        # no overflow in the uint
        dtype = None
        if op in ["__sub__"]:
            dtype = {"B": "uint64", "C": None}
        elif op in ["__add__", "__mul__"]:
            dtype = {"C": None}
            if expr.USE_NUMEXPR and switch_numexpr_min_elements == 0:
                # when using numexpr, the casting rules are slightly different:
                # in the `2 + mixed_int_frame` operation, the int32 column becomes
                # an int64 column (not preserving dtype in operation with Python
                # scalar), and then the int32/int64 combo results in int64 result
                dtype["A"] = (2 + mixed_int_frame)["A"].dtype
        tm.assert_frame_equal(result, expected)
        _check_mixed_int(result, dtype=dtype)

        # vs mix float
        result = getattr(mixed_float_frame, op)(2 * mixed_float_frame)
        expected = f(mixed_float_frame, 2 * mixed_float_frame)
        tm.assert_frame_equal(result, expected)
        _check_mixed_float(result, dtype={"C": None})

        # vs plain int
        result = getattr(int_frame, op)(2 * int_frame)
        expected = f(int_frame, 2 * int_frame)
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("dim", range(3, 6))
    def test_arith_flex_frame_raise(self, all_arithmetic_operators, float_frame, dim):
        # one instance of parametrized fixture
        op = all_arithmetic_operators

        # Check that arrays with dim >= 3 raise
        arr = np.ones((1,) * dim)
        msg = "Unable to coerce to Series/DataFrame"
        with pytest.raises(ValueError, match=msg):
            getattr(float_frame, op)(arr)

    def test_arith_flex_frame_corner(self, float_frame):
        const_add = float_frame.add(1)
        tm.assert_frame_equal(const_add, float_frame + 1)

        # corner cases
        result = float_frame.add(float_frame[:0])
        expected = float_frame.sort_index() * np.nan
        tm.assert_frame_equal(result, expected)

        result = float_frame[:0].add(float_frame)
        expected = float_frame.sort_index() * np.nan
        tm.assert_frame_equal(result, expected)

        with pytest.raises(NotImplementedError, match="fill_value"):
            float_frame.add(float_frame.iloc[0], fill_value=3)

        with pytest.raises(NotImplementedError, match="fill_value"):
            float_frame.add(float_frame.iloc[0], axis="index", fill_value=3)

    @pytest.mark.parametrize("op", ["add", "sub", "mul", "mod"])
    def test_arith_flex_series_ops(self, simple_frame, op):
        # after arithmetic refactor, add truediv here
        df = simple_frame

        row = df.xs("a")
        col = df["two"]
        f = getattr(df, op)
        op = getattr(operator, op)
        tm.assert_frame_equal(f(row), op(df, row))
        tm.assert_frame_equal(f(col, axis=0), op(df.T, col).T)

    def test_arith_flex_series(self, simple_frame):
        df = simple_frame

        row = df.xs("a")
        col = df["two"]
        # special case for some reason
        tm.assert_frame_equal(df.add(row, axis=None), df + row)

        # cases which will be refactored after big arithmetic refactor
        tm.assert_frame_equal(df.div(row), df / row)
        tm.assert_frame_equal(df.div(col, axis=0), (df.T / col).T)

    @pytest.mark.parametrize("dtype", ["int64", "float64"])
    def test_arith_flex_series_broadcasting(self, dtype):
        # broadcasting issue in GH 7325
        df = DataFrame(np.arange(3 * 2).reshape((3, 2)), dtype=dtype)
        expected = DataFrame([[np.nan, np.inf], [1.0, 1.5], [1.0, 1.25]])
        result = df.div(df[0], axis="index")
        tm.assert_frame_equal(result, expected)

    def test_arith_flex_zero_len_raises(self):
        # GH 19522 passing fill_value to frame flex arith methods should
        # raise even in the zero-length special cases
        ser_len0 = Series([], dtype=object)
        df_len0 = DataFrame(columns=["A", "B"])
        df = DataFrame([[1, 2], [3, 4]], columns=["A", "B"])

        with pytest.raises(NotImplementedError, match="fill_value"):
            df.add(ser_len0, fill_value="E")

        with pytest.raises(NotImplementedError, match="fill_value"):
            df_len0.sub(df["A"], axis=None, fill_value=3)

    def test_flex_add_scalar_fill_value(self):
        # GH#12723
        dat = np.array([0, 1, np.nan, 3, 4, 5], dtype="float")
        df = DataFrame({"foo": dat}, index=range(6))

        exp = df.fillna(0).add(2)
        res = df.add(2, fill_value=0)
        tm.assert_frame_equal(res, exp)

    def test_sub_alignment_with_duplicate_index(self):
        # GH#5185 dup aligning operations should work
        df1 = DataFrame([1, 2, 3, 4, 5], index=[1, 2, 1, 2, 3])
        df2 = DataFrame([1, 2, 3], index=[1, 2, 3])
        expected = DataFrame([0, 2, 0, 2, 2], index=[1, 1, 2, 2, 3])
        result = df1.sub(df2)
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("op", ["__add__", "__mul__", "__sub__", "__truediv__"])
    def test_arithmetic_with_duplicate_columns(self, op):
        # operations
        df = DataFrame({"A": np.arange(10), "B": np.random.default_rng(2).random(10)})
        expected = getattr(df, op)(df)
        expected.columns = ["A", "A"]
        df.columns = ["A", "A"]
        result = getattr(df, op)(df)
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("level", [0, None])
    def test_broadcast_multiindex(self, level):
        # GH34388
        df1 = DataFrame({"A": [0, 1, 2], "B": [1, 2, 3]})
        df1.columns = df1.columns.set_names("L1")

        df2 = DataFrame({("A", "C"): [0, 0, 0], ("A", "D"): [0, 0, 0]})
        df2.columns = df2.columns.set_names(["L1", "L2"])

        result = df1.add(df2, level=level)
        expected = DataFrame({("A", "C"): [0, 1, 2], ("A", "D"): [0, 1, 2]})
        expected.columns = expected.columns.set_names(["L1", "L2"])

        tm.assert_frame_equal(result, expected)

    def test_frame_multiindex_operations(self):
        # GH 43321
        df = DataFrame(
            {2010: [1, 2, 3], 2020: [3, 4, 5]},
            index=MultiIndex.from_product(
                [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
            ),
        )

        series = Series(
            [0.4],
            index=MultiIndex.from_product([["b"], ["a"]], names=["mod", "scen"]),
        )

        expected = DataFrame(
            {2010: [1.4, 2.4, 3.4], 2020: [3.4, 4.4, 5.4]},
            index=MultiIndex.from_product(
                [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
            ),
        )
        result = df.add(series, axis=0)

        tm.assert_frame_equal(result, expected)

    def test_frame_multiindex_operations_series_index_to_frame_index(self):
        # GH 43321
        df = DataFrame(
            {2010: [1], 2020: [3]},
            index=MultiIndex.from_product([["a"], ["b"]], names=["scen", "mod"]),
        )

        series = Series(
            [10.0, 20.0, 30.0],
            index=MultiIndex.from_product(
                [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
            ),
        )

        expected = DataFrame(
            {2010: [11.0, 21, 31.0], 2020: [13.0, 23.0, 33.0]},
            index=MultiIndex.from_product(
                [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
            ),
        )
        result = df.add(series, axis=0)

        tm.assert_frame_equal(result, expected)

    def test_frame_multiindex_operations_no_align(self):
        df = DataFrame(
            {2010: [1, 2, 3], 2020: [3, 4, 5]},
            index=MultiIndex.from_product(
                [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
            ),
        )

        series = Series(
            [0.4],
            index=MultiIndex.from_product([["c"], ["a"]], names=["mod", "scen"]),
        )

        expected = DataFrame(
            {2010: np.nan, 2020: np.nan},
            index=MultiIndex.from_tuples(
                [
                    ("a", "b", 0),
                    ("a", "b", 1),
                    ("a", "b", 2),
                    ("a", "c", np.nan),
                ],
                names=["scen", "mod", "id"],
            ),
        )
        result = df.add(series, axis=0)

        tm.assert_frame_equal(result, expected)

    def test_frame_multiindex_operations_part_align(self):
        df = DataFrame(
            {2010: [1, 2, 3], 2020: [3, 4, 5]},
            index=MultiIndex.from_tuples(
                [
                    ("a", "b", 0),
                    ("a", "b", 1),
                    ("a", "c", 2),
                ],
                names=["scen", "mod", "id"],
            ),
        )

        series = Series(
            [0.4],
            index=MultiIndex.from_product([["b"], ["a"]], names=["mod", "scen"]),
        )

        expected = DataFrame(
            {2010: [1.4, 2.4, np.nan], 2020: [3.4, 4.4, np.nan]},
            index=MultiIndex.from_tuples(
                [
                    ("a", "b", 0),
                    ("a", "b", 1),
                    ("a", "c", 2),
                ],
                names=["scen", "mod", "id"],
            ),
        )
        result = df.add(series, axis=0)

        tm.assert_frame_equal(result, expected)


class TestFrameArithmetic:
    def test_td64_op_nat_casting(self):
        # Make sure we don't accidentally treat timedelta64(NaT) as datetime64
        # when calling dispatch_to_series in DataFrame arithmetic
        ser = Series(["NaT", "NaT"], dtype="timedelta64[ns]")
        df = DataFrame([[1, 2], [3, 4]])

        result = df * ser
        expected = DataFrame({0: ser, 1: ser})
        tm.assert_frame_equal(result, expected)

    def test_df_add_2d_array_rowlike_broadcasts(self):
        # GH#23000
        arr = np.arange(6).reshape(3, 2)
        df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])

        rowlike = arr[[1], :]  # shape --> (1, ncols)
        assert rowlike.shape == (1, df.shape[1])

        expected = DataFrame(
            [[2, 4], [4, 6], [6, 8]],
            columns=df.columns,
            index=df.index,
            # specify dtype explicitly to avoid failing
            # on 32bit builds
            dtype=arr.dtype,
        )
        result = df + rowlike
        tm.assert_frame_equal(result, expected)
        result = rowlike + df
        tm.assert_frame_equal(result, expected)

    def test_df_add_2d_array_collike_broadcasts(self):
        # GH#23000
        arr = np.arange(6).reshape(3, 2)
        df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])

        collike = arr[:, [1]]  # shape --> (nrows, 1)
        assert collike.shape == (df.shape[0], 1)

        expected = DataFrame(
            [[1, 2], [5, 6], [9, 10]],
            columns=df.columns,
            index=df.index,
            # specify dtype explicitly to avoid failing
            # on 32bit builds
            dtype=arr.dtype,
        )
        result = df + collike
        tm.assert_frame_equal(result, expected)
        result = collike + df
        tm.assert_frame_equal(result, expected)

    def test_df_arith_2d_array_rowlike_broadcasts(
        self, request, all_arithmetic_operators, using_array_manager
    ):
        # GH#23000
        opname = all_arithmetic_operators

        if using_array_manager and opname in ("__rmod__", "__rfloordiv__"):
            # TODO(ArrayManager) decide on dtypes
            td.mark_array_manager_not_yet_implemented(request)

        arr = np.arange(6).reshape(3, 2)
        df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])

        rowlike = arr[[1], :]  # shape --> (1, ncols)
        assert rowlike.shape == (1, df.shape[1])

        exvals = [
            getattr(df.loc["A"], opname)(rowlike.squeeze()),
            getattr(df.loc["B"], opname)(rowlike.squeeze()),
            getattr(df.loc["C"], opname)(rowlike.squeeze()),
        ]

        expected = DataFrame(exvals, columns=df.columns, index=df.index)

        result = getattr(df, opname)(rowlike)
        tm.assert_frame_equal(result, expected)

    def test_df_arith_2d_array_collike_broadcasts(
        self, request, all_arithmetic_operators, using_array_manager
    ):
        # GH#23000
        opname = all_arithmetic_operators

        if using_array_manager and opname in ("__rmod__", "__rfloordiv__"):
            # TODO(ArrayManager) decide on dtypes
            td.mark_array_manager_not_yet_implemented(request)

        arr = np.arange(6).reshape(3, 2)
        df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])

        collike = arr[:, [1]]  # shape --> (nrows, 1)
        assert collike.shape == (df.shape[0], 1)

        exvals = {
            True: getattr(df[True], opname)(collike.squeeze()),
            False: getattr(df[False], opname)(collike.squeeze()),
        }

        dtype = None
        if opname in ["__rmod__", "__rfloordiv__"]:
            # Series ops may return mixed int/float dtypes in cases where
            # DataFrame op will return all-float. So we upcast `expected`
            dtype = np.common_type(*(x.values for x in exvals.values()))

        expected = DataFrame(exvals, columns=df.columns, index=df.index, dtype=dtype)

        result = getattr(df, opname)(collike)
        tm.assert_frame_equal(result, expected)

    def test_df_bool_mul_int(self):
        # GH 22047, GH 22163 multiplication by 1 should result in int dtype,
        # not object dtype
        df = DataFrame([[False, True], [False, False]])
        result = df * 1

        # On appveyor this comes back as np.int32 instead of np.int64,
        # so we check dtype.kind instead of just dtype
        kinds = result.dtypes.apply(lambda x: x.kind)
        assert (kinds == "i").all()

        result = 1 * df
        kinds = result.dtypes.apply(lambda x: x.kind)
        assert (kinds == "i").all()

    def test_arith_mixed(self):
        left = DataFrame({"A": ["a", "b", "c"], "B": [1, 2, 3]})

        result = left + left
        expected = DataFrame({"A": ["aa", "bb", "cc"], "B": [2, 4, 6]})
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("col", ["A", "B"])
    def test_arith_getitem_commute(self, all_arithmetic_functions, col):
        df = DataFrame({"A": [1.1, 3.3], "B": [2.5, -3.9]})
        result = all_arithmetic_functions(df, 1)[col]
        expected = all_arithmetic_functions(df[col], 1)
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize(
        "values", [[1, 2], (1, 2), np.array([1, 2]), range(1, 3), deque([1, 2])]
    )
    def test_arith_alignment_non_pandas_object(self, values):
        # GH#17901
        df = DataFrame({"A": [1, 1], "B": [1, 1]})
        expected = DataFrame({"A": [2, 2], "B": [3, 3]})
        result = df + values
        tm.assert_frame_equal(result, expected)

    def test_arith_non_pandas_object(self):
        df = DataFrame(
            np.arange(1, 10, dtype="f8").reshape(3, 3),
            columns=["one", "two", "three"],
            index=["a", "b", "c"],
        )

        val1 = df.xs("a").values
        added = DataFrame(df.values + val1, index=df.index, columns=df.columns)
        tm.assert_frame_equal(df + val1, added)

        added = DataFrame((df.values.T + val1).T, index=df.index, columns=df.columns)
        tm.assert_frame_equal(df.add(val1, axis=0), added)

        val2 = list(df["two"])

        added = DataFrame(df.values + val2, index=df.index, columns=df.columns)
        tm.assert_frame_equal(df + val2, added)

        added = DataFrame((df.values.T + val2).T, index=df.index, columns=df.columns)
        tm.assert_frame_equal(df.add(val2, axis="index"), added)

        val3 = np.random.default_rng(2).random(df.shape)
        added = DataFrame(df.values + val3, index=df.index, columns=df.columns)
        tm.assert_frame_equal(df.add(val3), added)

    def test_operations_with_interval_categories_index(self, all_arithmetic_operators):
        # GH#27415
        op = all_arithmetic_operators
        ind = pd.CategoricalIndex(pd.interval_range(start=0.0, end=2.0))
        data = [1, 2]
        df = DataFrame([data], columns=ind)
        num = 10
        result = getattr(df, op)(num)
        expected = DataFrame([[getattr(n, op)(num) for n in data]], columns=ind)
        tm.assert_frame_equal(result, expected)

    def test_frame_with_frame_reindex(self):
        # GH#31623
        df = DataFrame(
            {
                "foo": [pd.Timestamp("2019"), pd.Timestamp("2020")],
                "bar": [pd.Timestamp("2018"), pd.Timestamp("2021")],
            },
            columns=["foo", "bar"],
            dtype="M8[ns]",
        )
        df2 = df[["foo"]]

        result = df - df2

        expected = DataFrame(
            {"foo": [pd.Timedelta(0), pd.Timedelta(0)], "bar": [np.nan, np.nan]},
            columns=["bar", "foo"],
        )
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize(
        "value, dtype",
        [
            (1, "i8"),
            (1.0, "f8"),
            (2**63, "f8"),
            (1j, "complex128"),
            (2**63, "complex128"),
            (True, "bool"),
            (np.timedelta64(20, "ns"), "<m8[ns]"),
            (np.datetime64(20, "ns"), "<M8[ns]"),
        ],
    )
    @pytest.mark.parametrize(
        "op",
        [
            operator.add,
            operator.sub,
            operator.mul,
            operator.truediv,
            operator.mod,
            operator.pow,
        ],
        ids=lambda x: x.__name__,
    )
    def test_binop_other(self, op, value, dtype, switch_numexpr_min_elements):
        skip = {
            (operator.truediv, "bool"),
            (operator.pow, "bool"),
            (operator.add, "bool"),
            (operator.mul, "bool"),
        }

        elem = DummyElement(value, dtype)
        df = DataFrame({"A": [elem.value, elem.value]}, dtype=elem.dtype)

        invalid = {
            (operator.pow, "<M8[ns]"),
            (operator.mod, "<M8[ns]"),
            (operator.truediv, "<M8[ns]"),
            (operator.mul, "<M8[ns]"),
            (operator.add, "<M8[ns]"),
            (operator.pow, "<m8[ns]"),
            (operator.mul, "<m8[ns]"),
            (operator.sub, "bool"),
            (operator.mod, "complex128"),
        }

        if (op, dtype) in invalid:
            warn = None
            if (dtype == "<M8[ns]" and op == operator.add) or (
                dtype == "<m8[ns]" and op == operator.mul
            ):
                msg = None
            elif dtype == "complex128":
                msg = "ufunc 'remainder' not supported for the input types"
            elif op is operator.sub:
                msg = "numpy boolean subtract, the `-` operator, is "
                if (
                    dtype == "bool"
                    and expr.USE_NUMEXPR
                    and switch_numexpr_min_elements == 0
                ):
                    warn = UserWarning  # "evaluating in Python space because ..."
            else:
                msg = (
                    f"cannot perform __{op.__name__}__ with this "
                    "index type: (DatetimeArray|TimedeltaArray)"
                )

            with pytest.raises(TypeError, match=msg):
                with tm.assert_produces_warning(warn):
                    op(df, elem.value)

        elif (op, dtype) in skip:
            if op in [operator.add, operator.mul]:
                if expr.USE_NUMEXPR and switch_numexpr_min_elements == 0:
                    # "evaluating in Python space because ..."
                    warn = UserWarning
                else:
                    warn = None
                with tm.assert_produces_warning(warn):
                    op(df, elem.value)

            else:
                msg = "operator '.*' not implemented for .* dtypes"
                with pytest.raises(NotImplementedError, match=msg):
                    op(df, elem.value)

        else:
            with tm.assert_produces_warning(None):
                result = op(df, elem.value).dtypes
                expected = op(df, value).dtypes
            tm.assert_series_equal(result, expected)

    def test_arithmetic_midx_cols_different_dtypes(self):
        # GH#49769
        midx = MultiIndex.from_arrays([Series([1, 2]), Series([3, 4])])
        midx2 = MultiIndex.from_arrays([Series([1, 2], dtype="Int8"), Series([3, 4])])
        left = DataFrame([[1, 2], [3, 4]], columns=midx)
        right = DataFrame([[1, 2], [3, 4]], columns=midx2)
        result = left - right
        expected = DataFrame([[0, 0], [0, 0]], columns=midx)
        tm.assert_frame_equal(result, expected)

    def test_arithmetic_midx_cols_different_dtypes_different_order(self):
        # GH#49769
        midx = MultiIndex.from_arrays([Series([1, 2]), Series([3, 4])])
        midx2 = MultiIndex.from_arrays([Series([2, 1], dtype="Int8"), Series([4, 3])])
        left = DataFrame([[1, 2], [3, 4]], columns=midx)
        right = DataFrame([[1, 2], [3, 4]], columns=midx2)
        result = left - right
        expected = DataFrame([[-1, 1], [-1, 1]], columns=midx)
        tm.assert_frame_equal(result, expected)


def test_frame_with_zero_len_series_corner_cases():
    # GH#28600
    # easy all-float case
    df = DataFrame(
        np.random.default_rng(2).standard_normal(6).reshape(3, 2), columns=["A", "B"]
    )
    ser = Series(dtype=np.float64)

    result = df + ser
    expected = DataFrame(df.values * np.nan, columns=df.columns)
    tm.assert_frame_equal(result, expected)

    with pytest.raises(ValueError, match="not aligned"):
        # Automatic alignment for comparisons deprecated GH#36795, enforced 2.0
        df == ser

    # non-float case should not raise TypeError on comparison
    df2 = DataFrame(df.values.view("M8[ns]"), columns=df.columns)
    with pytest.raises(ValueError, match="not aligned"):
        # Automatic alignment for comparisons deprecated
        df2 == ser


def test_zero_len_frame_with_series_corner_cases():
    # GH#28600
    df = DataFrame(columns=["A", "B"], dtype=np.float64)
    ser = Series([1, 2], index=["A", "B"])

    result = df + ser
    expected = df
    tm.assert_frame_equal(result, expected)


def test_frame_single_columns_object_sum_axis_1():
    # GH 13758
    data = {
        "One": Series(["A", 1.2, np.nan]),
    }
    df = DataFrame(data)
    result = df.sum(axis=1)
    expected = Series(["A", 1.2, 0])
    tm.assert_series_equal(result, expected)


# -------------------------------------------------------------------
# Unsorted
# These arithmetic tests were previously in other files, eventually
# should be parametrized and put into tests.arithmetic

class TestFrameArithmeticUnsorted:
    def test_frame_add_tz_mismatch_converts_to_utc(self):
        rng = pd.date_range("1/1/2011", periods=10, freq="h", tz="US/Eastern")
        df = DataFrame(
            np.random.default_rng(2).standard_normal(len(rng)), index=rng, columns=["a"]
        )

        df_moscow = df.tz_convert("Europe/Moscow")
        result = df + df_moscow
        assert result.index.tz is timezone.utc

        result = df_moscow + df
        assert result.index.tz is timezone.utc

    def test_align_frame(self):
        rng = pd.period_range("1/1/2000", "1/1/2010", freq="Y")
        ts = DataFrame(
            np.random.default_rng(2).standard_normal((len(rng), 3)), index=rng
        )

        result = ts + ts[::2]
        expected = ts + ts
        expected.iloc[1::2] = np.nan
        tm.assert_frame_equal(result, expected)

        half = ts[::2]
        result = ts + half.take(np.random.default_rng(2).permutation(len(half)))
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize(
        "op", [operator.add, operator.sub, operator.mul, operator.truediv]
    )
    def test_operators_none_as_na(self, op):
        df = DataFrame(
            {"col1": [2, 5.0, 123, None], "col2": [1, 2, 3, 4]}, dtype=object
        )

        # since filling converts dtypes from object, changed expected to be
        # object
        msg = "Downcasting object dtype arrays"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            filled = df.fillna(np.nan)
        result = op(df, 3)
        expected = op(filled, 3).astype(object)
        expected[pd.isna(expected)] = np.nan
        tm.assert_frame_equal(result, expected)

        result = op(df, df)
        expected = op(filled, filled).astype(object)
        expected[pd.isna(expected)] = np.nan
        tm.assert_frame_equal(result, expected)

        msg = "Downcasting object dtype arrays"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = op(df, df.fillna(7))
        tm.assert_frame_equal(result, expected)

        msg = "Downcasting object dtype arrays"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = op(df.fillna(7), df)
        tm.assert_frame_equal(result, expected)

    @pytest.mark.parametrize("op,res", [("__eq__", False), ("__ne__", True)])
    # TODO: not sure what's correct here.
    @pytest.mark.filterwarnings("ignore:elementwise:FutureWarning")
    def test_logical_typeerror_with_non_valid(self, op, res, float_frame):
        # we are comparing floats vs a string
        result = getattr(float_frame, op)("foo")
        assert bool(result.all().all()) is res

    @pytest.mark.parametrize("op", ["add", "sub", "mul", "div", "truediv"])
    def test_binary_ops_align(self, op):
        # test aligning binary ops

        # GH 6681
        index = MultiIndex.from_product(
            [list("abc"), ["one", "two", "three"], [1, 2, 3]],
            names=["first", "second", "third"],
        )

        df = DataFrame(
            np.arange(27 * 3).reshape(27, 3),
            index=index,
            columns=["value1", "value2", "value3"],
        ).sort_index()

        idx = pd.IndexSlice
        opa = getattr(operator, op, None)
        if opa is None:
            return

        x = Series([1.0, 10.0, 100.0], [1, 2, 3])
        result = getattr(df, op)(x, level="third", axis=0)

        expected = pd.concat(
            [opa(df.loc[idx[:, :, i], :], v) for i, v in x.items()]
        ).sort_index()
        tm.assert_frame_equal(result, expected)

        x = Series([1.0, 10.0], ["two", "three"])
        result = getattr(df, op)(x, level="second", axis=0)

        expected = (
            pd.concat([opa(df.loc[idx[:, i], :], v) for i, v in x.items()])
            .reindex_like(df)
            .sort_index()
        )
        tm.assert_frame_equal(result, expected)

    def test_binary_ops_align_series_dataframe(self):
        # GH9463 (alignment level of dataframe with series)

        midx = MultiIndex.from_product([["A", "B"], ["a", "b"]])
        df = DataFrame(np.ones((2, 4), dtype="int64"), columns=midx)
        s = Series({"a": 1, "b": 2})

        df2 = df.copy()
        df2.columns.names = ["lvl0", "lvl1"]
        s2 = s.copy()
        s2.index.name = "lvl1"

        # different cases of integer/string level names:
        res1 = df.mul(s, axis=1, level=1)
        res2 = df.mul(s2, axis=1, level=1)
        res3 = df2.mul(s, axis=1, level=1)
        res4 = df2.mul(s2, axis=1, level=1)
        res5 = df2.mul(s, axis=1, level="lvl1")
        res6 = df2.mul(s2, axis=1, level="lvl1")

        exp = DataFrame(
            np.array([[1, 2, 1, 2], [1, 2, 1, 2]], dtype="int64"), columns=midx
        )

        for res in [res1, res2]:
            tm.assert_frame_equal(res, exp)

        exp.columns.names = ["lvl0", "lvl1"]
        for res in [res3, res4, res5, res6]:
            tm.assert_frame_equal(res, exp)

    def test_add_with_dti_mismatched_tzs(self):
        base = pd.DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], tz="UTC")
        idx1 = base.tz_convert("Asia/Tokyo")[:2]
        idx2 = base.tz_convert("US/Eastern")[1:]

        df1 = DataFrame({"A": [1, 2]}, index=idx1)
        df2 = DataFrame({"A": [1, 1]}, index=idx2)
        exp = DataFrame({"A": [np.nan, 3, np.nan]}, index=base)
        tm.assert_frame_equal(df1 + df2, exp)

    def test_combineFrame(self, float_frame, mixed_float_frame, mixed_int_frame):
        frame_copy = float_frame.reindex(float_frame.index[::2])

        del frame_copy["D"]
        # adding NAs to first 5 values of column "C"
        frame_copy.loc[: frame_copy.index[4], "C"] = np.nan

        added = float_frame + frame_copy

        indexer = added["A"].dropna().index
        exp = (float_frame["A"] * 2).copy()

        tm.assert_series_equal(added["A"].dropna(), exp.loc[indexer])

        exp.loc[~exp.index.isin(indexer)] = np.nan
        tm.assert_series_equal(added["A"], exp.loc[added["A"].index])

        assert np.isnan(added["C"].reindex(frame_copy.index)[:5]).all()

        assert np.isnan(added["D"]).all()

        self_added = float_frame + float_frame
        tm.assert_index_equal(self_added.index, float_frame.index)

        added_rev = frame_copy + float_frame
        assert np.isnan(added["D"]).all()
        assert np.isnan(added_rev["D"]).all()

        # corner cases

        # empty
        plus_empty = float_frame + DataFrame()
        assert np.isnan(plus_empty.values).all()

        empty_plus = DataFrame() + float_frame
        assert np.isnan(empty_plus.values).all()

        empty_empty = DataFrame() + DataFrame()
        assert empty_empty.empty

        # out of order
        reverse = float_frame.reindex(columns=float_frame.columns[::-1])

        tm.assert_frame_equal(reverse + float_frame, float_frame * 2)

        # mix vs float64, upcast
        added = float_frame + mixed_float_frame
        _check_mixed_float(added, dtype="float64")
        added = mixed_float_frame + float_frame
        _check_mixed_float(added, dtype="float64")

        # mix vs mix
        added = mixed_float_frame + mixed_float_frame
        _check_mixed_float(added, dtype={"C": None})

        # with int
        added = float_frame + mixed_int_frame
        _check_mixed_float(added, dtype="float64")

    def test_combine_series(self, float_frame, mixed_float_frame, mixed_int_frame):
        # Series
        series = float_frame.xs(float_frame.index[0])

        added = float_frame + series

        for key, s in added.items():
            tm.assert_series_equal(s, float_frame[key] + series[key])

        larger_series = series.to_dict()
        larger_series["E"] = 1
        larger_series = Series(larger_series)
        larger_added = float_frame + larger_series

        for key, s in float_frame.items():
            tm.assert_series_equal(larger_added[key], s + series[key])
        assert "E" in larger_added
        assert np.isnan(larger_added["E"]).all()

        # no upcast needed
        added = mixed_float_frame + series
        assert np.all(added.dtypes == series.dtype)

        # vs mix (upcast) as needed
        added = mixed_float_frame + series.astype("float32")
        _check_mixed_float(added, dtype={"C": None})
        added = mixed_float_frame + series.astype("float16")
        _check_mixed_float(added, dtype={"C": None})

        # these used to raise with numexpr as we are adding an int64 to a
        # uint64....weird vs int
        added = mixed_int_frame + (100 * series).astype("int64")
        _check_mixed_int(
            added, dtype={"A": "int64", "B": "float64", "C": "int64", "D": "int64"}
        )
        added = mixed_int_frame + (100 * series).astype("int32")
        _check_mixed_int(
            added, dtype={"A": "int32", "B": "float64", "C": "int32", "D": "int64"}
        )

    def test_combine_timeseries(self, datetime_frame):
        # TimeSeries
        ts = datetime_frame["A"]

        # 10890
        # we no longer allow auto timeseries broadcasting
        # and require explicit broadcasting
        added = datetime_frame.add(ts, axis="index")

        for key, col in datetime_frame.items():
            result = col + ts
            tm.assert_series_equal(added[key], result, check_names=False)
            assert added[key].name == key
            if col.name == ts.name:
                assert result.name == "A"
            else:
                assert result.name is None

        smaller_frame = datetime_frame[:-5]
        smaller_added = smaller_frame.add(ts, axis="index")

        tm.assert_index_equal(smaller_added.index, datetime_frame.index)

        smaller_ts = ts[:-5]
        smaller_added2 = datetime_frame.add(smaller_ts, axis="index")
        tm.assert_frame_equal(smaller_added, smaller_added2)

        # length 0, result is all-nan
        result = datetime_frame.add(ts[:0], axis="index")
        expected = DataFrame(
            np.nan, index=datetime_frame.index, columns=datetime_frame.columns
        )
        tm.assert_frame_equal(result, expected)

        # Frame is all-nan
        result = datetime_frame[:0].add(ts, axis="index")
        expected = DataFrame(
            np.nan, index=datetime_frame.index, columns=datetime_frame.columns
        )
        tm.assert_frame_equal(result, expected)

        # empty but with non-empty index
        frame = datetime_frame[:1].reindex(columns=[])
        result = frame.mul(ts, axis="index")
        assert len(result) == len(ts)

    def test_combineFunc(self, float_frame, mixed_float_frame):
        result = float_frame * 2
        tm.assert_numpy_array_equal(result.values, float_frame.values * 2)

        # vs mix
        result = mixed_float_frame * 2
        for c, s in result.items():
            tm.assert_numpy_array_equal(s.values, mixed_float_frame[c].values * 2)
        _check_mixed_float(result, dtype={"C": None})

        result = DataFrame() * 2
        assert result.index.equals(DataFrame().index)
        assert len(result.columns) == 0

    @pytest.mark.parametrize(
        "func",
        [operator.eq, operator.ne, operator.lt, operator.gt, operator.ge, operator.le],
    )
    def test_comparisons(self, simple_frame, float_frame, func):
        df1 = DataFrame(
            np.random.default_rng(2).standard_normal((30, 4)),
            columns=Index(list("ABCD"), dtype=object),
            index=pd.date_range("2000-01-01", periods=30, freq="B"),
        )
        df2 = df1.copy()

        row = simple_frame.xs("a")
        ndim_5 = np.ones(df1.shape + (1, 1, 1))

        result = func(df1, df2)
        tm.assert_numpy_array_equal(result.values, func(df1.values, df2.values))

        msg = (
            "Unable to coerce to Series/DataFrame, "
            "dimension must be <= 2: (30, 4, 1, 1, 1)"
        )
        with pytest.raises(ValueError, match=re.escape(msg)):
            func(df1, ndim_5)

        result2 = func(simple_frame, row)
        tm.assert_numpy_array_equal(
            result2.values, func(simple_frame.values, row.values)
        )

        result3 = func(float_frame, 0)
        tm.assert_numpy_array_equal(result3.values, func(float_frame.values, 0))

        msg = (
            r"Can only compare identically-labeled \(both index and columns\) "
            "DataFrame objects"
        )
        with pytest.raises(ValueError, match=msg):
            func(simple_frame, simple_frame[:2])

    def test_strings_to_numbers_comparisons_raises(self, compare_operators_no_eq_ne):
        # GH 11565
        df = DataFrame(
            {x: {"x": "foo", "y": "bar", "z": "baz"} for x in ["a", "b", "c"]}
        )

        f = getattr(operator, compare_operators_no_eq_ne)
        msg = "'[<>]=?' not supported between instances of 'str' and 'int'"
        with pytest.raises(TypeError, match=msg):
            f(df, 0)

    def test_comparison_protected_from_errstate(self):
        missing_df = DataFrame(
            np.ones((10, 4), dtype=np.float64),
            columns=Index(list("ABCD"), dtype=object),
        )
        missing_df.loc[missing_df.index[0], "A"] = np.nan
        with np.errstate(invalid="ignore"):
            expected = missing_df.values < 0
        with np.errstate(invalid="raise"):
            result = (missing_df < 0).values
        tm.assert_numpy_array_equal(result, expected)

    def test_boolean_comparison(self):
        # GH 4576
        # boolean comparisons with a tuple/list give unexpected results
        df = DataFrame(np.arange(6).reshape((3, 2)))
        b = np.array([2, 2])
        b_r = np.atleast_2d([2, 2])
        b_c = b_r.T
        lst = [2, 2, 2]
        tup = tuple(lst)

        # gt
        expected = DataFrame([[False, False], [False, True], [True, True]])
        result = df > b
        tm.assert_frame_equal(result, expected)

        result = df.values > b
        tm.assert_numpy_array_equal(result, expected.values)

        msg1d = "Unable to coerce to Series, length must be 2: given 3"
        msg2d = "Unable to coerce to DataFrame, shape must be"
        msg2db = "operands could not be broadcast together with shapes"
        with pytest.raises(ValueError, match=msg1d):
            # wrong shape
            df > lst

        with pytest.raises(ValueError, match=msg1d):
            # wrong shape
            df > tup

        # broadcasts like ndarray (GH#23000)
        result = df > b_r
        tm.assert_frame_equal(result, expected)

        result = df.values > b_r
        tm.assert_numpy_array_equal(result, expected.values)

        with pytest.raises(ValueError, match=msg2d):
            df > b_c

        with pytest.raises(ValueError, match=msg2db):
            df.values > b_c

        # ==
        expected = DataFrame([[False, False], [True, False], [False, False]])
        result = df == b
        tm.assert_frame_equal(result, expected)

        with pytest.raises(ValueError, match=msg1d):
            df == lst

        with pytest.raises(ValueError, match=msg1d):
            df == tup

        # broadcasts like ndarray (GH#23000)
        result = df == b_r
        tm.assert_frame_equal(result, expected)

        result = df.values == b_r
        tm.assert_numpy_array_equal(result, expected.values)

        with pytest.raises(ValueError, match=msg2d):
            df == b_c

        assert df.values.shape != b_c.shape

        # with alignment
        df = DataFrame(
            np.arange(6).reshape((3, 2)), columns=list("AB"), index=list("abc")
        )
        expected.index = df.index
        expected.columns = df.columns

        with pytest.raises(ValueError, match=msg1d):
            df == lst

        with pytest.raises(ValueError, match=msg1d):
            df == tup

    def test_inplace_ops_alignment(self):
        # inplace ops / ops alignment
        # GH 8511

        columns = list("abcdefg")
        X_orig = DataFrame(
            np.arange(10 * len(columns)).reshape(-1, len(columns)),
            columns=columns,
            index=range(10),
        )
        Z = 100 * X_orig.iloc[:, 1:-1].copy()
        block1 = list("bedcf")
        subs = list("bcdef")

        # add
        X = X_orig.copy()
        result1 = (X[block1] + Z).reindex(columns=subs)

        X[block1] += Z
        result2 = X.reindex(columns=subs)

        X = X_orig.copy()
        result3 = (X[block1] + Z[block1]).reindex(columns=subs)

        X[block1] += Z[block1]
        result4 = X.reindex(columns=subs)

        tm.assert_frame_equal(result1, result2)
        tm.assert_frame_equal(result1, result3)
        tm.assert_frame_equal(result1, result4)

        # sub
        X = X_orig.copy()
        result1 = (X[block1] - Z).reindex(columns=subs)

        X[block1] -= Z
        result2 = X.reindex(columns=subs)

        X = X_orig.copy()
        result3 = (X[block1] - Z[block1]).reindex(columns=subs)

        X[block1] -= Z[block1]
        result4 = X.reindex(columns=subs)

        tm.assert_frame_equal(result1, result2)
        tm.assert_frame_equal(result1, result3)
        tm.assert_frame_equal(result1, result4)

    def test_inplace_ops_identity(self):
        # GH 5104
        # make sure that we are actually changing the object
        s_orig = Series([1, 2, 3])
        df_orig = DataFrame(
            np.random.default_rng(2).integers(0, 5, size=10).reshape(-1, 5)
        )

        # no dtype change
        s = s_orig.copy()
        s2 = s
        s += 1
        tm.assert_series_equal(s, s2)
        tm.assert_series_equal(s_orig + 1, s)
        assert s is s2
        assert s._mgr is s2._mgr

        df = df_orig.copy()
        df2 = df
        df += 1
        tm.assert_frame_equal(df, df2)
        tm.assert_frame_equal(df_orig + 1, df)
        assert df is df2
        assert df._mgr is df2._mgr

        # dtype change
        s = s_orig.copy()
        s2 = s
        s += 1.5
        tm.assert_series_equal(s, s2)
        tm.assert_series_equal(s_orig + 1.5, s)

        df = df_orig.copy()
        df2 = df
        df += 1.5
        tm.assert_frame_equal(df, df2)
        tm.assert_frame_equal(df_orig + 1.5, df)
        assert df is df2
        assert df._mgr is df2._mgr

        # mixed dtype
        arr = np.random.default_rng(2).integers(0, 10, size=5)
        df_orig = DataFrame({"A": arr.copy(), "B": "foo"})
        df = df_orig.copy()
        df2 = df
        df["A"] += 1
        expected = DataFrame({"A": arr.copy() + 1, "B": "foo"})
        tm.assert_frame_equal(df, expected)
        tm.assert_frame_equal(df2, expected)
        assert df._mgr is df2._mgr

        df = df_orig.copy()
        df2 = df
        df["A"] += 1.5
        expected = DataFrame({"A": arr.copy() + 1.5, "B": "foo"})
        tm.assert_frame_equal(df, expected)
        tm.assert_frame_equal(df2, expected)
        assert df._mgr is df2._mgr

    @pytest.mark.parametrize(
        "op",
        [
            "add",
            "and",
            pytest.param(
                "div",
                marks=pytest.mark.xfail(
                    raises=AttributeError, reason="__idiv__ not implemented"
                ),
            ),
            "floordiv",
            "mod",
            "mul",
            "or",
            "pow",
            "sub",
            "truediv",
            "xor",
        ],
    )
    def test_inplace_ops_identity2(self, op):
        df = DataFrame({"a": [1.0, 2.0, 3.0], "b": [1, 2, 3]})

        operand = 2
        if op in ("and", "or", "xor"):
            # cannot use floats for boolean ops
            df["a"] = [True, False, True]

        df_copy = df.copy()
        iop = f"__i{op}__"
        op = f"__{op}__"

        # no id change and value is correct
        getattr(df, iop)(operand)
        expected = getattr(df_copy, op)(operand)
        tm.assert_frame_equal(df, expected)
        expected = id(df)
        assert id(df) == expected

    @pytest.mark.parametrize(
        "val",
        [
            [1, 2, 3],
            (1, 2, 3),
            np.array([1, 2, 3], dtype=np.int64),
            range(1, 4),
        ],
    )
    def test_alignment_non_pandas(self, val):
        index = ["A", "B", "C"]
        columns = ["X", "Y", "Z"]
        df = DataFrame(
            np.random.default_rng(2).standard_normal((3, 3)),
            index=index,
            columns=columns,
        )

        align = DataFrame._align_for_op

        expected = DataFrame({"X": val, "Y": val, "Z": val}, index=df.index)
        tm.assert_frame_equal(align(df, val, axis=0)[1], expected)

        expected = DataFrame(
            {"X": [1, 1, 1], "Y": [2, 2, 2], "Z": [3, 3, 3]}, index=df.index
        )
        tm.assert_frame_equal(align(df, val, axis=1)[1], expected)

    @pytest.mark.parametrize("val", [[1, 2], (1, 2), np.array([1, 2]), range(1, 3)])
    def test_alignment_non_pandas_length_mismatch(self, val):
        index = ["A", "B", "C"]
        columns = ["X", "Y", "Z"]
        df = DataFrame(
            np.random.default_rng(2).standard_normal((3, 3)),
            index=index,
            columns=columns,
        )

        align = DataFrame._align_for_op
        # length mismatch
        msg = "Unable to coerce to Series, length must be 3: given 2"
        with pytest.raises(ValueError, match=msg):
            align(df, val, axis=0)

        with pytest.raises(ValueError, match=msg):
            align(df, val, axis=1)

    def test_alignment_non_pandas_index_columns(self):
        index = ["A", "B", "C"]
        columns = ["X", "Y", "Z"]
        df = DataFrame(
            np.random.default_rng(2).standard_normal((3, 3)),
            index=index,
            columns=columns,
        )

        align = DataFrame._align_for_op
        val = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
        tm.assert_frame_equal(
            align(df, val, axis=0)[1],
1876
+ DataFrame(val, index=df.index, columns=df.columns),
1877
+ )
1878
+ tm.assert_frame_equal(
1879
+ align(df, val, axis=1)[1],
1880
+ DataFrame(val, index=df.index, columns=df.columns),
1881
+ )
1882
+
1883
+ # shape mismatch
1884
+ msg = "Unable to coerce to DataFrame, shape must be"
1885
+ val = np.array([[1, 2, 3], [4, 5, 6]])
1886
+ with pytest.raises(ValueError, match=msg):
1887
+ align(df, val, axis=0)
1888
+
1889
+ with pytest.raises(ValueError, match=msg):
1890
+ align(df, val, axis=1)
1891
+
1892
+ val = np.zeros((3, 3, 3))
1893
+ msg = re.escape(
1894
+ "Unable to coerce to Series/DataFrame, dimension must be <= 2: (3, 3, 3)"
1895
+ )
1896
+ with pytest.raises(ValueError, match=msg):
1897
+ align(df, val, axis=0)
1898
+ with pytest.raises(ValueError, match=msg):
1899
+ align(df, val, axis=1)
1900
+
1901
+ def test_no_warning(self, all_arithmetic_operators):
1902
+ df = DataFrame({"A": [0.0, 0.0], "B": [0.0, None]})
1903
+ b = df["B"]
1904
+ with tm.assert_produces_warning(None):
1905
+ getattr(df, all_arithmetic_operators)(b)
1906
+
1907
+ def test_dunder_methods_binary(self, all_arithmetic_operators):
1908
+ # GH#??? frame.__foo__ should only accept one argument
1909
+ df = DataFrame({"A": [0.0, 0.0], "B": [0.0, None]})
1910
+ b = df["B"]
1911
+ with pytest.raises(TypeError, match="takes 2 positional arguments"):
1912
+ getattr(df, all_arithmetic_operators)(b, 0)
1913
+
1914
+ def test_align_int_fill_bug(self):
1915
+ # GH#910
1916
+ X = np.arange(10 * 10, dtype="float64").reshape(10, 10)
1917
+ Y = np.ones((10, 1), dtype=int)
1918
+
1919
+ df1 = DataFrame(X)
1920
+ df1["0.X"] = Y.squeeze()
1921
+
1922
+ df2 = df1.astype(float)
1923
+
1924
+ result = df1 - df1.mean()
1925
+ expected = df2 - df2.mean()
1926
+ tm.assert_frame_equal(result, expected)
1927
+
1928
+
1929
+ def test_pow_with_realignment():
1930
+ # GH#32685 pow has special semantics for operating with null values
1931
+ left = DataFrame({"A": [0, 1, 2]})
1932
+ right = DataFrame(index=[0, 1, 2])
1933
+
1934
+ result = left**right
1935
+ expected = DataFrame({"A": [np.nan, 1.0, np.nan]})
1936
+ tm.assert_frame_equal(result, expected)
1937
+
1938
+
1939
+ def test_dataframe_series_extension_dtypes():
1940
+ # https://github.com/pandas-dev/pandas/issues/34311
1941
+ df = DataFrame(
1942
+ np.random.default_rng(2).integers(0, 100, (10, 3)), columns=["a", "b", "c"]
1943
+ )
1944
+ ser = Series([1, 2, 3], index=["a", "b", "c"])
1945
+
1946
+ expected = df.to_numpy("int64") + ser.to_numpy("int64").reshape(-1, 3)
1947
+ expected = DataFrame(expected, columns=df.columns, dtype="Int64")
1948
+
1949
+ df_ea = df.astype("Int64")
1950
+ result = df_ea + ser
1951
+ tm.assert_frame_equal(result, expected)
1952
+ result = df_ea + ser.astype("Int64")
1953
+ tm.assert_frame_equal(result, expected)
1954
+
1955
+
1956
+ def test_dataframe_blockwise_slicelike():
1957
+ # GH#34367
1958
+ arr = np.random.default_rng(2).integers(0, 1000, (100, 10))
1959
+ df1 = DataFrame(arr)
1960
+ # Explicit cast to float to avoid implicit cast when setting nan
1961
+ df2 = df1.copy().astype({1: "float", 3: "float", 7: "float"})
1962
+ df2.iloc[0, [1, 3, 7]] = np.nan
1963
+
1964
+ # Explicit cast to float to avoid implicit cast when setting nan
1965
+ df3 = df1.copy().astype({5: "float"})
1966
+ df3.iloc[0, [5]] = np.nan
1967
+
1968
+ # Explicit cast to float to avoid implicit cast when setting nan
1969
+ df4 = df1.copy().astype({2: "float", 3: "float", 4: "float"})
1970
+ df4.iloc[0, np.arange(2, 5)] = np.nan
1971
+ # Explicit cast to float to avoid implicit cast when setting nan
1972
+ df5 = df1.copy().astype({4: "float", 5: "float", 6: "float"})
1973
+ df5.iloc[0, np.arange(4, 7)] = np.nan
1974
+
1975
+ for left, right in [(df1, df2), (df2, df3), (df4, df5)]:
1976
+ res = left + right
1977
+
1978
+ expected = DataFrame({i: left[i] + right[i] for i in left.columns})
1979
+ tm.assert_frame_equal(res, expected)
1980
+
1981
+
1982
+ @pytest.mark.parametrize(
1983
+ "df, col_dtype",
1984
+ [
1985
+ (DataFrame([[1.0, 2.0], [4.0, 5.0]], columns=list("ab")), "float64"),
1986
+ (
1987
+ DataFrame([[1.0, "b"], [4.0, "b"]], columns=list("ab")).astype(
1988
+ {"b": object}
1989
+ ),
1990
+ "object",
1991
+ ),
1992
+ ],
1993
+ )
1994
+ def test_dataframe_operation_with_non_numeric_types(df, col_dtype):
1995
+ # GH #22663
1996
+ expected = DataFrame([[0.0, np.nan], [3.0, np.nan]], columns=list("ab"))
1997
+ expected = expected.astype({"b": col_dtype})
1998
+ result = df + Series([-1.0], index=list("a"))
1999
+ tm.assert_frame_equal(result, expected)
2000
+
2001
+
2002
+ def test_arith_reindex_with_duplicates():
2003
+ # https://github.com/pandas-dev/pandas/issues/35194
2004
+ df1 = DataFrame(data=[[0]], columns=["second"])
2005
+ df2 = DataFrame(data=[[0, 0, 0]], columns=["first", "second", "second"])
2006
+ result = df1 + df2
2007
+ expected = DataFrame([[np.nan, 0, 0]], columns=["first", "second", "second"])
2008
+ tm.assert_frame_equal(result, expected)
2009
+
2010
+
2011
+ @pytest.mark.parametrize("to_add", [[Series([1, 1])], [Series([1, 1]), Series([1, 1])]])
2012
+ def test_arith_list_of_arraylike_raise(to_add):
2013
+ # GH 36702. Raise when trying to add list of array-like to DataFrame
2014
+ df = DataFrame({"x": [1, 2], "y": [1, 2]})
2015
+
2016
+ msg = f"Unable to coerce list of {type(to_add[0])} to Series/DataFrame"
2017
+ with pytest.raises(ValueError, match=msg):
2018
+ df + to_add
2019
+ with pytest.raises(ValueError, match=msg):
2020
+ to_add + df
2021
+
2022
+
2023
+ def test_inplace_arithmetic_series_update(using_copy_on_write, warn_copy_on_write):
2024
+ # https://github.com/pandas-dev/pandas/issues/36373
2025
+ df = DataFrame({"A": [1, 2, 3]})
2026
+ df_orig = df.copy()
2027
+ series = df["A"]
2028
+ vals = series._values
2029
+
2030
+ with tm.assert_cow_warning(warn_copy_on_write):
2031
+ series += 1
2032
+ if using_copy_on_write:
2033
+ assert series._values is not vals
2034
+ tm.assert_frame_equal(df, df_orig)
2035
+ else:
2036
+ assert series._values is vals
2037
+
2038
+ expected = DataFrame({"A": [2, 3, 4]})
2039
+ tm.assert_frame_equal(df, expected)
2040
+
2041
+
2042
+ def test_arithmetic_multiindex_align():
2043
+ """
2044
+ Regression test for: https://github.com/pandas-dev/pandas/issues/33765
2045
+ """
2046
+ df1 = DataFrame(
2047
+ [[1]],
2048
+ index=["a"],
2049
+ columns=MultiIndex.from_product([[0], [1]], names=["a", "b"]),
2050
+ )
2051
+ df2 = DataFrame([[1]], index=["a"], columns=Index([0], name="a"))
2052
+ expected = DataFrame(
2053
+ [[0]],
2054
+ index=["a"],
2055
+ columns=MultiIndex.from_product([[0], [1]], names=["a", "b"]),
2056
+ )
2057
+ result = df1 - df2
2058
+ tm.assert_frame_equal(result, expected)
2059
+
2060
+
2061
+ def test_bool_frame_mult_float():
2062
+ # GH 18549
2063
+ df = DataFrame(True, list("ab"), list("cd"))
2064
+ result = df * 1.0
2065
+ expected = DataFrame(np.ones((2, 2)), list("ab"), list("cd"))
2066
+ tm.assert_frame_equal(result, expected)
2067
+
2068
+
2069
+ def test_frame_sub_nullable_int(any_int_ea_dtype):
2070
+ # GH 32822
2071
+ series1 = Series([1, 2, None], dtype=any_int_ea_dtype)
2072
+ series2 = Series([1, 2, 3], dtype=any_int_ea_dtype)
2073
+ expected = DataFrame([0, 0, None], dtype=any_int_ea_dtype)
2074
+ result = series1.to_frame() - series2.to_frame()
2075
+ tm.assert_frame_equal(result, expected)
2076
+
2077
+
2078
+ @pytest.mark.filterwarnings(
2079
+ "ignore:Passing a BlockManager|Passing a SingleBlockManager:DeprecationWarning"
2080
+ )
2081
+ def test_frame_op_subclass_nonclass_constructor():
2082
+ # GH#43201 subclass._constructor is a function, not the subclass itself
2083
+
2084
+ class SubclassedSeries(Series):
2085
+ @property
2086
+ def _constructor(self):
2087
+ return SubclassedSeries
2088
+
2089
+ @property
2090
+ def _constructor_expanddim(self):
2091
+ return SubclassedDataFrame
2092
+
2093
+ class SubclassedDataFrame(DataFrame):
2094
+ _metadata = ["my_extra_data"]
2095
+
2096
+ def __init__(self, my_extra_data, *args, **kwargs) -> None:
2097
+ self.my_extra_data = my_extra_data
2098
+ super().__init__(*args, **kwargs)
2099
+
2100
+ @property
2101
+ def _constructor(self):
2102
+ return functools.partial(type(self), self.my_extra_data)
2103
+
2104
+ @property
2105
+ def _constructor_sliced(self):
2106
+ return SubclassedSeries
2107
+
2108
+ sdf = SubclassedDataFrame("some_data", {"A": [1, 2, 3], "B": [4, 5, 6]})
2109
+ result = sdf * 2
2110
+ expected = SubclassedDataFrame("some_data", {"A": [2, 4, 6], "B": [8, 10, 12]})
2111
+ tm.assert_frame_equal(result, expected)
2112
+
2113
+ result = sdf + sdf
2114
+ tm.assert_frame_equal(result, expected)
2115
+
2116
+
2117
+ def test_enum_column_equality():
2118
+ Cols = Enum("Cols", "col1 col2")
2119
+
2120
+ q1 = DataFrame({Cols.col1: [1, 2, 3]})
2121
+ q2 = DataFrame({Cols.col1: [1, 2, 3]})
2122
+
2123
+ result = q1[Cols.col1] == q2[Cols.col1]
2124
+ expected = Series([True, True, True], name=Cols.col1)
2125
+
2126
+ tm.assert_series_equal(result, expected)
2127
+
2128
+
2129
+ def test_mixed_col_index_dtype():
2130
+ # GH 47382
2131
+ df1 = DataFrame(columns=list("abc"), data=1.0, index=[0])
2132
+ df2 = DataFrame(columns=list("abc"), data=0.0, index=[0])
2133
+ df1.columns = df2.columns.astype("string")
2134
+ result = df1 + df2
2135
+ expected = DataFrame(columns=list("abc"), data=1.0, index=[0])
2136
+ tm.assert_frame_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/frame/test_arrow_interface.py ADDED
@@ -0,0 +1,45 @@
+ import ctypes
+
+ import pytest
+
+ import pandas.util._test_decorators as td
+
+ import pandas as pd
+
+ pa = pytest.importorskip("pyarrow")
+
+
+ @td.skip_if_no("pyarrow", min_version="14.0")
+ def test_dataframe_arrow_interface():
+     df = pd.DataFrame({"a": [1, 2, 3], "b": ["a", "b", "c"]})
+
+     capsule = df.__arrow_c_stream__()
+     assert (
+         ctypes.pythonapi.PyCapsule_IsValid(
+             ctypes.py_object(capsule), b"arrow_array_stream"
+         )
+         == 1
+     )
+
+     table = pa.table(df)
+     expected = pa.table({"a": [1, 2, 3], "b": ["a", "b", "c"]})
+     assert table.equals(expected)
+
+     schema = pa.schema([("a", pa.int8()), ("b", pa.string())])
+     table = pa.table(df, schema=schema)
+     expected = expected.cast(schema)
+     assert table.equals(expected)
+
+
+ @td.skip_if_no("pyarrow", min_version="15.0")
+ def test_dataframe_to_arrow():
+     df = pd.DataFrame({"a": [1, 2, 3], "b": ["a", "b", "c"]})
+
+     table = pa.RecordBatchReader.from_stream(df).read_all()
+     expected = pa.table({"a": [1, 2, 3], "b": ["a", "b", "c"]})
+     assert table.equals(expected)
+
+     schema = pa.schema([("a", pa.int8()), ("b", pa.string())])
+     table = pa.RecordBatchReader.from_stream(df, schema=schema).read_all()
+     expected = expected.cast(schema)
+     assert table.equals(expected)
venv/lib/python3.10/site-packages/pandas/tests/frame/test_block_internals.py ADDED
@@ -0,0 +1,457 @@
+ from datetime import (
+     datetime,
+     timedelta,
+ )
+ import itertools
+
+ import numpy as np
+ import pytest
+
+ from pandas.errors import PerformanceWarning
+ import pandas.util._test_decorators as td
+
+ import pandas as pd
+ from pandas import (
+     Categorical,
+     DataFrame,
+     Series,
+     Timestamp,
+     date_range,
+     option_context,
+ )
+ import pandas._testing as tm
+ from pandas.core.internals.blocks import NumpyBlock
+
+ # Segregated collection of methods that require the BlockManager internal data
+ # structure
+
+
+ # TODO(ArrayManager) check which of those tests need to be rewritten to test the
+ # equivalent for ArrayManager
+ pytestmark = td.skip_array_manager_invalid_test
+
+
+ class TestDataFrameBlockInternals:
+     def test_setitem_invalidates_datetime_index_freq(self):
+         # GH#24096 altering a datetime64tz column inplace invalidates the
+         # `freq` attribute on the underlying DatetimeIndex
+
+         dti = date_range("20130101", periods=3, tz="US/Eastern")
+         ts = dti[1]
+
+         df = DataFrame({"B": dti})
+         assert df["B"]._values.freq is None
+
+         df.iloc[1, 0] = pd.NaT
+         assert df["B"]._values.freq is None
+
+         # check that the DatetimeIndex was not altered in place
+         assert dti.freq == "D"
+         assert dti[1] == ts
+
+     def test_cast_internals(self, float_frame):
+         msg = "Passing a BlockManager to DataFrame"
+         with tm.assert_produces_warning(
+             DeprecationWarning, match=msg, check_stacklevel=False
+         ):
+             casted = DataFrame(float_frame._mgr, dtype=int)
+         expected = DataFrame(float_frame._series, dtype=int)
+         tm.assert_frame_equal(casted, expected)
+
+         with tm.assert_produces_warning(
+             DeprecationWarning, match=msg, check_stacklevel=False
+         ):
+             casted = DataFrame(float_frame._mgr, dtype=np.int32)
+         expected = DataFrame(float_frame._series, dtype=np.int32)
+         tm.assert_frame_equal(casted, expected)
+
+     def test_consolidate(self, float_frame):
+         float_frame["E"] = 7.0
+         consolidated = float_frame._consolidate()
+         assert len(consolidated._mgr.blocks) == 1
+
+         # Ensure copy, do I want this?
+         recons = consolidated._consolidate()
+         assert recons is not consolidated
+         tm.assert_frame_equal(recons, consolidated)
+
+         float_frame["F"] = 8.0
+         assert len(float_frame._mgr.blocks) == 3
+
+         return_value = float_frame._consolidate_inplace()
+         assert return_value is None
+         assert len(float_frame._mgr.blocks) == 1
+
+     def test_consolidate_inplace(self, float_frame):
+         # triggers in-place consolidation
+         for letter in range(ord("A"), ord("Z")):
+             float_frame[chr(letter)] = chr(letter)
+
+     def test_modify_values(self, float_frame, using_copy_on_write):
+         if using_copy_on_write:
+             with pytest.raises(ValueError, match="read-only"):
+                 float_frame.values[5] = 5
+             assert (float_frame.values[5] != 5).all()
+             return
+
+         float_frame.values[5] = 5
+         assert (float_frame.values[5] == 5).all()
+
+         # unconsolidated
+         float_frame["E"] = 7.0
+         col = float_frame["E"]
+         float_frame.values[6] = 6
+         # as of 2.0 .values does not consolidate, so subsequent calls to .values
+         # do not share data
+         assert not (float_frame.values[6] == 6).all()
+
+         assert (col == 7).all()
+
+     def test_boolean_set_uncons(self, float_frame):
+         float_frame["E"] = 7.0
+
+         expected = float_frame.values.copy()
+         expected[expected > 1] = 2
+
+         float_frame[float_frame > 1] = 2
+         tm.assert_almost_equal(expected, float_frame.values)
+
+     def test_constructor_with_convert(self):
+         # this is actually mostly a test of lib.maybe_convert_objects
+         # #2845
+         df = DataFrame({"A": [2**63 - 1]})
+         result = df["A"]
+         expected = Series(np.asarray([2**63 - 1], np.int64), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [2**63]})
+         result = df["A"]
+         expected = Series(np.asarray([2**63], np.uint64), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [datetime(2005, 1, 1), True]})
+         result = df["A"]
+         expected = Series(
+             np.asarray([datetime(2005, 1, 1), True], np.object_), name="A"
+         )
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [None, 1]})
+         result = df["A"]
+         expected = Series(np.asarray([np.nan, 1], np.float64), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [1.0, 2]})
+         result = df["A"]
+         expected = Series(np.asarray([1.0, 2], np.float64), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [1.0 + 2.0j, 3]})
+         result = df["A"]
+         expected = Series(np.asarray([1.0 + 2.0j, 3], np.complex128), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [1.0 + 2.0j, 3.0]})
+         result = df["A"]
+         expected = Series(np.asarray([1.0 + 2.0j, 3.0], np.complex128), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [1.0 + 2.0j, True]})
+         result = df["A"]
+         expected = Series(np.asarray([1.0 + 2.0j, True], np.object_), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [1.0, None]})
+         result = df["A"]
+         expected = Series(np.asarray([1.0, np.nan], np.float64), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [1.0 + 2.0j, None]})
+         result = df["A"]
+         expected = Series(np.asarray([1.0 + 2.0j, np.nan], np.complex128), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [2.0, 1, True, None]})
+         result = df["A"]
+         expected = Series(np.asarray([2.0, 1, True, None], np.object_), name="A")
+         tm.assert_series_equal(result, expected)
+
+         df = DataFrame({"A": [2.0, 1, datetime(2006, 1, 1), None]})
+         result = df["A"]
+         expected = Series(
+             np.asarray([2.0, 1, datetime(2006, 1, 1), None], np.object_), name="A"
+         )
+         tm.assert_series_equal(result, expected)
+
+     def test_construction_with_mixed(self, float_string_frame, using_infer_string):
+         # test construction edge cases with mixed types
+
+         # f7u12, this does not work without extensive workaround
+         data = [
+             [datetime(2001, 1, 5), np.nan, datetime(2001, 1, 2)],
+             [datetime(2000, 1, 2), datetime(2000, 1, 3), datetime(2000, 1, 1)],
+         ]
+         df = DataFrame(data)
+
+         # check dtypes
+         result = df.dtypes
+         expected = Series({"datetime64[us]": 3})
+
+         # mixed-type frames
+         float_string_frame["datetime"] = datetime.now()
+         float_string_frame["timedelta"] = timedelta(days=1, seconds=1)
+         assert float_string_frame["datetime"].dtype == "M8[us]"
+         assert float_string_frame["timedelta"].dtype == "m8[us]"
+         result = float_string_frame.dtypes
+         expected = Series(
+             [np.dtype("float64")] * 4
+             + [
+                 np.dtype("object") if not using_infer_string else "string",
+                 np.dtype("datetime64[us]"),
+                 np.dtype("timedelta64[us]"),
+             ],
+             index=list("ABCD") + ["foo", "datetime", "timedelta"],
+         )
+         tm.assert_series_equal(result, expected)
+
+     def test_construction_with_conversions(self):
+         # convert from a numpy array of non-ns timedelta64; as of 2.0 this does
+         # *not* convert
+         arr = np.array([1, 2, 3], dtype="timedelta64[s]")
+         df = DataFrame(index=range(3))
+         df["A"] = arr
+         expected = DataFrame(
+             {"A": pd.timedelta_range("00:00:01", periods=3, freq="s")}, index=range(3)
+         )
+         tm.assert_numpy_array_equal(df["A"].to_numpy(), arr)
+
+         expected = DataFrame(
+             {
+                 "dt1": Timestamp("20130101"),
+                 "dt2": date_range("20130101", periods=3).astype("M8[s]"),
+                 # 'dt3' : date_range('20130101 00:00:01',periods=3,freq='s'),
+                 # FIXME: don't leave commented-out
+             },
+             index=range(3),
+         )
+         assert expected.dtypes["dt1"] == "M8[s]"
+         assert expected.dtypes["dt2"] == "M8[s]"
+
+         df = DataFrame(index=range(3))
+         df["dt1"] = np.datetime64("2013-01-01")
+         df["dt2"] = np.array(
+             ["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]"
+         )
+
+         # df['dt3'] = np.array(['2013-01-01 00:00:01','2013-01-01
+         # 00:00:02','2013-01-01 00:00:03'],dtype='datetime64[s]')
+         # FIXME: don't leave commented-out
+
+         tm.assert_frame_equal(df, expected)
+
+     def test_constructor_compound_dtypes(self):
+         # GH 5191
+         # compound dtypes should raise NotImplementedError
+
+         def f(dtype):
+             data = list(itertools.repeat((datetime(2001, 1, 1), "aa", 20), 9))
+             return DataFrame(data=data, columns=["A", "B", "C"], dtype=dtype)
+
+         msg = "compound dtypes are not implemented in the DataFrame constructor"
+         with pytest.raises(NotImplementedError, match=msg):
+             f([("A", "datetime64[h]"), ("B", "str"), ("C", "int32")])
+
+         # pre-2.0 these used to work (though results may be unexpected)
+         with pytest.raises(TypeError, match="argument must be"):
+             f("int64")
+         with pytest.raises(TypeError, match="argument must be"):
+             f("float64")
+
+         # 10822
+         msg = "^Unknown datetime string format, unable to parse: aa, at position 0$"
+         with pytest.raises(ValueError, match=msg):
+             f("M8[ns]")
+
+     def test_pickle(self, float_string_frame, timezone_frame):
+         empty_frame = DataFrame()
+
+         unpickled = tm.round_trip_pickle(float_string_frame)
+         tm.assert_frame_equal(float_string_frame, unpickled)
+
+         # buglet
+         float_string_frame._mgr.ndim
+
+         # empty
+         unpickled = tm.round_trip_pickle(empty_frame)
+         repr(unpickled)
+
+         # tz frame
+         unpickled = tm.round_trip_pickle(timezone_frame)
+         tm.assert_frame_equal(timezone_frame, unpickled)
+
+     def test_consolidate_datetime64(self):
+         # numpy vstack bug
+
+         df = DataFrame(
+             {
+                 "starting": pd.to_datetime(
+                     [
+                         "2012-06-21 00:00",
+                         "2012-06-23 07:00",
+                         "2012-06-23 16:30",
+                         "2012-06-25 08:00",
+                         "2012-06-26 12:00",
+                     ]
+                 ),
+                 "ending": pd.to_datetime(
+                     [
+                         "2012-06-23 07:00",
+                         "2012-06-23 16:30",
+                         "2012-06-25 08:00",
+                         "2012-06-26 12:00",
+                         "2012-06-27 08:00",
+                     ]
+                 ),
+                 "measure": [77, 65, 77, 0, 77],
+             }
+         )
+
+         ser_starting = df.starting
+         ser_starting.index = ser_starting.values
+         ser_starting = ser_starting.tz_localize("US/Eastern")
+         ser_starting = ser_starting.tz_convert("UTC")
+         ser_starting.index.name = "starting"
+
+         ser_ending = df.ending
+         ser_ending.index = ser_ending.values
+         ser_ending = ser_ending.tz_localize("US/Eastern")
+         ser_ending = ser_ending.tz_convert("UTC")
+         ser_ending.index.name = "ending"
+
+         df.starting = ser_starting.index
+         df.ending = ser_ending.index
+
+         tm.assert_index_equal(pd.DatetimeIndex(df.starting), ser_starting.index)
+         tm.assert_index_equal(pd.DatetimeIndex(df.ending), ser_ending.index)
+
+     def test_is_mixed_type(self, float_frame, float_string_frame):
+         assert not float_frame._is_mixed_type
+         assert float_string_frame._is_mixed_type
+
+     def test_stale_cached_series_bug_473(self, using_copy_on_write, warn_copy_on_write):
+         # this is chained, but ok
+         with option_context("chained_assignment", None):
+             Y = DataFrame(
+                 np.random.default_rng(2).random((4, 4)),
+                 index=("a", "b", "c", "d"),
+                 columns=("e", "f", "g", "h"),
+             )
+             repr(Y)
+             Y["e"] = Y["e"].astype("object")
+             with tm.raises_chained_assignment_error():
+                 Y["g"]["c"] = np.nan
+             repr(Y)
+             Y.sum()
+             Y["g"].sum()
+             if using_copy_on_write:
+                 assert not pd.isna(Y["g"]["c"])
+             else:
+                 assert pd.isna(Y["g"]["c"])
+
+     @pytest.mark.filterwarnings("ignore:Setting a value on a view:FutureWarning")
+     def test_strange_column_corruption_issue(self, using_copy_on_write):
+         # TODO(wesm): Unclear how exactly this is related to internal matters
+         df = DataFrame(index=[0, 1])
+         df[0] = np.nan
+         wasCol = {}
+
+         with tm.assert_produces_warning(
+             PerformanceWarning, raise_on_extra_warnings=False
+         ):
+             for i, dt in enumerate(df.index):
+                 for col in range(100, 200):
+                     if col not in wasCol:
+                         wasCol[col] = 1
+                         df[col] = np.nan
+                     if using_copy_on_write:
+                         df.loc[dt, col] = i
+                     else:
+                         df[col][dt] = i
+
+         myid = 100
+
+         first = len(df.loc[pd.isna(df[myid]), [myid]])
+         second = len(df.loc[pd.isna(df[myid]), [myid]])
+         assert first == second == 0
+
+     def test_constructor_no_pandas_array(self):
+         # Ensure that NumpyExtensionArray isn't allowed inside Series
+         # See https://github.com/pandas-dev/pandas/issues/23995 for more.
+         arr = Series([1, 2, 3]).array
+         result = DataFrame({"A": arr})
+         expected = DataFrame({"A": [1, 2, 3]})
+         tm.assert_frame_equal(result, expected)
+         assert isinstance(result._mgr.blocks[0], NumpyBlock)
+         assert result._mgr.blocks[0].is_numeric
+
+     def test_add_column_with_pandas_array(self):
+         # GH 26390
+         df = DataFrame({"a": [1, 2, 3, 4], "b": ["a", "b", "c", "d"]})
+         df["c"] = pd.arrays.NumpyExtensionArray(np.array([1, 2, None, 3], dtype=object))
+         df2 = DataFrame(
+             {
+                 "a": [1, 2, 3, 4],
+                 "b": ["a", "b", "c", "d"],
+                 "c": pd.arrays.NumpyExtensionArray(
+                     np.array([1, 2, None, 3], dtype=object)
+                 ),
+             }
+         )
+         assert type(df["c"]._mgr.blocks[0]) == NumpyBlock
+         assert df["c"]._mgr.blocks[0].is_object
+         assert type(df2["c"]._mgr.blocks[0]) == NumpyBlock
+         assert df2["c"]._mgr.blocks[0].is_object
+         tm.assert_frame_equal(df, df2)
+
+
+ def test_update_inplace_sets_valid_block_values(using_copy_on_write):
+     # https://github.com/pandas-dev/pandas/issues/33457
+     df = DataFrame({"a": Series([1, 2, None], dtype="category")})
+
+     # inplace update of a single column
+     if using_copy_on_write:
+         with tm.raises_chained_assignment_error():
+             df["a"].fillna(1, inplace=True)
+     else:
+         with tm.assert_produces_warning(FutureWarning, match="inplace method"):
+             df["a"].fillna(1, inplace=True)
+
+     # check we haven't put a Series into any block.values
+     assert isinstance(df._mgr.blocks[0].values, Categorical)
+
+     if not using_copy_on_write:
+         # smoketest for OP bug from GH#35731
+         assert df.isnull().sum().sum() == 0
+
+
+ def test_nonconsolidated_item_cache_take():
+     # https://github.com/pandas-dev/pandas/issues/35521
+
+     # create non-consolidated dataframe with object dtype columns
+     df = DataFrame()
+     df["col1"] = Series(["a"], dtype=object)
+     df["col2"] = Series([0], dtype=object)
+
+     # access column (item cache)
+     df["col1"] == "A"
+     # take operation
+     # (regression was that this consolidated but didn't reset item cache,
+     # resulting in an invalid cache and the .at operation not working properly)
+     df[df["col2"] == 0]
+
+     # now setting value should update actual dataframe
+     df.at[0, "col1"] = "A"
+
+     expected = DataFrame({"col1": ["A"], "col2": [0]}, dtype=object)
+     tm.assert_frame_equal(df, expected)
+     assert df.at[0, "col1"] == "A"
venv/lib/python3.10/site-packages/pandas/tests/frame/test_constructors.py ADDED
The diff for this file is too large to render. See raw diff
 
venv/lib/python3.10/site-packages/pandas/tests/frame/test_cumulative.py ADDED
@@ -0,0 +1,81 @@
+ """
+ Tests for DataFrame cumulative operations
+
+ See also
+ --------
+ tests.series.test_cumulative
+ """
+
+ import numpy as np
+ import pytest
+
+ from pandas import (
+     DataFrame,
+     Series,
+ )
+ import pandas._testing as tm
+
+
+ class TestDataFrameCumulativeOps:
+     # ---------------------------------------------------------------------
+     # Cumulative Operations - cumsum, cummax, ...
+
+     def test_cumulative_ops_smoke(self):
+         # it works
+         df = DataFrame({"A": np.arange(20)}, index=np.arange(20))
+         df.cummax()
+         df.cummin()
+         df.cumsum()
+
+         dm = DataFrame(np.arange(20).reshape(4, 5), index=range(4), columns=range(5))
+         # TODO(wesm): do something with this?
+         dm.cumsum()
+
+     def test_cumprod_smoke(self, datetime_frame):
+         datetime_frame.iloc[5:10, 0] = np.nan
+         datetime_frame.iloc[10:15, 1] = np.nan
+         datetime_frame.iloc[15:, 2] = np.nan
+
+         # ints
+         df = datetime_frame.fillna(0).astype(int)
+         df.cumprod(0)
+         df.cumprod(1)
+
+         # int32
+         df = datetime_frame.fillna(0).astype(np.int32)
+         df.cumprod(0)
+         df.cumprod(1)
+
+     @pytest.mark.parametrize("method", ["cumsum", "cumprod", "cummin", "cummax"])
+     def test_cumulative_ops_match_series_apply(self, datetime_frame, method):
+         datetime_frame.iloc[5:10, 0] = np.nan
+         datetime_frame.iloc[10:15, 1] = np.nan
+         datetime_frame.iloc[15:, 2] = np.nan
+
+         # axis = 0
+         result = getattr(datetime_frame, method)()
+         expected = datetime_frame.apply(getattr(Series, method))
+         tm.assert_frame_equal(result, expected)
+
+         # axis = 1
+         result = getattr(datetime_frame, method)(axis=1)
+         expected = datetime_frame.apply(getattr(Series, method), axis=1)
+         tm.assert_frame_equal(result, expected)
+
+         # fix issue TODO: GH ref?
+         assert np.shape(result) == np.shape(datetime_frame)
+
+     def test_cumsum_preserve_dtypes(self):
+         # GH#19296 don't incorrectly upcast to object
+         df = DataFrame({"A": [1, 2, 3], "B": [1, 2, 3.0], "C": [True, False, False]})
+
+         result = df.cumsum()
+
+         expected = DataFrame(
+             {
+                 "A": Series([1, 3, 6], dtype=np.int64),
+                 "B": Series([1, 3, 6], dtype=np.float64),
+                 "C": df["C"].cumsum(),
+             }
+         )
+         tm.assert_frame_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/frame/test_iteration.py ADDED
@@ -0,0 +1,160 @@
+ import datetime
+
+ import numpy as np
+ import pytest
+
+ from pandas.compat import (
+     IS64,
+     is_platform_windows,
+ )
+
+ from pandas import (
+     Categorical,
+     DataFrame,
+     Series,
+     date_range,
+ )
+ import pandas._testing as tm
+
+
+ class TestIteration:
+     def test_keys(self, float_frame):
+         assert float_frame.keys() is float_frame.columns
+
+     def test_iteritems(self):
+         df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=["a", "a", "b"])
+         for k, v in df.items():
+             assert isinstance(v, DataFrame._constructor_sliced)
+
+     def test_items(self):
+         # GH#17213, GH#13918
+         cols = ["a", "b", "c"]
+         df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=cols)
+         for c, (k, v) in zip(cols, df.items()):
+             assert c == k
+             assert isinstance(v, Series)
+             assert (df[k] == v).all()
+
+     def test_items_names(self, float_string_frame):
+         for k, v in float_string_frame.items():
+             assert v.name == k
+
+     def test_iter(self, float_frame):
+         assert list(float_frame) == list(float_frame.columns)
+
+     def test_iterrows(self, float_frame, float_string_frame):
+         for k, v in float_frame.iterrows():
+             exp = float_frame.loc[k]
+             tm.assert_series_equal(v, exp)
+
+         for k, v in float_string_frame.iterrows():
+             exp = float_string_frame.loc[k]
+             tm.assert_series_equal(v, exp)
+
+     def test_iterrows_iso8601(self):
+         # GH#19671
+         s = DataFrame(
+             {
+                 "non_iso8601": ["M1701", "M1802", "M1903", "M2004"],
+                 "iso8601": date_range("2000-01-01", periods=4, freq="ME"),
+             }
+         )
+         for k, v in s.iterrows():
+             exp = s.loc[k]
+             tm.assert_series_equal(v, exp)
+
+     def test_iterrows_corner(self):
+         # GH#12222
+         df = DataFrame(
+             {
+                 "a": [datetime.datetime(2015, 1, 1)],
+                 "b": [None],
+                 "c": [None],
+                 "d": [""],
+                 "e": [[]],
+                 "f": [set()],
+                 "g": [{}],
+             }
+         )
+         expected = Series(
+             [datetime.datetime(2015, 1, 1), None, None, "", [], set(), {}],
+             index=list("abcdefg"),
+             name=0,
+             dtype="object",
+         )
+         _, result = next(df.iterrows())
+         tm.assert_series_equal(result, expected)
+
+     def test_itertuples(self, float_frame):
+         for i, tup in enumerate(float_frame.itertuples()):
+             ser = DataFrame._constructor_sliced(tup[1:])
+             ser.name = tup[0]
+             expected = float_frame.iloc[i, :].reset_index(drop=True)
+             tm.assert_series_equal(ser, expected)
+
+     def test_itertuples_index_false(self):
+         df = DataFrame(
+             {"floats": np.random.default_rng(2).standard_normal(5), "ints": range(5)},
+             columns=["floats", "ints"],
+         )
+
+         for tup in df.itertuples(index=False):
+             assert isinstance(tup[1], int)
+
+     def test_itertuples_duplicate_cols(self):
+         df = DataFrame(data={"a": [1, 2, 3], "b": [4, 5, 6]})
+         dfaa = df[["a", "a"]]
+
+         assert list(dfaa.itertuples()) == [(0, 1, 1), (1, 2, 2), (2, 3, 3)]
+
+         # repr with int on 32-bit/windows
+         if not (is_platform_windows() or not IS64):
+             assert (
+                 repr(list(df.itertuples(name=None)))
+                 == "[(0, 1, 4), (1, 2, 5), (2, 3, 6)]"
+             )
+
+     def test_itertuples_tuple_name(self):
+         df = DataFrame(data={"a": [1, 2, 3], "b": [4, 5, 6]})
+         tup = next(df.itertuples(name="TestName"))
+         assert tup._fields == ("Index", "a", "b")
+         assert (tup.Index, tup.a, tup.b) == tup
+         assert type(tup).__name__ == "TestName"
+
+     def test_itertuples_disallowed_col_labels(self):
+         df = DataFrame(data={"def": [1, 2, 3], "return": [4, 5, 6]})
+         tup2 = next(df.itertuples(name="TestName"))
+         assert tup2 == (0, 1, 4)
+         assert tup2._fields == ("Index", "_1", "_2")
+
+     @pytest.mark.parametrize("limit", [254, 255, 1024])
+     @pytest.mark.parametrize("index", [True, False])
+     def test_itertuples_py2_3_field_limit_namedtuple(self, limit, index):
+         # GH#28282
+         df = DataFrame([{f"foo_{i}": f"bar_{i}" for i in range(limit)}])
+         result = next(df.itertuples(index=index))
+         assert isinstance(result, tuple)
+         assert hasattr(result, "_fields")
+
+     def test_sequence_like_with_categorical(self):
+         # GH#7839
+         # make sure can iterate
+         df = DataFrame(
+             {"id": [1, 2, 3, 4, 5, 6], "raw_grade": ["a", "b", "b", "a", "a", "e"]}
+         )
+         df["grade"] = Categorical(df["raw_grade"])
+
+         # basic sequencing testing
+         result = list(df.grade.values)
+         expected = np.array(df.grade.values).tolist()
+         tm.assert_almost_equal(result, expected)
+
+         # iteration
+         for t in df.itertuples(index=False):
+             str(t)
+
+         for row, s in df.iterrows():
+             str(s)
+
+         for c, col in df.items():
+             str(col)
venv/lib/python3.10/site-packages/pandas/tests/frame/test_logical_ops.py ADDED
@@ -0,0 +1,218 @@
+ import operator
+ import re
+
+ import numpy as np
+ import pytest
+
+ from pandas import (
+     CategoricalIndex,
+     DataFrame,
+     Interval,
+     Series,
+     isnull,
+ )
+ import pandas._testing as tm
+
+
+ class TestDataFrameLogicalOperators:
+     # &, |, ^
+
+     @pytest.mark.parametrize(
+         "left, right, op, expected",
+         [
+             (
+                 [True, False, np.nan],
+                 [True, False, True],
+                 operator.and_,
+                 [True, False, False],
+             ),
+             (
+                 [True, False, True],
+                 [True, False, np.nan],
+                 operator.and_,
+                 [True, False, False],
+             ),
+             (
+                 [True, False, np.nan],
+                 [True, False, True],
+                 operator.or_,
+                 [True, False, False],
+             ),
+             (
+                 [True, False, True],
+                 [True, False, np.nan],
+                 operator.or_,
+                 [True, False, True],
+             ),
+         ],
+     )
+     def test_logical_operators_nans(self, left, right, op, expected, frame_or_series):
+         # GH#13896
+         result = op(frame_or_series(left), frame_or_series(right))
+         expected = frame_or_series(expected)
+
+         tm.assert_equal(result, expected)
+
+     def test_logical_ops_empty_frame(self):
+         # GH#5808
+         # empty frames, non-mixed dtype
+         df = DataFrame(index=[1])
+
+         result = df & df
+         tm.assert_frame_equal(result, df)
+
+         result = df | df
+         tm.assert_frame_equal(result, df)
+
+         df2 = DataFrame(index=[1, 2])
+         result = df & df2
+         tm.assert_frame_equal(result, df2)
+
+         dfa = DataFrame(index=[1], columns=["A"])
+
+         result = dfa & dfa
+         expected = DataFrame(False, index=[1], columns=["A"])
+         tm.assert_frame_equal(result, expected)
+
+     def test_logical_ops_bool_frame(self):
+         # GH#5808
+         df1a_bool = DataFrame(True, index=[1], columns=["A"])
+
+         result = df1a_bool & df1a_bool
+         tm.assert_frame_equal(result, df1a_bool)
+
+         result = df1a_bool | df1a_bool
+         tm.assert_frame_equal(result, df1a_bool)
+
+     def test_logical_ops_int_frame(self):
+         # GH#5808
+         df1a_int = DataFrame(1, index=[1], columns=["A"])
+         df1a_bool = DataFrame(True, index=[1], columns=["A"])
+
+         result = df1a_int | df1a_bool
+         tm.assert_frame_equal(result, df1a_bool)
+
+         # Check that this matches Series behavior
+         res_ser = df1a_int["A"] | df1a_bool["A"]
+         tm.assert_series_equal(res_ser, df1a_bool["A"])
+
+     def test_logical_ops_invalid(self, using_infer_string):
+         # GH#5808
+
+         df1 = DataFrame(1.0, index=[1], columns=["A"])
+         df2 = DataFrame(True, index=[1], columns=["A"])
+         msg = re.escape("unsupported operand type(s) for |: 'float' and 'bool'")
+         with pytest.raises(TypeError, match=msg):
+             df1 | df2
+
+         df1 = DataFrame("foo", index=[1], columns=["A"])
+         df2 = DataFrame(True, index=[1], columns=["A"])
+         msg = re.escape("unsupported operand type(s) for |: 'str' and 'bool'")
+         if using_infer_string:
+             import pyarrow as pa
+
+             with pytest.raises(pa.lib.ArrowNotImplementedError, match="|has no kernel"):
+                 df1 | df2
+         else:
+             with pytest.raises(TypeError, match=msg):
+                 df1 | df2
+
+     def test_logical_operators(self):
+         def _check_bin_op(op):
+             result = op(df1, df2)
+             expected = DataFrame(
+                 op(df1.values, df2.values), index=df1.index, columns=df1.columns
+             )
+             assert result.values.dtype == np.bool_
+             tm.assert_frame_equal(result, expected)
+
+         def _check_unary_op(op):
+             result = op(df1)
+             expected = DataFrame(op(df1.values), index=df1.index, columns=df1.columns)
+             assert result.values.dtype == np.bool_
+             tm.assert_frame_equal(result, expected)
+
+         df1 = {
+             "a": {"a": True, "b": False, "c": False, "d": True, "e": True},
+             "b": {"a": False, "b": True, "c": False, "d": False, "e": False},
+             "c": {"a": False, "b": False, "c": True, "d": False, "e": False},
+             "d": {"a": True, "b": False, "c": False, "d": True, "e": True},
+             "e": {"a": True, "b": False, "c": False, "d": True, "e": True},
+         }
+
+         df2 = {
+             "a": {"a": True, "b": False, "c": True, "d": False, "e": False},
+             "b": {"a": False, "b": True, "c": False, "d": False, "e": False},
+             "c": {"a": True, "b": False, "c": True, "d": False, "e": False},
+             "d": {"a": False, "b": False, "c": False, "d": True, "e": False},
+             "e": {"a": False, "b": False, "c": False, "d": False, "e": True},
+         }
+
+         df1 = DataFrame(df1)
+         df2 = DataFrame(df2)
+
+         _check_bin_op(operator.and_)
+         _check_bin_op(operator.or_)
+         _check_bin_op(operator.xor)
+
+         _check_unary_op(operator.inv)  # TODO: belongs elsewhere
+
+     @pytest.mark.filterwarnings("ignore:Downcasting object dtype arrays:FutureWarning")
+     def test_logical_with_nas(self):
+         d = DataFrame({"a": [np.nan, False], "b": [True, True]})
+
+         # GH4947
+         # bool comparisons should return bool
+         result = d["a"] | d["b"]
+         expected = Series([False, True])
+         tm.assert_series_equal(result, expected)
+
+         # GH4604, automatic casting here
+         result = d["a"].fillna(False) | d["b"]
+         expected = Series([True, True])
+         tm.assert_series_equal(result, expected)
+
+         msg = "The 'downcast' keyword in fillna is deprecated"
+         with tm.assert_produces_warning(FutureWarning, match=msg):
+             result = d["a"].fillna(False, downcast=False) | d["b"]
+         expected = Series([True, True])
+         tm.assert_series_equal(result, expected)
+
+     def test_logical_ops_categorical_columns(self):
+         # GH#38367
+         intervals = [Interval(1, 2), Interval(3, 4)]
+         data = DataFrame(
+             [[1, np.nan], [2, np.nan]],
+             columns=CategoricalIndex(
+                 intervals, categories=intervals + [Interval(5, 6)]
+             ),
+         )
+         mask = DataFrame(
+             [[False, False], [False, False]], columns=data.columns, dtype=bool
+         )
+         result = mask | isnull(data)
+         expected = DataFrame(
+             [[False, True], [False, True]],
+             columns=CategoricalIndex(
+                 intervals, categories=intervals + [Interval(5, 6)]
+             ),
+         )
+         tm.assert_frame_equal(result, expected)
+
+     def test_int_dtype_different_index_not_bool(self):
+         # GH 52500
+         df1 = DataFrame([1, 2, 3], index=[10, 11, 23], columns=["a"])
+         df2 = DataFrame([10, 20, 30], index=[11, 10, 23], columns=["a"])
+         result = np.bitwise_xor(df1, df2)
+         expected = DataFrame([21, 8, 29], index=[10, 11, 23], columns=["a"])
+         tm.assert_frame_equal(result, expected)
+
+         result = df1 ^ df2
+         tm.assert_frame_equal(result, expected)
+
+     def test_different_dtypes_different_index_raises(self):
+         # GH 52538
+         df1 = DataFrame([1, 2], index=["a", "b"])
+         df2 = DataFrame([3, 4], index=["b", "c"])
+         with pytest.raises(TypeError, match="unsupported operand type"):
+             df1 & df2
venv/lib/python3.10/site-packages/pandas/tests/frame/test_nonunique_indexes.py ADDED
@@ -0,0 +1,337 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ DataFrame,
7
+ Series,
8
+ date_range,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestDataFrameNonuniqueIndexes:
14
+ def test_setattr_columns_vs_construct_with_columns(self):
15
+ # assignment
16
+ # GH 3687
17
+ arr = np.random.default_rng(2).standard_normal((3, 2))
18
+ idx = list(range(2))
19
+ df = DataFrame(arr, columns=["A", "A"])
20
+ df.columns = idx
21
+ expected = DataFrame(arr, columns=idx)
22
+ tm.assert_frame_equal(df, expected)
23
+
24
+ def test_setattr_columns_vs_construct_with_columns_datetimeindx(self):
25
+ idx = date_range("20130101", periods=4, freq="QE-NOV")
26
+ df = DataFrame(
27
+ [[1, 1, 1, 5], [1, 1, 2, 5], [2, 1, 3, 5]], columns=["a", "a", "a", "a"]
28
+ )
29
+ df.columns = idx
30
+ expected = DataFrame([[1, 1, 1, 5], [1, 1, 2, 5], [2, 1, 3, 5]], columns=idx)
31
+ tm.assert_frame_equal(df, expected)
32
+
33
+ def test_insert_with_duplicate_columns(self):
34
+ # insert
35
+ df = DataFrame(
36
+ [[1, 1, 1, 5], [1, 1, 2, 5], [2, 1, 3, 5]],
37
+ columns=["foo", "bar", "foo", "hello"],
38
+ )
39
+ df["string"] = "bah"
40
+ expected = DataFrame(
41
+ [[1, 1, 1, 5, "bah"], [1, 1, 2, 5, "bah"], [2, 1, 3, 5, "bah"]],
42
+ columns=["foo", "bar", "foo", "hello", "string"],
43
+ )
44
+ tm.assert_frame_equal(df, expected)
45
+ with pytest.raises(ValueError, match="Length of value"):
46
+ df.insert(0, "AnotherColumn", range(len(df.index) - 1))
47
+
48
+ # insert same dtype
49
+ df["foo2"] = 3
50
+ expected = DataFrame(
51
+ [[1, 1, 1, 5, "bah", 3], [1, 1, 2, 5, "bah", 3], [2, 1, 3, 5, "bah", 3]],
52
+ columns=["foo", "bar", "foo", "hello", "string", "foo2"],
53
+ )
54
+ tm.assert_frame_equal(df, expected)
55
+
56
+ # set (non-dup)
57
+ df["foo2"] = 4
58
+ expected = DataFrame(
59
+ [[1, 1, 1, 5, "bah", 4], [1, 1, 2, 5, "bah", 4], [2, 1, 3, 5, "bah", 4]],
60
+ columns=["foo", "bar", "foo", "hello", "string", "foo2"],
61
+ )
62
+ tm.assert_frame_equal(df, expected)
63
+ df["foo2"] = 3
64
+
65
+ # delete (non dup)
66
+ del df["bar"]
67
+ expected = DataFrame(
68
+ [[1, 1, 5, "bah", 3], [1, 2, 5, "bah", 3], [2, 3, 5, "bah", 3]],
69
+ columns=["foo", "foo", "hello", "string", "foo2"],
70
+ )
71
+ tm.assert_frame_equal(df, expected)
72
+
73
+ # try to delete again (its not consolidated)
74
+ del df["hello"]
75
+ expected = DataFrame(
76
+ [[1, 1, "bah", 3], [1, 2, "bah", 3], [2, 3, "bah", 3]],
77
+ columns=["foo", "foo", "string", "foo2"],
78
+ )
79
+ tm.assert_frame_equal(df, expected)
80
+
81
+ # consolidate
82
+ df = df._consolidate()
83
+ expected = DataFrame(
84
+ [[1, 1, "bah", 3], [1, 2, "bah", 3], [2, 3, "bah", 3]],
85
+ columns=["foo", "foo", "string", "foo2"],
86
+ )
87
+ tm.assert_frame_equal(df, expected)
88
+
89
+ # insert
90
+ df.insert(2, "new_col", 5.0)
91
+ expected = DataFrame(
92
+ [[1, 1, 5.0, "bah", 3], [1, 2, 5.0, "bah", 3], [2, 3, 5.0, "bah", 3]],
93
+ columns=["foo", "foo", "new_col", "string", "foo2"],
94
+ )
95
+ tm.assert_frame_equal(df, expected)
96
+
97
+ # insert a dup
98
+ with pytest.raises(ValueError, match="cannot insert"):
99
+ df.insert(2, "new_col", 4.0)
100
+
101
+ df.insert(2, "new_col", 4.0, allow_duplicates=True)
102
+ expected = DataFrame(
103
+ [
104
+ [1, 1, 4.0, 5.0, "bah", 3],
105
+ [1, 2, 4.0, 5.0, "bah", 3],
106
+ [2, 3, 4.0, 5.0, "bah", 3],
107
+ ],
108
+ columns=["foo", "foo", "new_col", "new_col", "string", "foo2"],
109
+ )
110
+ tm.assert_frame_equal(df, expected)
111
+
112
+ # delete (dup)
113
+ del df["foo"]
114
+ expected = DataFrame(
115
+ [[4.0, 5.0, "bah", 3], [4.0, 5.0, "bah", 3], [4.0, 5.0, "bah", 3]],
116
+ columns=["new_col", "new_col", "string", "foo2"],
117
+ )
118
+ tm.assert_frame_equal(df, expected)
119
+
120
+ def test_dup_across_dtypes(self):
121
+ # dup across dtypes
122
+ df = DataFrame(
123
+ [[1, 1, 1.0, 5], [1, 1, 2.0, 5], [2, 1, 3.0, 5]],
124
+ columns=["foo", "bar", "foo", "hello"],
125
+ )
126
+
127
+ df["foo2"] = 7.0
128
+ expected = DataFrame(
129
+ [[1, 1, 1.0, 5, 7.0], [1, 1, 2.0, 5, 7.0], [2, 1, 3.0, 5, 7.0]],
130
+ columns=["foo", "bar", "foo", "hello", "foo2"],
131
+ )
132
+ tm.assert_frame_equal(df, expected)
133
+
134
+ result = df["foo"]
135
+ expected = DataFrame([[1, 1.0], [1, 2.0], [2, 3.0]], columns=["foo", "foo"])
136
+ tm.assert_frame_equal(result, expected)
137
+
138
+ # multiple replacements
139
+ df["foo"] = "string"
140
+ expected = DataFrame(
141
+ [
142
+ ["string", 1, "string", 5, 7.0],
143
+ ["string", 1, "string", 5, 7.0],
144
+ ["string", 1, "string", 5, 7.0],
145
+ ],
146
+ columns=["foo", "bar", "foo", "hello", "foo2"],
147
+ )
148
+ tm.assert_frame_equal(df, expected)
149
+
150
+ del df["foo"]
151
+ expected = DataFrame(
152
+ [[1, 5, 7.0], [1, 5, 7.0], [1, 5, 7.0]], columns=["bar", "hello", "foo2"]
153
+ )
154
+ tm.assert_frame_equal(df, expected)
155
+
156
+ def test_column_dups_indexes(self):
157
+ # check column dups with index equal and not equal to df's index
158
+ df = DataFrame(
159
+ np.random.default_rng(2).standard_normal((5, 3)),
160
+ index=["a", "b", "c", "d", "e"],
161
+ columns=["A", "B", "A"],
162
+ )
163
+ for index in [df.index, pd.Index(list("edcba"))]:
164
+ this_df = df.copy()
165
+ expected_ser = Series(index.values, index=this_df.index)
166
+ expected_df = DataFrame(
167
+ {"A": expected_ser, "B": this_df["B"]},
168
+ columns=["A", "B", "A"],
169
+ )
170
+ this_df["A"] = index
171
+ tm.assert_frame_equal(this_df, expected_df)
172
+
173
+ def test_changing_dtypes_with_duplicate_columns(self):
174
+ # multiple assignments that change dtypes
175
+ # the location indexer is a slice
176
+ # GH 6120
177
+ df = DataFrame(
178
+ np.random.default_rng(2).standard_normal((5, 2)), columns=["that", "that"]
179
+ )
180
+ expected = DataFrame(1.0, index=range(5), columns=["that", "that"])
181
+
182
+ df["that"] = 1.0
183
+ tm.assert_frame_equal(df, expected)
184
+
185
+ df = DataFrame(
186
+ np.random.default_rng(2).random((5, 2)), columns=["that", "that"]
187
+ )
188
+        expected = DataFrame(1, index=range(5), columns=["that", "that"])
+
+        df["that"] = 1
+        tm.assert_frame_equal(df, expected)
+
+    def test_dup_columns_comparisons(self):
+        # equality
+        df1 = DataFrame([[1, 2], [2, np.nan], [3, 4], [4, 4]], columns=["A", "B"])
+        df2 = DataFrame([[0, 1], [2, 4], [2, np.nan], [4, 5]], columns=["A", "A"])
+
+        # not-comparing like-labelled
+        msg = (
+            r"Can only compare identically-labeled \(both index and columns\) "
+            "DataFrame objects"
+        )
+        with pytest.raises(ValueError, match=msg):
+            df1 == df2
+
+        df1r = df1.reindex_like(df2)
+        result = df1r == df2
+        expected = DataFrame(
+            [[False, True], [True, False], [False, False], [True, False]],
+            columns=["A", "A"],
+        )
+        tm.assert_frame_equal(result, expected)
+
+    def test_mixed_column_selection(self):
+        # mixed column selection
+        # GH 5639
+        dfbool = DataFrame(
+            {
+                "one": Series([True, True, False], index=["a", "b", "c"]),
+                "two": Series([False, False, True, False], index=["a", "b", "c", "d"]),
+                "three": Series([False, True, True, True], index=["a", "b", "c", "d"]),
+            }
+        )
+        expected = pd.concat([dfbool["one"], dfbool["three"], dfbool["one"]], axis=1)
+        result = dfbool[["one", "three", "one"]]
+        tm.assert_frame_equal(result, expected)
+
+    def test_multi_axis_dups(self):
+        # multi-axis dups
+        # GH 6121
+        df = DataFrame(
+            np.arange(25.0).reshape(5, 5),
+            index=["a", "b", "c", "d", "e"],
+            columns=["A", "B", "C", "D", "E"],
+        )
+        z = df[["A", "C", "A"]].copy()
+        expected = z.loc[["a", "c", "a"]]
+
+        df = DataFrame(
+            np.arange(25.0).reshape(5, 5),
+            index=["a", "b", "c", "d", "e"],
+            columns=["A", "B", "C", "D", "E"],
+        )
+        z = df[["A", "C", "A"]]
+        result = z.loc[["a", "c", "a"]]
+        tm.assert_frame_equal(result, expected)
+
+    def test_columns_with_dups(self):
+        # GH 3468 related
+
+        # basic
+        df = DataFrame([[1, 2]], columns=["a", "a"])
+        df.columns = ["a", "a.1"]
+        expected = DataFrame([[1, 2]], columns=["a", "a.1"])
+        tm.assert_frame_equal(df, expected)
+
+        df = DataFrame([[1, 2, 3]], columns=["b", "a", "a"])
+        df.columns = ["b", "a", "a.1"]
+        expected = DataFrame([[1, 2, 3]], columns=["b", "a", "a.1"])
+        tm.assert_frame_equal(df, expected)
+
+    def test_columns_with_dup_index(self):
+        # with a dup index
+        df = DataFrame([[1, 2]], columns=["a", "a"])
+        df.columns = ["b", "b"]
+        expected = DataFrame([[1, 2]], columns=["b", "b"])
+        tm.assert_frame_equal(df, expected)
+
+    def test_multi_dtype(self):
+        # multi-dtype
+        df = DataFrame(
+            [[1, 2, 1.0, 2.0, 3.0, "foo", "bar"]],
+            columns=["a", "a", "b", "b", "d", "c", "c"],
+        )
+        df.columns = list("ABCDEFG")
+        expected = DataFrame(
+            [[1, 2, 1.0, 2.0, 3.0, "foo", "bar"]], columns=list("ABCDEFG")
+        )
+        tm.assert_frame_equal(df, expected)
+
+    def test_multi_dtype2(self):
+        df = DataFrame([[1, 2, "foo", "bar"]], columns=["a", "a", "a", "a"])
+        df.columns = ["a", "a.1", "a.2", "a.3"]
+        expected = DataFrame([[1, 2, "foo", "bar"]], columns=["a", "a.1", "a.2", "a.3"])
+        tm.assert_frame_equal(df, expected)
+
+    def test_dups_across_blocks(self, using_array_manager):
+        # dups across blocks
+        df_float = DataFrame(
+            np.random.default_rng(2).standard_normal((10, 3)), dtype="float64"
+        )
+        df_int = DataFrame(
+            np.random.default_rng(2).standard_normal((10, 3)).astype("int64")
+        )
+        df_bool = DataFrame(True, index=df_float.index, columns=df_float.columns)
+        df_object = DataFrame("foo", index=df_float.index, columns=df_float.columns)
+        df_dt = DataFrame(
+            pd.Timestamp("20010101"), index=df_float.index, columns=df_float.columns
+        )
+        df = pd.concat([df_float, df_int, df_bool, df_object, df_dt], axis=1)
+
+        if not using_array_manager:
+            assert len(df._mgr.blknos) == len(df.columns)
+            assert len(df._mgr.blklocs) == len(df.columns)
+
+        # testing iloc
+        for i in range(len(df.columns)):
+            df.iloc[:, i]
+
+    def test_dup_columns_across_dtype(self):
+        # dup columns across dtype GH 2079/2194
+        vals = [[1, -1, 2.0], [2, -2, 3.0]]
+        rs = DataFrame(vals, columns=["A", "A", "B"])
+        xp = DataFrame(vals)
+        xp.columns = ["A", "A", "B"]
+        tm.assert_frame_equal(rs, xp)
+
+    def test_set_value_by_index(self):
+        # See gh-12344
+        warn = None
+        msg = "will attempt to set the values inplace"
+
+        df = DataFrame(np.arange(9).reshape(3, 3).T)
+        df.columns = list("AAA")
+        expected = df.iloc[:, 2].copy()
+
+        with tm.assert_produces_warning(warn, match=msg):
+            df.iloc[:, 0] = 3
+        tm.assert_series_equal(df.iloc[:, 2], expected)
+
+        df = DataFrame(np.arange(9).reshape(3, 3).T)
+        df.columns = [2, float(2), str(2)]
+        expected = df.iloc[:, 1].copy()
+
+        with tm.assert_produces_warning(warn, match=msg):
+            df.iloc[:, 0] = 3
+        tm.assert_series_equal(df.iloc[:, 1], expected)
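
The duplicate-label behavior these tests exercise can be reproduced outside the suite; a minimal sketch against a recent pandas (not part of this commit):

    import pandas as pd

    # Two columns share the label "A"; label selection returns both as a DataFrame.
    df = pd.DataFrame([[1, 2, 3]], columns=["A", "A", "B"])
    print(df["A"].shape)  # (1, 2)

    # Assignment through a duplicated label writes every matching column.
    df["A"] = 0
    print(df.iloc[0].tolist())  # [0, 0, 3]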
venv/lib/python3.10/site-packages/pandas/tests/frame/test_npfuncs.py ADDED
@@ -0,0 +1,89 @@
+"""
+Tests for np.foo applied to DataFrame, not necessarily ufuncs.
+"""
+import numpy as np
+
+from pandas import (
+    Categorical,
+    DataFrame,
+)
+import pandas._testing as tm
+
+
+class TestAsArray:
+    def test_asarray_homogeneous(self):
+        df = DataFrame({"A": Categorical([1, 2]), "B": Categorical([1, 2])})
+        result = np.asarray(df)
+        # may change from object in the future
+        expected = np.array([[1, 1], [2, 2]], dtype="object")
+        tm.assert_numpy_array_equal(result, expected)
+
+    def test_np_sqrt(self, float_frame):
+        with np.errstate(all="ignore"):
+            result = np.sqrt(float_frame)
+        assert isinstance(result, type(float_frame))
+        assert result.index.is_(float_frame.index)
+        assert result.columns.is_(float_frame.columns)
+
+        tm.assert_frame_equal(result, float_frame.apply(np.sqrt))
+
+    def test_sum_deprecated_axis_behavior(self):
+        # GH#52042 deprecated behavior of df.sum(axis=None), which gets
+        # called when we do np.sum(df)
+
+        arr = np.random.default_rng(2).standard_normal((4, 3))
+        df = DataFrame(arr)
+
+        msg = "The behavior of DataFrame.sum with axis=None is deprecated"
+        with tm.assert_produces_warning(
+            FutureWarning, match=msg, check_stacklevel=False
+        ):
+            res = np.sum(df)
+
+        with tm.assert_produces_warning(FutureWarning, match=msg):
+            expected = df.sum(axis=None)
+        tm.assert_series_equal(res, expected)
+
+    def test_np_ravel(self):
+        # GH26247
+        arr = np.array(
+            [
+                [0.11197053, 0.44361564, -0.92589452],
+                [0.05883648, -0.00948922, -0.26469934],
+            ]
+        )
+
+        result = np.ravel([DataFrame(batch.reshape(1, 3)) for batch in arr])
+        expected = np.array(
+            [
+                0.11197053,
+                0.44361564,
+                -0.92589452,
+                0.05883648,
+                -0.00948922,
+                -0.26469934,
+            ]
+        )
+        tm.assert_numpy_array_equal(result, expected)
+
+        result = np.ravel(DataFrame(arr[0].reshape(1, 3), columns=["x1", "x2", "x3"]))
+        expected = np.array([0.11197053, 0.44361564, -0.92589452])
+        tm.assert_numpy_array_equal(result, expected)
+
+        result = np.ravel(
+            [
+                DataFrame(batch.reshape(1, 3), columns=["x1", "x2", "x3"])
+                for batch in arr
+            ]
+        )
+        expected = np.array(
+            [
+                0.11197053,
+                0.44361564,
+                -0.92589452,
+                0.05883648,
+                -0.00948922,
+                -0.26469934,
+            ]
+        )
+        tm.assert_numpy_array_equal(result, expected)
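
The np.sum dispatch asserted in test_sum_deprecated_axis_behavior can also be observed directly; a minimal sketch assuming pandas 2.1+, where DataFrame.sum with axis=None is deprecated (not part of this commit):

    import warnings

    import numpy as np
    import pandas as pd

    df = pd.DataFrame(np.arange(6).reshape(2, 3))
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        res = np.sum(df)  # dispatches to DataFrame.sum(axis=None)

    # On pandas 2.1+ this surfaces the FutureWarning the test asserts on.
    print([str(w.message) for w in caught])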