Shamik committed
Commit f8f8d64 · unverified · 1 parent: 382cb9b

feat: adding requirements

Files changed (1)
  1. requirements.txt +538 -0
requirements.txt ADDED
@@ -0,0 +1,538 @@
+ # This file was autogenerated by uv via the following command:
+ # uv pip compile pyproject.toml -o requirements.txt
+ accelerate==1.7.0
+ # via
+ # flagembedding
+ # peft
+ # transformers
+ aiohappyeyeballs==2.6.1
+ # via aiohttp
+ aiohttp==3.12.11
+ # via
+ # fsspec
+ # huggingface-hub
+ # llama-index-core
+ aiosignal==1.3.2
+ # via aiohttp
+ aiosqlite==0.21.0
+ # via llama-index-core
+ annotated-types==0.7.0
+ # via pydantic
+ anyio==4.9.0
+ # via
+ # httpx
+ # openai
+ arxiv==2.2.0
+ # via agent-hackathon (pyproject.toml)
+ async-timeout==5.0.1
+ # via aiohttp
+ attrs==25.3.0
+ # via aiohttp
+ banks==2.1.2
+ # via llama-index-core
+ beautifulsoup4==4.13.4
+ # via
+ # ir-datasets
+ # llama-index-readers-file
+ cbor==1.0.0
+ # via trec-car-tools
+ certifi==2025.4.26
+ # via
+ # httpcore
+ # httpx
+ # llama-cloud
+ # requests
+ charset-normalizer==3.4.2
+ # via requests
+ click==8.2.1
+ # via
+ # llama-cloud-services
+ # nltk
+ colorama==0.4.6
+ # via griffe
+ dataclasses-json==0.6.7
+ # via llama-index-core
+ datasets==3.6.0
+ # via flagembedding
+ deprecated==1.2.18
+ # via
+ # banks
+ # llama-index-core
+ dill==0.3.8
+ # via
+ # datasets
+ # multiprocess
+ dirtyjson==1.0.8
+ # via llama-index-core
+ distro==1.9.0
+ # via openai
+ exceptiongroup==1.3.0
+ # via anyio
+ feedparser==6.0.11
+ # via arxiv
+ filelock==3.18.0
+ # via
+ # datasets
+ # huggingface-hub
+ # torch
+ # transformers
+ filetype==1.2.0
+ # via llama-index-core
+ flagembedding==1.3.5
+ # via agent-hackathon (pyproject.toml)
+ frozenlist==1.6.2
+ # via
+ # aiohttp
+ # aiosignal
+ fsspec==2025.3.0
+ # via
+ # datasets
+ # huggingface-hub
+ # llama-index-core
+ # torch
+ greenlet==3.2.3
+ # via sqlalchemy
+ griffe==1.7.3
+ # via banks
+ grpcio==1.67.1
+ # via pymilvus
+ h11==0.16.0
+ # via httpcore
+ hf-xet==1.1.3
+ # via huggingface-hub
+ html2text==2025.4.15
+ # via llama-hub
+ httpcore==1.0.9
+ # via httpx
+ httpx==0.28.1
+ # via
+ # agent-hackathon (pyproject.toml)
+ # llama-cloud
+ # llama-index-core
+ # openai
+ huggingface-hub==0.32.4
+ # via
+ # agent-hackathon (pyproject.toml)
+ # accelerate
+ # datasets
+ # llama-index-embeddings-huggingface
+ # llama-index-embeddings-huggingface-api
+ # llama-index-llms-huggingface-api
+ # llama-index-utils-huggingface
+ # peft
+ # sentence-transformers
+ # smolagents
+ # tokenizers
+ # transformers
+ idna==3.10
+ # via
+ # anyio
+ # httpx
+ # requests
+ # yarl
+ ijson==3.4.0
+ # via ir-datasets
+ inscriptis==2.6.0
+ # via ir-datasets
+ ir-datasets==0.5.10
+ # via flagembedding
+ jinja2==3.1.6
+ # via
+ # banks
+ # smolagents
+ # torch
+ jiter==0.10.0
+ # via openai
+ joblib==1.5.1
+ # via
+ # nltk
+ # scikit-learn
+ llama-cloud==0.1.23
+ # via
+ # llama-cloud-services
+ # llama-index-indices-managed-llama-cloud
+ llama-cloud-services==0.6.30
+ # via llama-parse
+ llama-hub==0.0.79.post1
+ # via agent-hackathon (pyproject.toml)
+ llama-index==0.12.41
+ # via llama-hub
+ llama-index-agent-openai==0.4.9
+ # via
+ # llama-index
+ # llama-index-program-openai
+ llama-index-cli==0.4.3
+ # via llama-index
+ llama-index-core==0.12.41
+ # via
+ # llama-cloud-services
+ # llama-index
+ # llama-index-agent-openai
+ # llama-index-cli
+ # llama-index-embeddings-huggingface
+ # llama-index-embeddings-huggingface-api
+ # llama-index-embeddings-openai
+ # llama-index-indices-managed-llama-cloud
+ # llama-index-llms-huggingface
+ # llama-index-llms-huggingface-api
+ # llama-index-llms-openai
+ # llama-index-multi-modal-llms-openai
+ # llama-index-program-openai
+ # llama-index-question-gen-openai
+ # llama-index-readers-file
+ # llama-index-readers-llama-parse
+ # llama-index-utils-huggingface
+ # llama-index-vector-stores-milvus
+ llama-index-embeddings-huggingface==0.5.4
+ # via agent-hackathon (pyproject.toml)
+ llama-index-embeddings-huggingface-api==0.3.1
+ # via agent-hackathon (pyproject.toml)
+ llama-index-embeddings-openai==0.3.1
+ # via
+ # llama-index
+ # llama-index-cli
+ llama-index-indices-managed-llama-cloud==0.7.4
+ # via llama-index
+ llama-index-llms-huggingface==0.5.0
+ # via agent-hackathon (pyproject.toml)
+ llama-index-llms-huggingface-api==0.5.0
+ # via agent-hackathon (pyproject.toml)
+ llama-index-llms-openai==0.4.4
+ # via
+ # llama-index
+ # llama-index-agent-openai
+ # llama-index-cli
+ # llama-index-multi-modal-llms-openai
+ # llama-index-program-openai
+ # llama-index-question-gen-openai
+ llama-index-multi-modal-llms-openai==0.5.1
+ # via llama-index
+ llama-index-program-openai==0.3.2
+ # via
+ # llama-index
+ # llama-index-question-gen-openai
+ llama-index-question-gen-openai==0.3.1
+ # via llama-index
+ llama-index-readers-file==0.4.8
+ # via llama-index
+ llama-index-readers-llama-parse==0.4.0
+ # via llama-index
+ llama-index-utils-huggingface==0.3.0
+ # via llama-index-embeddings-huggingface-api
+ llama-index-vector-stores-milvus==0.8.4
+ # via agent-hackathon (pyproject.toml)
+ llama-parse==0.6.30
+ # via llama-index-readers-llama-parse
+ lxml==5.4.0
+ # via
+ # inscriptis
+ # ir-datasets
+ lz4==4.4.4
+ # via ir-datasets
+ markdown-it-py==3.0.0
+ # via rich
+ markupsafe==3.0.2
+ # via jinja2
+ marshmallow==3.26.1
+ # via dataclasses-json
+ mdurl==0.1.2
+ # via markdown-it-py
+ milvus-lite==2.4.12
+ # via pymilvus
+ mpmath==1.3.0
+ # via sympy
+ multidict==6.4.4
+ # via
+ # aiohttp
+ # yarl
+ multiprocess==0.70.16
+ # via datasets
+ mypy-extensions==1.1.0
+ # via typing-inspect
+ nest-asyncio==1.6.0
+ # via llama-index-core
+ networkx==3.4.2
+ # via
+ # llama-index-core
+ # torch
+ nltk==3.9.1
+ # via
+ # llama-index
+ # llama-index-core
+ numpy==2.2.6
+ # via
+ # accelerate
+ # datasets
+ # ir-datasets
+ # llama-index-core
+ # pandas
+ # peft
+ # scikit-learn
+ # scipy
+ # transformers
+ # trec-car-tools
+ nvidia-cublas-cu12==12.4.5.8
+ # via
+ # nvidia-cudnn-cu12
+ # nvidia-cusolver-cu12
+ # torch
+ nvidia-cuda-cupti-cu12==12.4.127
+ # via torch
+ nvidia-cuda-nvrtc-cu12==12.4.127
+ # via torch
+ nvidia-cuda-runtime-cu12==12.4.127
+ # via torch
+ nvidia-cudnn-cu12==9.1.0.70
+ # via torch
+ nvidia-cufft-cu12==11.2.1.3
+ # via torch
+ nvidia-curand-cu12==10.3.5.147
+ # via torch
+ nvidia-cusolver-cu12==11.6.1.9
+ # via torch
+ nvidia-cusparse-cu12==12.3.1.170
+ # via
+ # nvidia-cusolver-cu12
+ # torch
+ nvidia-cusparselt-cu12==0.6.2
+ # via torch
+ nvidia-nccl-cu12==2.21.5
+ # via torch
+ nvidia-nvjitlink-cu12==12.4.127
+ # via
+ # nvidia-cusolver-cu12
+ # nvidia-cusparse-cu12
+ # torch
+ nvidia-nvtx-cu12==12.4.127
+ # via torch
+ openai==1.84.0
+ # via
+ # agent-hackathon (pyproject.toml)
+ # llama-index-agent-openai
+ # llama-index-embeddings-openai
+ # llama-index-llms-openai
+ packaging==25.0
+ # via
+ # accelerate
+ # datasets
+ # huggingface-hub
+ # marshmallow
+ # peft
+ # transformers
+ pandas==2.3.0
+ # via
+ # datasets
+ # llama-index-readers-file
+ # pymilvus
+ peft==0.15.2
+ # via flagembedding
+ pillow==11.2.1
+ # via
+ # llama-index-core
+ # sentence-transformers
+ # smolagents
+ platformdirs==4.3.8
+ # via
+ # banks
+ # llama-cloud-services
+ propcache==0.3.1
+ # via
+ # aiohttp
+ # yarl
+ protobuf==6.31.1
+ # via
+ # flagembedding
+ # pymilvus
+ psutil==7.0.0
+ # via
+ # accelerate
+ # llama-hub
+ # peft
+ pyaml==23.12.0
+ # via llama-hub
+ pyarrow==20.0.0
+ # via
+ # datasets
+ # ir-datasets
+ pydantic==2.11.5
+ # via
+ # banks
+ # llama-cloud
+ # llama-cloud-services
+ # llama-index-core
+ # openai
+ pydantic-core==2.33.2
+ # via pydantic
+ pygments==2.19.1
+ # via rich
+ pymilvus==2.5.10
+ # via llama-index-vector-stores-milvus
+ pypdf==5.6.0
+ # via llama-index-readers-file
+ pyprojroot==0.3.0
+ # via agent-hackathon (pyproject.toml)
+ python-dateutil==2.9.0.post0
+ # via pandas
+ python-dotenv==1.1.0
+ # via
+ # agent-hackathon (pyproject.toml)
+ # llama-cloud-services
+ # pymilvus
+ # smolagents
+ pytz==2025.2
+ # via pandas
+ pyyaml==6.0.2
+ # via
+ # accelerate
+ # datasets
+ # huggingface-hub
+ # ir-datasets
+ # llama-index-core
+ # peft
+ # pyaml
+ # transformers
+ regex==2024.11.6
+ # via
+ # nltk
+ # tiktoken
+ # transformers
+ requests==2.32.3
+ # via
+ # arxiv
+ # datasets
+ # huggingface-hub
+ # inscriptis
+ # ir-datasets
+ # llama-index-core
+ # smolagents
+ # tiktoken
+ # transformers
+ retrying==1.3.4
+ # via llama-hub
+ rich==14.0.0
+ # via smolagents
+ safetensors==0.5.3
+ # via
+ # accelerate
+ # peft
+ # transformers
+ scikit-learn==1.7.0
+ # via sentence-transformers
+ scipy==1.15.3
+ # via
+ # scikit-learn
+ # sentence-transformers
+ sentence-transformers==4.1.0
+ # via
+ # flagembedding
+ # llama-index-embeddings-huggingface
+ sentencepiece==0.2.0
+ # via flagembedding
+ setuptools==80.9.0
+ # via pymilvus
+ sgmllib3k==1.0.0
+ # via feedparser
+ six==1.17.0
+ # via
+ # python-dateutil
+ # retrying
+ smolagents==1.17.0
+ # via agent-hackathon (pyproject.toml)
+ sniffio==1.3.1
+ # via
+ # anyio
+ # openai
+ soupsieve==2.7
+ # via beautifulsoup4
+ sqlalchemy==2.0.41
+ # via llama-index-core
+ striprtf==0.0.26
+ # via llama-index-readers-file
+ sympy==1.13.1
+ # via torch
+ tenacity==9.1.2
+ # via llama-index-core
+ threadpoolctl==3.6.0
+ # via scikit-learn
+ tiktoken==0.9.0
+ # via llama-index-core
+ tokenizers==0.21.1
+ # via transformers
+ torch==2.6.0
+ # via
+ # accelerate
+ # flagembedding
+ # llama-index-llms-huggingface
+ # peft
+ # sentence-transformers
+ # transformers
+ tqdm==4.67.1
+ # via
+ # datasets
+ # huggingface-hub
+ # ir-datasets
+ # llama-index-core
+ # milvus-lite
+ # nltk
+ # openai
+ # peft
+ # sentence-transformers
+ # transformers
+ transformers==4.52.4
+ # via
+ # flagembedding
+ # llama-index-llms-huggingface
+ # peft
+ # sentence-transformers
+ trec-car-tools==2.6
+ # via ir-datasets
+ triton==3.2.0
+ # via torch
+ typing-extensions==4.14.0
+ # via
+ # aiosqlite
+ # anyio
+ # beautifulsoup4
+ # exceptiongroup
+ # huggingface-hub
+ # llama-index-core
+ # multidict
+ # openai
+ # pydantic
+ # pydantic-core
+ # pypdf
+ # pyprojroot
+ # rich
+ # sentence-transformers
+ # sqlalchemy
+ # torch
+ # typing-inspect
+ # typing-inspection
+ typing-inspect==0.9.0
+ # via
+ # dataclasses-json
+ # llama-index-core
+ typing-inspection==0.4.1
+ # via pydantic
+ tzdata==2025.2
+ # via pandas
+ ujson==5.10.0
+ # via pymilvus
+ unlzw3==0.2.3
+ # via ir-datasets
+ urllib3==2.4.0
+ # via requests
+ warc3-wet==0.2.5
+ # via ir-datasets
+ warc3-wet-clueweb09==0.2.5
+ # via ir-datasets
+ wrapt==1.17.2
+ # via
+ # deprecated
+ # llama-index-core
+ xxhash==3.5.0
+ # via datasets
+ yarl==1.20.0
+ # via aiohttp
+ zlib-state==0.1.9
+ # via ir-datasets
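The header of the added file records how it was produced. As a minimal sketch of the intended workflow, assuming uv is installed and the repository's pyproject.toml lists the agent-hackathon project's direct dependencies, the pinned file can be regenerated and then installed roughly like this:

# regenerate the pinned requirements from pyproject.toml (the command recorded in the file header)
uv pip compile pyproject.toml -o requirements.txt

# install the pinned set into the active environment
uv pip install -r requirements.txt    # or: pip install -r requirements.txt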