# This file was autogenerated by uv via the following command:
#    uv pip compile requirements.in -o requirements.txt
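#
# For reference, the direct dependencies (the entries annotated
# "# via -r requirements.in" below) suggest requirements.in contains
# roughly the following; this is a sketch reconstructed from those
# annotations, and any pins or markers in requirements.in are unknown:
#
#    aiohttp
#    cashews
#    duckdb
#    einops
#    fastapi
#    httpx
#    huggingface-hub
#    pandas
#    polars
#    pydantic
#    python-dotenv
#    sentence-transformers
#    stamina
#    tqdm
#    uvicorn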
aiohappyeyeballs==2.4.6
    # via aiohttp
aiohttp==3.11.12
    # via
    #   -r requirements.in
    #   datasets
    #   fsspec
aiosignal==1.3.2
    # via aiohttp
annotated-types==0.7.0
    # via pydantic
anyio==4.8.0
    # via
    #   httpx
    #   starlette
attrs==25.1.0
    # via aiohttp
cashews==7.4.0
    # via -r requirements.in
certifi==2025.1.31
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.4.1
    # via requests
click==8.1.8
    # via uvicorn
coloredlogs==15.0.1
    # via onnxruntime
datasets==2.14.4
    # via
    #   evaluate
    #   optimum
dill==0.3.7
    # via
    #   datasets
    #   evaluate
    #   multiprocess
duckdb==1.2.0
    # via -r requirements.in
einops==0.8.1
    # via -r requirements.in
evaluate==0.4.3
    # via optimum
fastapi==0.115.8
    # via -r requirements.in
filelock==3.17.0
    # via
    #   huggingface-hub
    #   torch
    #   transformers
flatbuffers==25.2.10
    # via onnxruntime
frozenlist==1.5.0
    # via
    #   aiohttp
    #   aiosignal
fsspec==2025.2.0
    # via
    #   datasets
    #   evaluate
    #   huggingface-hub
    #   torch
h11==0.14.0
    # via
    #   httpcore
    #   uvicorn
h2==4.2.0
    # via httpx
hf-transfer==0.1.9
    # via huggingface-hub
hpack==4.1.0
    # via h2
httpcore==1.0.7
    # via httpx
httpx==0.28.1
    # via -r requirements.in
huggingface-hub==0.28.1
    # via
    #   -r requirements.in
    #   datasets
    #   evaluate
    #   optimum
    #   sentence-transformers
    #   tokenizers
    #   transformers
humanfriendly==10.0
    # via coloredlogs
hyperframe==6.1.0
    # via h2
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
    #   yarl
jinja2==3.1.5
    # via torch
joblib==1.4.2
    # via scikit-learn
markupsafe==3.0.2
    # via jinja2
mpmath==1.3.0
    # via sympy
multidict==6.1.0
    # via
    #   aiohttp
    #   yarl
multiprocess==0.70.15
    # via
    #   datasets
    #   evaluate
networkx==3.4.2
    # via torch
numpy==2.2.2
    # via
    #   datasets
    #   evaluate
    #   onnx
    #   onnxruntime
    #   optimum
    #   pandas
    #   scikit-learn
    #   scipy
    #   transformers
onnx==1.17.0
    # via optimum
onnxruntime==1.20.1
    # via optimum
optimum==1.24.0
    # via sentence-transformers
packaging==24.2
    # via
    #   datasets
    #   evaluate
    #   huggingface-hub
    #   onnxruntime
    #   optimum
    #   transformers
pandas==2.2.3
    # via
    #   -r requirements.in
    #   datasets
    #   evaluate
pillow==11.1.0
    # via sentence-transformers
polars==1.22.0
    # via -r requirements.in
propcache==0.2.1
    # via
    #   aiohttp
    #   yarl
protobuf==5.29.3
    # via
    #   onnx
    #   onnxruntime
    #   optimum
pyarrow==19.0.0
    # via datasets
pydantic==2.10.6
    # via
    #   -r requirements.in
    #   fastapi
pydantic-core==2.27.2
    # via pydantic
python-dateutil==2.9.0.post0
    # via pandas
python-dotenv==1.0.1
    # via -r requirements.in
pytz==2025.1
    # via pandas
pyyaml==6.0.2
    # via
    #   datasets
    #   huggingface-hub
    #   transformers
regex==2024.11.6
    # via transformers
requests==2.32.3
    # via
    #   datasets
    #   evaluate
    #   huggingface-hub
    #   transformers
safetensors==0.5.2
    # via transformers
scikit-learn==1.6.1
    # via sentence-transformers
scipy==1.15.1
    # via
    #   scikit-learn
    #   sentence-transformers
sentence-transformers==3.4.1
    # via -r requirements.in
setuptools==75.8.0
    # via torch
six==1.17.0
    # via python-dateutil
sniffio==1.3.1
    # via anyio
stamina==24.3.0
    # via -r requirements.in
starlette==0.45.3
    # via fastapi
sympy==1.13.1
    # via
    #   onnxruntime
    #   torch
tenacity==9.0.0
    # via stamina
threadpoolctl==3.5.0
    # via scikit-learn
tokenizers==0.21.0
    # via transformers
torch==2.6.0
    # via
    #   optimum
    #   sentence-transformers
tqdm==4.67.1
    # via
    #   -r requirements.in
    #   datasets
    #   evaluate
    #   huggingface-hub
    #   sentence-transformers
    #   transformers
transformers==4.48.3
    # via
    #   optimum
    #   sentence-transformers
typing-extensions==4.12.2
    # via
    #   anyio
    #   fastapi
    #   huggingface-hub
    #   pydantic
    #   pydantic-core
    #   torch
tzdata==2025.1
    # via pandas
urllib3==2.3.0
    # via requests
uvicorn==0.34.0
    # via -r requirements.in
xxhash==3.5.0
    # via
    #   datasets
    #   evaluate
yarl==1.18.3
    # via aiohttp