Spaces:
VIDraft
/
Running on Zero

hysts HF staff committed on
Commit
4d3ff27
·
1 Parent(s): 384e9c5
Files changed (8) hide show
  1. .pre-commit-config.yaml +10 -14
  2. .vscode/extensions.json +8 -0
  3. .vscode/settings.json +5 -14
  4. README.md +1 -1
  5. app.py +15 -13
  6. pyproject.toml +41 -5
  7. requirements.txt +48 -66
  8. uv.lock +0 -0
.pre-commit-config.yaml CHANGED
@@ -1,6 +1,6 @@
1
  repos:
2
  - repo: https://github.com/pre-commit/pre-commit-hooks
3
- rev: v4.6.0
4
  hooks:
5
  - id: check-executables-have-shebangs
6
  - id: check-json
@@ -18,13 +18,15 @@ repos:
18
  hooks:
19
  - id: docformatter
20
  args: ["--in-place"]
21
- - repo: https://github.com/pycqa/isort
22
- rev: 5.13.2
23
  hooks:
24
- - id: isort
25
- args: ["--profile", "black"]
 
 
26
  - repo: https://github.com/pre-commit/mirrors-mypy
27
- rev: v1.9.0
28
  hooks:
29
  - id: mypy
30
  args: ["--ignore-missing-imports"]
@@ -35,14 +37,8 @@ repos:
35
  "types-PyYAML",
36
  "types-pytz",
37
  ]
38
- - repo: https://github.com/psf/black
39
- rev: 24.4.0
40
- hooks:
41
- - id: black
42
- language_version: python3.10
43
- args: ["--line-length", "119"]
44
  - repo: https://github.com/kynan/nbstripout
45
- rev: 0.7.1
46
  hooks:
47
  - id: nbstripout
48
  args:
@@ -51,7 +47,7 @@ repos:
51
  "metadata.interpreter metadata.kernelspec cell.metadata.pycharm",
52
  ]
53
  - repo: https://github.com/nbQA-dev/nbQA
54
- rev: 1.8.5
55
  hooks:
56
  - id: nbqa-black
57
  - id: nbqa-pyupgrade
 
1
  repos:
2
  - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: v5.0.0
4
  hooks:
5
  - id: check-executables-have-shebangs
6
  - id: check-json
 
18
  hooks:
19
  - id: docformatter
20
  args: ["--in-place"]
21
+ - repo: https://github.com/astral-sh/ruff-pre-commit
22
+ rev: v0.8.4
23
  hooks:
24
+ - id: ruff
25
+ args: ["--fix"]
26
+ - id: ruff-format
27
+ args: ["--line-length", "119"]
28
  - repo: https://github.com/pre-commit/mirrors-mypy
29
+ rev: v1.14.0
30
  hooks:
31
  - id: mypy
32
  args: ["--ignore-missing-imports"]
 
37
  "types-PyYAML",
38
  "types-pytz",
39
  ]
 
 
 
 
 
 
40
  - repo: https://github.com/kynan/nbstripout
41
+ rev: 0.8.1
42
  hooks:
43
  - id: nbstripout
44
  args:
 
47
  "metadata.interpreter metadata.kernelspec cell.metadata.pycharm",
48
  ]
49
  - repo: https://github.com/nbQA-dev/nbQA
50
+ rev: 1.9.1
51
  hooks:
52
  - id: nbqa-black
53
  - id: nbqa-pyupgrade
.vscode/extensions.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "recommendations": [
3
+ "ms-python.python",
4
+ "charliermarsh.ruff",
5
+ "streetsidesoftware.code-spell-checker",
6
+ "tamasfe.even-better-toml"
7
+ ]
8
+ }
.vscode/settings.json CHANGED
@@ -2,29 +2,20 @@
2
  "editor.formatOnSave": true,
3
  "files.insertFinalNewline": false,
4
  "[python]": {
5
- "editor.defaultFormatter": "ms-python.black-formatter",
6
  "editor.formatOnType": true,
7
  "editor.codeActionsOnSave": {
 
8
  "source.organizeImports": "explicit"
9
  }
10
  },
11
  "[jupyter]": {
12
  "files.insertFinalNewline": false
13
  },
14
- "black-formatter.args": [
15
- "--line-length=119"
16
- ],
17
- "isort.args": ["--profile", "black"],
18
- "flake8.args": [
19
- "--max-line-length=119"
20
- ],
21
- "ruff.lint.args": [
22
- "--line-length=119"
23
- ],
24
  "notebook.output.scrolling": true,
25
  "notebook.formatOnCellExecution": true,
26
  "notebook.formatOnSave.enabled": true,
27
- "notebook.codeActionsOnSave": {
28
- "source.organizeImports": "explicit"
29
- }
30
  }
 
2
  "editor.formatOnSave": true,
3
  "files.insertFinalNewline": false,
4
  "[python]": {
5
+ "editor.defaultFormatter": "charliermarsh.ruff",
6
  "editor.formatOnType": true,
7
  "editor.codeActionsOnSave": {
8
+ "source.fixAll.ruff": "explicit",
9
  "source.organizeImports": "explicit"
10
  }
11
  },
12
  "[jupyter]": {
13
  "files.insertFinalNewline": false
14
  },
 
 
 
 
 
 
 
 
 
 
15
  "notebook.output.scrolling": true,
16
  "notebook.formatOnCellExecution": true,
17
  "notebook.formatOnSave.enabled": true,
18
+ "notebook.codeActionsOnSave": {
19
+ "source.organizeImports": "explicit"
20
+ }
21
  }
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🌖
4
  colorFrom: blue
5
  colorTo: pink
6
  sdk: gradio
7
- sdk_version: 4.44.1
8
  app_file: app.py
9
  pinned: false
10
  license: bsd-3-clause
 
4
  colorFrom: blue
5
  colorTo: pink
6
  sdk: gradio
7
+ sdk_version: 5.9.1
8
  app_file: app.py
9
  pinned: false
10
  license: bsd-3-clause
app.py CHANGED
@@ -1,7 +1,5 @@
1
  #!/usr/bin/env python
2
 
3
- from __future__ import annotations
4
-
5
  import os
6
  import string
7
 
@@ -9,7 +7,7 @@ import gradio as gr
9
  import PIL.Image
10
  import spaces
11
  import torch
12
- from transformers import AutoProcessor, Blip2ForConditionalGeneration
13
 
14
  DESCRIPTION = "# [BLIP-2](https://github.com/salesforce/LAVIS/tree/main/projects/blip2)"
15
 
@@ -23,11 +21,15 @@ MODEL_ID_OPT_6_7B = "Salesforce/blip2-opt-6.7b"
23
  MODEL_ID_FLAN_T5_XL = "Salesforce/blip2-flan-t5-xl"
24
  MODEL_ID_FLAN_T5_XXL = "Salesforce/blip2-flan-t5-xxl"
25
  MODEL_ID = os.getenv("MODEL_ID", MODEL_ID_FLAN_T5_XXL)
26
- assert MODEL_ID in [MODEL_ID_OPT_2_7B, MODEL_ID_OPT_6_7B, MODEL_ID_FLAN_T5_XL, MODEL_ID_FLAN_T5_XXL]
 
 
27
 
28
  if torch.cuda.is_available():
29
  processor = AutoProcessor.from_pretrained(MODEL_ID)
30
- model = Blip2ForConditionalGeneration.from_pretrained(MODEL_ID, device_map="auto", load_in_8bit=True)
 
 
31
 
32
 
33
  @spaces.GPU
@@ -54,8 +56,7 @@ def generate_caption(
54
  num_beams=num_beams,
55
  top_p=top_p,
56
  )
57
- result = processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
58
- return result
59
 
60
 
61
  @spaces.GPU
@@ -83,8 +84,7 @@ def answer_question(
83
  num_beams=num_beams,
84
  top_p=top_p,
85
  )
86
- result = processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
87
- return result
88
 
89
 
90
  def postprocess_output(output: str) -> str:
@@ -104,9 +104,11 @@ def chat(
104
  min_length: int = 1,
105
  num_beams: int = 5,
106
  top_p: float = 0.9,
107
- history_orig: list[str] = [],
108
- history_qa: list[str] = [],
109
  ) -> tuple[list[tuple[str, str]], list[str], list[str]]:
 
 
110
  history_orig.append(text)
111
  text_qa = f"Question: {text} Answer:"
112
  history_qa.append(text_qa)
@@ -128,7 +130,7 @@ def chat(
128
  history_orig.append(output)
129
  history_qa.append(output)
130
 
131
- chat_val = list(zip(history_orig[0::2], history_orig[1::2]))
132
  return chat_val, history_orig, history_qa
133
 
134
 
@@ -155,7 +157,7 @@ examples = [
155
  ],
156
  ]
157
 
158
- with gr.Blocks(css="style.css") as demo:
159
  gr.Markdown(DESCRIPTION)
160
  gr.DuplicateButton(
161
  value="Duplicate Space for private use",
 
1
  #!/usr/bin/env python
2
 
 
 
3
  import os
4
  import string
5
 
 
7
  import PIL.Image
8
  import spaces
9
  import torch
10
+ from transformers import AutoProcessor, BitsAndBytesConfig, Blip2ForConditionalGeneration
11
 
12
  DESCRIPTION = "# [BLIP-2](https://github.com/salesforce/LAVIS/tree/main/projects/blip2)"
13
 
 
21
  MODEL_ID_FLAN_T5_XL = "Salesforce/blip2-flan-t5-xl"
22
  MODEL_ID_FLAN_T5_XXL = "Salesforce/blip2-flan-t5-xxl"
23
  MODEL_ID = os.getenv("MODEL_ID", MODEL_ID_FLAN_T5_XXL)
24
+ if MODEL_ID not in [MODEL_ID_OPT_2_7B, MODEL_ID_OPT_6_7B, MODEL_ID_FLAN_T5_XL, MODEL_ID_FLAN_T5_XXL]:
25
+ error_message = f"Invalid MODEL_ID: {MODEL_ID}"
26
+ raise ValueError(error_message)
27
 
28
  if torch.cuda.is_available():
29
  processor = AutoProcessor.from_pretrained(MODEL_ID)
30
+ model = Blip2ForConditionalGeneration.from_pretrained(
31
+ MODEL_ID, device_map="auto", quantization_config=BitsAndBytesConfig(load_in_8bit=True)
32
+ )
33
 
34
 
35
  @spaces.GPU
 
56
  num_beams=num_beams,
57
  top_p=top_p,
58
  )
59
+ return processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
 
60
 
61
 
62
  @spaces.GPU
 
84
  num_beams=num_beams,
85
  top_p=top_p,
86
  )
87
+ return processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
 
88
 
89
 
90
  def postprocess_output(output: str) -> str:
 
104
  min_length: int = 1,
105
  num_beams: int = 5,
106
  top_p: float = 0.9,
107
+ history_orig: list[str] | None = None,
108
+ history_qa: list[str] | None = None,
109
  ) -> tuple[list[tuple[str, str]], list[str], list[str]]:
110
+ history_orig = history_orig or []
111
+ history_qa = history_qa or []
112
  history_orig.append(text)
113
  text_qa = f"Question: {text} Answer:"
114
  history_qa.append(text_qa)
 
130
  history_orig.append(output)
131
  history_qa.append(output)
132
 
133
+ chat_val = list(zip(history_orig[0::2], history_orig[1::2], strict=False))
134
  return chat_val, history_orig, history_qa
135
 
136
 
 
157
  ],
158
  ]
159
 
160
+ with gr.Blocks(css_paths="style.css") as demo:
161
  gr.Markdown(DESCRIPTION)
162
  gr.DuplicateButton(
163
  value="Duplicate Space for private use",
pyproject.toml CHANGED
@@ -5,12 +5,48 @@ description = ""
5
  readme = "README.md"
6
  requires-python = ">=3.10"
7
  dependencies = [
8
- "accelerate>=0.34.2",
9
- "bitsandbytes>=0.44.1",
10
- "gradio>=4.44.1",
11
  "hf-transfer>=0.1.8",
12
- "spaces>=0.30.3",
13
  "torch==2.4.0",
14
  "torchvision==0.19.0",
15
- "transformers>=4.45.1",
16
  ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  readme = "README.md"
6
  requires-python = ">=3.10"
7
  dependencies = [
8
+ "accelerate>=1.2.1",
9
+ "bitsandbytes>=0.45.0",
10
+ "gradio>=5.9.1",
11
  "hf-transfer>=0.1.8",
12
+ "spaces>=0.31.1",
13
  "torch==2.4.0",
14
  "torchvision==0.19.0",
15
+ "transformers>=4.47.1",
16
  ]
17
+
18
+ [tool.ruff]
19
+ line-length = 119
20
+
21
+ [tool.ruff.lint]
22
+ select = ["ALL"]
23
+ ignore = [
24
+ "COM812", # missing-trailing-comma
25
+ "D203", # one-blank-line-before-class
26
+ "D213", # multi-line-summary-second-line
27
+ "E501", # line-too-long
28
+ "SIM117", # multiple-with-statements
29
+ ]
30
+ extend-ignore = [
31
+ "D100", # undocumented-public-module
32
+ "D101", # undocumented-public-class
33
+ "D102", # undocumented-public-method
34
+ "D103", # undocumented-public-function
35
+ "D104", # undocumented-public-package
36
+ "D105", # undocumented-magic-method
37
+ "D107", # undocumented-public-init
38
+ "EM101", # raw-string-in-exception
39
+ "FBT001", # boolean-type-hint-positional-argument
40
+ "FBT002", # boolean-default-value-positional-argument
41
+ "PD901", # pandas-df-variable-name
42
+ "PGH003", # blanket-type-ignore
43
+ "PLR0913", # too-many-arguments
44
+ "PLR0915", # too-many-statements
45
+ "TRY003", # raise-vanilla-args
46
+ ]
47
+ unfixable = [
48
+ "F401", # unused-import
49
+ ]
50
+
51
+ [tool.ruff.format]
52
+ docstring-code-format = true
requirements.txt CHANGED
@@ -1,38 +1,34 @@
1
  # This file was autogenerated by uv via the following command:
2
  # uv pip compile pyproject.toml -o requirements.txt
3
- accelerate==0.34.2
4
  # via blip2 (pyproject.toml)
5
  aiofiles==23.2.1
6
  # via gradio
7
  annotated-types==0.7.0
8
  # via pydantic
9
- anyio==4.6.0
10
  # via
11
  # gradio
12
  # httpx
13
  # starlette
14
- bitsandbytes==0.44.1
15
  # via blip2 (pyproject.toml)
16
- certifi==2024.8.30
17
  # via
18
  # httpcore
19
  # httpx
20
  # requests
21
- charset-normalizer==3.3.2
22
  # via requests
23
- click==8.1.7
24
  # via
25
  # typer
26
  # uvicorn
27
- contourpy==1.3.0
28
- # via matplotlib
29
- cycler==0.12.1
30
- # via matplotlib
31
  exceptiongroup==1.2.2
32
  # via anyio
33
- fastapi==0.115.0
34
  # via gradio
35
- ffmpy==0.4.0
36
  # via gradio
37
  filelock==3.16.1
38
  # via
@@ -40,18 +36,16 @@ filelock==3.16.1
40
  # torch
41
  # transformers
42
  # triton
43
- fonttools==4.54.1
44
- # via matplotlib
45
- fsspec==2024.9.0
46
  # via
47
  # gradio-client
48
  # huggingface-hub
49
  # torch
50
- gradio==4.44.1
51
  # via
52
  # blip2 (pyproject.toml)
53
  # spaces
54
- gradio-client==1.3.0
55
  # via gradio
56
  h11==0.14.0
57
  # via
@@ -59,14 +53,15 @@ h11==0.14.0
59
  # uvicorn
60
  hf-transfer==0.1.8
61
  # via blip2 (pyproject.toml)
62
- httpcore==1.0.6
63
  # via httpx
64
- httpx==0.27.2
65
  # via
66
  # gradio
67
  # gradio-client
 
68
  # spaces
69
- huggingface-hub==0.25.1
70
  # via
71
  # accelerate
72
  # gradio
@@ -78,35 +73,27 @@ idna==3.10
78
  # anyio
79
  # httpx
80
  # requests
81
- importlib-resources==6.4.5
82
- # via gradio
83
- jinja2==3.1.4
84
  # via
85
  # gradio
86
  # torch
87
- kiwisolver==1.4.7
88
- # via matplotlib
89
  markdown-it-py==3.0.0
90
  # via rich
91
  markupsafe==2.1.5
92
  # via
93
  # gradio
94
  # jinja2
95
- matplotlib==3.9.2
96
- # via gradio
97
  mdurl==0.1.2
98
  # via markdown-it-py
99
  mpmath==1.3.0
100
  # via sympy
101
- networkx==3.3
102
  # via torch
103
- numpy==2.1.1
104
  # via
105
  # accelerate
106
  # bitsandbytes
107
- # contourpy
108
  # gradio
109
- # matplotlib
110
  # pandas
111
  # torchvision
112
  # transformers
@@ -135,52 +122,46 @@ nvidia-cusparse-cu12==12.1.0.106
135
  # torch
136
  nvidia-nccl-cu12==2.20.5
137
  # via torch
138
- nvidia-nvjitlink-cu12==12.6.77
139
  # via
140
  # nvidia-cusolver-cu12
141
  # nvidia-cusparse-cu12
142
  nvidia-nvtx-cu12==12.1.105
143
  # via torch
144
- orjson==3.10.7
145
  # via gradio
146
- packaging==24.1
147
  # via
148
  # accelerate
149
  # gradio
150
  # gradio-client
151
  # huggingface-hub
152
- # matplotlib
153
  # spaces
154
  # transformers
155
  pandas==2.2.3
156
  # via gradio
157
- pillow==10.3.0
158
  # via
159
  # gradio
160
- # matplotlib
161
  # torchvision
162
  psutil==5.9.8
163
  # via
164
  # accelerate
165
  # spaces
166
- pydantic==2.9.2
167
  # via
168
  # fastapi
169
  # gradio
170
  # spaces
171
- pydantic-core==2.23.4
172
  # via pydantic
173
  pydub==0.25.1
174
  # via gradio
175
  pygments==2.18.0
176
  # via rich
177
- pyparsing==3.1.4
178
- # via matplotlib
179
  python-dateutil==2.9.0.post0
180
- # via
181
- # matplotlib
182
- # pandas
183
- python-multipart==0.0.12
184
  # via gradio
185
  pytz==2024.2
186
  # via pandas
@@ -190,16 +171,18 @@ pyyaml==6.0.2
190
  # gradio
191
  # huggingface-hub
192
  # transformers
193
- regex==2024.9.11
194
  # via transformers
195
  requests==2.32.3
196
  # via
197
  # huggingface-hub
198
  # spaces
199
  # transformers
200
- rich==13.9.2
201
  # via typer
202
- ruff==0.6.9
 
 
203
  # via gradio
204
  safetensors==0.4.5
205
  # via
@@ -209,21 +192,21 @@ semantic-version==2.10.0
209
  # via gradio
210
  shellingham==1.5.4
211
  # via typer
212
- six==1.16.0
213
  # via python-dateutil
214
  sniffio==1.3.1
215
- # via
216
- # anyio
217
- # httpx
218
- spaces==0.30.3
219
  # via blip2 (pyproject.toml)
220
- starlette==0.38.6
221
- # via fastapi
 
 
222
  sympy==1.13.3
223
  # via torch
224
- tokenizers==0.20.0
225
  # via transformers
226
- tomlkit==0.12.0
227
  # via gradio
228
  torch==2.4.0
229
  # via
@@ -233,19 +216,20 @@ torch==2.4.0
233
  # torchvision
234
  torchvision==0.19.0
235
  # via blip2 (pyproject.toml)
236
- tqdm==4.66.5
237
  # via
238
  # huggingface-hub
239
  # transformers
240
- transformers==4.45.1
241
  # via blip2 (pyproject.toml)
242
  triton==3.0.0
243
  # via torch
244
- typer==0.12.5
245
  # via gradio
246
  typing-extensions==4.12.2
247
  # via
248
  # anyio
 
249
  # fastapi
250
  # gradio
251
  # gradio-client
@@ -259,11 +243,9 @@ typing-extensions==4.12.2
259
  # uvicorn
260
  tzdata==2024.2
261
  # via pandas
262
- urllib3==2.2.3
263
- # via
264
- # gradio
265
- # requests
266
- uvicorn==0.31.0
267
  # via gradio
268
- websockets==12.0
269
  # via gradio-client
 
1
  # This file was autogenerated by uv via the following command:
2
  # uv pip compile pyproject.toml -o requirements.txt
3
+ accelerate==1.2.1
4
  # via blip2 (pyproject.toml)
5
  aiofiles==23.2.1
6
  # via gradio
7
  annotated-types==0.7.0
8
  # via pydantic
9
+ anyio==4.7.0
10
  # via
11
  # gradio
12
  # httpx
13
  # starlette
14
+ bitsandbytes==0.45.0
15
  # via blip2 (pyproject.toml)
16
+ certifi==2024.12.14
17
  # via
18
  # httpcore
19
  # httpx
20
  # requests
21
+ charset-normalizer==3.4.1
22
  # via requests
23
+ click==8.1.8
24
  # via
25
  # typer
26
  # uvicorn
 
 
 
 
27
  exceptiongroup==1.2.2
28
  # via anyio
29
+ fastapi==0.115.6
30
  # via gradio
31
+ ffmpy==0.5.0
32
  # via gradio
33
  filelock==3.16.1
34
  # via
 
36
  # torch
37
  # transformers
38
  # triton
39
+ fsspec==2024.12.0
 
 
40
  # via
41
  # gradio-client
42
  # huggingface-hub
43
  # torch
44
+ gradio==5.9.1
45
  # via
46
  # blip2 (pyproject.toml)
47
  # spaces
48
+ gradio-client==1.5.2
49
  # via gradio
50
  h11==0.14.0
51
  # via
 
53
  # uvicorn
54
  hf-transfer==0.1.8
55
  # via blip2 (pyproject.toml)
56
+ httpcore==1.0.7
57
  # via httpx
58
+ httpx==0.28.1
59
  # via
60
  # gradio
61
  # gradio-client
62
+ # safehttpx
63
  # spaces
64
+ huggingface-hub==0.27.0
65
  # via
66
  # accelerate
67
  # gradio
 
73
  # anyio
74
  # httpx
75
  # requests
76
+ jinja2==3.1.5
 
 
77
  # via
78
  # gradio
79
  # torch
 
 
80
  markdown-it-py==3.0.0
81
  # via rich
82
  markupsafe==2.1.5
83
  # via
84
  # gradio
85
  # jinja2
 
 
86
  mdurl==0.1.2
87
  # via markdown-it-py
88
  mpmath==1.3.0
89
  # via sympy
90
+ networkx==3.4.2
91
  # via torch
92
+ numpy==2.2.1
93
  # via
94
  # accelerate
95
  # bitsandbytes
 
96
  # gradio
 
97
  # pandas
98
  # torchvision
99
  # transformers
 
122
  # torch
123
  nvidia-nccl-cu12==2.20.5
124
  # via torch
125
+ nvidia-nvjitlink-cu12==12.6.85
126
  # via
127
  # nvidia-cusolver-cu12
128
  # nvidia-cusparse-cu12
129
  nvidia-nvtx-cu12==12.1.105
130
  # via torch
131
+ orjson==3.10.13
132
  # via gradio
133
+ packaging==24.2
134
  # via
135
  # accelerate
136
  # gradio
137
  # gradio-client
138
  # huggingface-hub
 
139
  # spaces
140
  # transformers
141
  pandas==2.2.3
142
  # via gradio
143
+ pillow==11.0.0
144
  # via
145
  # gradio
 
146
  # torchvision
147
  psutil==5.9.8
148
  # via
149
  # accelerate
150
  # spaces
151
+ pydantic==2.10.4
152
  # via
153
  # fastapi
154
  # gradio
155
  # spaces
156
+ pydantic-core==2.27.2
157
  # via pydantic
158
  pydub==0.25.1
159
  # via gradio
160
  pygments==2.18.0
161
  # via rich
 
 
162
  python-dateutil==2.9.0.post0
163
+ # via pandas
164
+ python-multipart==0.0.20
 
 
165
  # via gradio
166
  pytz==2024.2
167
  # via pandas
 
171
  # gradio
172
  # huggingface-hub
173
  # transformers
174
+ regex==2024.11.6
175
  # via transformers
176
  requests==2.32.3
177
  # via
178
  # huggingface-hub
179
  # spaces
180
  # transformers
181
+ rich==13.9.4
182
  # via typer
183
+ ruff==0.8.4
184
+ # via gradio
185
+ safehttpx==0.1.6
186
  # via gradio
187
  safetensors==0.4.5
188
  # via
 
192
  # via gradio
193
  shellingham==1.5.4
194
  # via typer
195
+ six==1.17.0
196
  # via python-dateutil
197
  sniffio==1.3.1
198
+ # via anyio
199
+ spaces==0.31.1
 
 
200
  # via blip2 (pyproject.toml)
201
+ starlette==0.41.3
202
+ # via
203
+ # fastapi
204
+ # gradio
205
  sympy==1.13.3
206
  # via torch
207
+ tokenizers==0.21.0
208
  # via transformers
209
+ tomlkit==0.13.2
210
  # via gradio
211
  torch==2.4.0
212
  # via
 
216
  # torchvision
217
  torchvision==0.19.0
218
  # via blip2 (pyproject.toml)
219
+ tqdm==4.67.1
220
  # via
221
  # huggingface-hub
222
  # transformers
223
+ transformers==4.47.1
224
  # via blip2 (pyproject.toml)
225
  triton==3.0.0
226
  # via torch
227
+ typer==0.15.1
228
  # via gradio
229
  typing-extensions==4.12.2
230
  # via
231
  # anyio
232
+ # bitsandbytes
233
  # fastapi
234
  # gradio
235
  # gradio-client
 
243
  # uvicorn
244
  tzdata==2024.2
245
  # via pandas
246
+ urllib3==2.3.0
247
+ # via requests
248
+ uvicorn==0.34.0
 
 
249
  # via gradio
250
+ websockets==14.1
251
  # via gradio-client
uv.lock CHANGED
The diff for this file is too large to render. See raw diff