Spaces:
Running
on
Zero
Running
on
Zero
donbr
committed on
Commit
·
3e2b7aa
1
Parent(s):
f7df671
remove monkey patch - use requirements.txt versions
Browse files
- app.py +0 -6
- requirements.txt +2 -2
app.py
CHANGED
@@ -10,12 +10,6 @@ import gradio as gr
|
|
10 |
import spaces
|
11 |
from urllib.parse import unquote
|
12 |
from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria, StoppingCriteriaList
|
13 |
-
from transformers.cache_utils import DynamicCache
|
14 |
-
|
15 |
-
# Add get_max_length method to DynamicCache if it doesn't exist
|
16 |
-
# This is needed for compatibility with Phi-3.5 models
|
17 |
-
if not hasattr(DynamicCache, 'get_max_length'):
|
18 |
-
DynamicCache.get_max_length = lambda self: self.get_seq_length()
|
19 |
|
20 |
from data import extract_leaves, split_document, handle_broken_output, clean_json_text, sync_empty_fields
|
21 |
from examples import examples as input_examples
|
|
|
10 |
import spaces
|
11 |
from urllib.parse import unquote
|
12 |
from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria, StoppingCriteriaList
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
|
14 |
from data import extract_leaves, split_document, handle_broken_output, clean_json_text, sync_empty_fields
|
15 |
from examples import examples as input_examples
|
requirements.txt
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
transformers
|
2 |
-
torch
|
3 |
accelerate
|
4 |
spaces>=0.1.0
|
|
|
1 |
+
transformers==4.41.2
|
2 |
+
torch==2.2.2
|
3 |
accelerate
|
4 |
spaces>=0.1.0
|