removed gpu flag
utils.py CHANGED
@@ -39,7 +39,7 @@ sys.path.append('/mount/src/gen_ai_dev')
 DATA_PATH = "./Infy financial report/"
 DATA_FILES = ["INFY_2022_2023.pdf", "INFY_2023_2024.pdf"]
 EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
-LLM_MODEL = "
+LLM_MODEL = "gpt2"  # Or "distilgpt2" # Or "HuggingFaceH4/zephyr-7b-beta" or "microsoft/phi-2"
 
 # Environment settings
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
@@ -220,9 +220,8 @@ try:
         tokenizer = AutoTokenizer.from_pretrained(LLM_MODEL)
         model = AutoModelForCausalLM.from_pretrained(
             LLM_MODEL,
-            device_map="
+            device_map="cpu",
             torch_dtype=torch.float16,
-            load_in_4bit=True
         )
         return pipeline(
             "text-generation",
@@ -232,7 +231,7 @@ try:
             do_sample=False,
             temperature=0.7,
             top_k=0,
-            top_p=1
+            top_p=1
         )
     generator = load_generator()
 except Exception as e:
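For reference, below is a minimal sketch of what the loader in utils.py looks like after this commit. Only the values visible in the hunks (the "gpt2" model choice, device_map="cpu", torch.float16, and the generation settings) come from the diff; the imports, the pipeline's model/tokenizer keyword arguments, and everything else the hunks do not show are assumptions.

# Hypothetical reconstruction of the post-commit loader; lines not visible
# in the diff (imports, pipeline kwargs such as model=/tokenizer=) are assumed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

LLM_MODEL = "gpt2"  # Or "distilgpt2" # Or "HuggingFaceH4/zephyr-7b-beta" or "microsoft/phi-2"

def load_generator():
    tokenizer = AutoTokenizer.from_pretrained(LLM_MODEL)
    model = AutoModelForCausalLM.from_pretrained(
        LLM_MODEL,
        device_map="cpu",            # previous value is truncated in the diff
        torch_dtype=torch.float16,   # note: some fp16 ops are unsupported on CPU;
                                     # torch.float32 is the safer CPU default
    )
    return pipeline(
        "text-generation",
        model=model,                 # assumed: these kwargs are not shown in the diff
        tokenizer=tokenizer,
        do_sample=False,             # greedy decoding; temperature/top_k/top_p are
        temperature=0.7,             # effectively ignored when sampling is off
        top_k=0,
        top_p=1,
    )

generator = load_generator()

Dropping load_in_4bit=True is consistent with the commit title: 4-bit loading goes through bitsandbytes, which generally requires a CUDA GPU, so on CPU-only Space hardware the previous call would fail at startup.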