Spaces: Runtime error
gmerrill committed
Commit · a1e244c
1 Parent(s): bd8563e
update

main.py CHANGED
@@ -29,7 +29,7 @@ def get_prompt(user_query: str, functions: list = []) -> str:
device : str = "cuda:0" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

-result = subprocess.run('ls -lh /.cache/huggingface/hub', shell=True, capture_output=True, text=True)
+result = subprocess.run('ls -lh /.cache/huggingface/hub && find .', shell=True, capture_output=True, text=True)
log('Cache files: ' + result.stdout)

model_id : str = "gorilla-llm/gorilla-openfunctions-v1"

@@ -38,7 +38,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_id)
log('AutoModelForCausalLM.from_pretrained ...')
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch_dtype, low_cpu_mem_usage=True)

-result = subprocess.run('ls -lh /.cache/huggingface/hub', shell=True, capture_output=True, text=True)
+result = subprocess.run('ls -lh /.cache/huggingface/hub && find .', shell=True, capture_output=True, text=True)
log('Cache files: ' + result.stdout)

log('mode.to(device) ...')
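For context, the sketch below shows the cache-inspection pattern this commit extends: shelling out via subprocess to list the Hugging Face hub cache (and, after this change, also `find .` over the working directory) before and after the model download, so the Space's logs reveal what is actually on disk. This is a minimal, self-contained sketch assembled from the visible hunks, not the full main.py; the `log` helper is a hypothetical stand-in for the Space's own logger, and the cache path `/.cache/huggingface/hub` is taken as-is from the diff.

# Minimal sketch of the debugging pattern in this commit (assumptions noted above).
import subprocess

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer


def log(msg: str) -> None:
    # Hypothetical stand-in for the Space's logging helper (not shown in the hunks).
    print(msg, flush=True)


def log_cache_contents() -> None:
    # This commit changes the command from a plain `ls -lh` of the hub cache to
    # `ls -lh ... && find .`, so the log also shows the working-directory tree.
    result = subprocess.run(
        'ls -lh /.cache/huggingface/hub && find .',
        shell=True, capture_output=True, text=True,
    )
    log('Cache files: ' + result.stdout)


device = "cuda:0" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

log_cache_contents()  # before download: see what is already cached

model_id = "gorilla-llm/gorilla-openfunctions-v1"
tokenizer = AutoTokenizer.from_pretrained(model_id)

log('AutoModelForCausalLM.from_pretrained ...')
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch_dtype, low_cpu_mem_usage=True,
)

log_cache_contents()  # after download: confirm the weights landed in the cache

log('model.to(device) ...')
model.to(device)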