Commit 6cd98fb
Parent(s): c82eb8a

only run from local files when running locally
- .idea/workspace.xml +4 -1
- __pycache__/ai.cpython-311.pyc +0 -0
- __pycache__/cpu_ai.cpython-311.pyc +0 -0
- __pycache__/github_manager.cpython-311.pyc +0 -0
- ai.py +4 -4
- cpu_ai.py +12 -4
- models/abacaj/{replit-v2-codeinstruct-3b.q4_1.bin → Replit-v2-CodeInstruct-3B-ggml.bin} +0 -0
- models/marella/{ggml-model.bin → gpt-2-ggml.bin} +0 -0
.idea/workspace.xml
CHANGED
@@ -6,7 +6,10 @@
     <component name="ChangeListManager">
       <list default="true" id="ba1eab7b-54ae-409d-bac6-f23bdc636598" name="Changes" comment="">
         <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
-        <change beforePath="$PROJECT_DIR$/
+        <change beforePath="$PROJECT_DIR$/ai.py" beforeDir="false" afterPath="$PROJECT_DIR$/ai.py" afterDir="false" />
+        <change beforePath="$PROJECT_DIR$/cpu_ai.py" beforeDir="false" afterPath="$PROJECT_DIR$/cpu_ai.py" afterDir="false" />
+        <change beforePath="$PROJECT_DIR$/models/abacaj/replit-v2-codeinstruct-3b.q4_1.bin" beforeDir="false" />
+        <change beforePath="$PROJECT_DIR$/models/marella/ggml-model.bin" beforeDir="false" />
       </list>
       <option name="SHOW_DIALOG" value="false" />
       <option name="HIGHLIGHT_CONFLICTS" value="true" />
__pycache__/ai.cpython-311.pyc
DELETED
Binary file (2.34 kB)

__pycache__/cpu_ai.cpython-311.pyc
DELETED
Binary file (2.55 kB)

__pycache__/github_manager.cpython-311.pyc
DELETED
Binary file (3.17 kB)
ai.py
CHANGED
@@ -3,8 +3,8 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 import cpu_ai

 models = [
-    "
-    "
+    "abacaj/Replit-v2-CodeInstruct-3B-ggml",
+    "marella/gpt-2-ggml",
     "WizardLM/WizardCoder-Python-34B-V1.0",
     "WizardLM/WizardCoder-15B-V1.0",
     "WizardLM/WizardCoder-Python-7B-V1.0",
@@ -45,9 +45,9 @@ def cleanup_response(generated_text):
 def generate_code(prompt, model_index, max_tokens, temperature=0.6):
     model_full_name = models[model_index]
     if model_index == 0:
-        output = cpu_ai.generate_code(prompt,
+        output = cpu_ai.generate_code(prompt, model_full_name, max_tokens, temperature)
     elif model_index == 1:
-        output = cpu_ai.generate_code(prompt,
+        output = cpu_ai.generate_code(prompt, model_full_name, max_tokens, temperature)
     else:
         output = run_general_model(model_full_name, prompt, max_tokens, temperature)

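With the call sites fixed, indices 0 and 1 now dispatch to the ctransformers CPU path while the WizardCoder entries still go through run_general_model. A minimal usage sketch, assuming generate_code returns the generated text (its return statement is outside this hunk); the prompt and token budget below are illustrative, not from the commit:

import ai

# Index 0 selects "abacaj/Replit-v2-CodeInstruct-3B-ggml" and index 1
# selects "marella/gpt-2-ggml", both served by cpu_ai.generate_code on
# CPU; any other index falls through to run_general_model (transformers).
result = ai.generate_code(
    "Write a Python function that reverses a string.",  # illustrative prompt
    model_index=0,
    max_tokens=256,       # illustrative budget
    temperature=0.6,      # matches the signature's default
)
print(result)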
cpu_ai.py
CHANGED
@@ -1,4 +1,5 @@
 import os
+from pathlib import Path
 from dataclasses import dataclass, asdict
 from ctransformers import AutoModelForCausalLM, AutoConfig

@@ -29,14 +30,21 @@ def generate(llm: AutoModelForCausalLM,
     return llm(format_prompt(prompt), **asdict(generation_config))


-def generate_code(prompt,
-
+def generate_code(prompt, model_name, max_tokens, temperature):
+    from_local = False
+
+    model_path = model_name
+    config_path = model_name
+    if from_local:
+        config_folder = model_name.split("/")[0]
+        config_path = os.path.abspath(f"models/{config_folder}")
+        model_path = os.path.abspath(f"models/{model_name}.bin")

     config = AutoConfig.from_pretrained(
-
+        config_path,
     )
     llm = AutoModelForCausalLM.from_pretrained(
-
+        model_path,
         model_type="replit",
         config=config,
     )
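Note that from_local is hardcoded to False, so even after this commit the local-file path has to be enabled by editing the source. A minimal sketch of driving the same resolution from the environment instead; this assumes the Hugging Face Spaces runtime exposes a SPACE_ID variable, which is an assumption of the sketch, not something the commit relies on:

import os

def resolve_model_paths(model_name: str) -> tuple[str, str]:
    # Mirrors the resolution in cpu_ai.generate_code: Hub-style ids are
    # used as-is when remote; locally they map into the models/ directory.
    # Hypothetical check: treat a missing SPACE_ID as "running locally".
    from_local = "SPACE_ID" not in os.environ
    config_path = model_name
    model_path = model_name
    if from_local:
        config_folder = model_name.split("/")[0]   # e.g. "abacaj"
        config_path = os.path.abspath(f"models/{config_folder}")
        model_path = os.path.abspath(f"models/{model_name}.bin")
    return config_path, model_path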
models/abacaj/{replit-v2-codeinstruct-3b.q4_1.bin → Replit-v2-CodeInstruct-3B-ggml.bin}
RENAMED
File without changes

models/marella/{ggml-model.bin → gpt-2-ggml.bin}
RENAMED
File without changes
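The renames line up the on-disk filenames with the Hub-style names added to the models list in ai.py: with from_local enabled, "abacaj/Replit-v2-CodeInstruct-3B-ggml" resolves to models/abacaj/Replit-v2-CodeInstruct-3B-ggml.bin through the f"models/{model_name}.bin" expression in cpu_ai.py, so the same string works both as a remote repo id and as a local file stem.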