Update private_gpt/components/llm/llm_component.py
private_gpt/components/llm/llm_component.py  CHANGED

@@ -13,7 +13,7 @@ from private_gpt.settings.settings import Settings
 import os
 logger = logging.getLogger(__name__)
 
-model_url
+model_url: "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/raw/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf"
 
 @singleton
 class LLMComponent:
@@ -27,7 +27,6 @@ class LLMComponent:
 
         match settings.llm.mode:
             case "local":
-                model_url: "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/raw/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf"
                 from llama_index.llms import LlamaCPP
                 prompt_style_cls = get_prompt_style(settings.local.prompt_style)
                 prompt_style = prompt_style_cls(
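
Note on the change above: at module level, a bare `model_url: "..."` line is only a variable annotation in Python, so by itself it does not bind a value or feed the URL into the local LLM setup. Also, Hugging Face serves LFS-tracked binaries such as GGUF weights from a `.../resolve/main/...` path; the `.../raw/main/...` path returns the small Git LFS pointer file rather than the weights. The snippet below is a minimal sketch of how the URL could actually be wired into the local branch; it assumes the llama_index 0.9.x `LlamaCPP` class (which accepts `model_url` and downloads the weights when no `model_path` is given), and the constant name `MODEL_URL` and the generation parameters are illustrative, not taken from the repository.

# Sketch only, not the repository's actual code: pass the GGUF URL straight to
# LlamaCPP so llama_index downloads the weights on first use.
from llama_index.llms import LlamaCPP

# URL copied from the commit above; for a direct binary download, Hugging Face
# normally expects ".../resolve/main/..." rather than ".../raw/main/...".
MODEL_URL = (
    "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/"
    "raw/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf"
)

llm = LlamaCPP(
    model_url=MODEL_URL,               # used when no local model_path is supplied
    temperature=0.1,
    max_new_tokens=256,
    context_window=3900,
    model_kwargs={"n_gpu_layers": 0},  # CPU by default; raise to offload to GPU
    verbose=True,
)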