Update app.py
app.py CHANGED
@@ -17,7 +17,7 @@ import re
 from gradio_client import Client
 from simple_salesforce import Salesforce, SalesforceLogin
 from llama_index.llms.huggingface import HuggingFaceLLM
-from llama_index.llms.huggingface import HuggingFaceInferenceAPI
+# from llama_index.llms.huggingface import HuggingFaceInferenceAPI
 
 
 # Define Pydantic model for incoming request body
@@ -72,7 +72,7 @@ app.mount("/static", StaticFiles(directory="static"), name="static")
 
 templates = Jinja2Templates(directory="static")
 # Configure Llama index settings
-Settings.llm =
+Settings.llm = HuggingFaceLLM(
     model_name="meta-llama/Meta-Llama-3-8B-Instruct",
     tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
     context_window=3000,
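
For context, the commit comments out the HuggingFaceInferenceAPI import and assigns a locally loaded HuggingFaceLLM to Settings.llm. A minimal sketch of the resulting configuration is below; only model_name, tokenizer_name, and context_window appear in the diff, so the import of Settings and the remaining arguments (max_new_tokens, generate_kwargs, device_map) are assumptions based on typical llama-index usage, not taken from this commit.

# Sketch of the updated configuration, assuming the llama-index >= 0.10 package layout.
from llama_index.core import Settings
from llama_index.llms.huggingface import HuggingFaceLLM

# Only model_name, tokenizer_name, and context_window are visible in the diff;
# the remaining arguments are illustrative defaults.
Settings.llm = HuggingFaceLLM(
    model_name="meta-llama/Meta-Llama-3-8B-Instruct",
    tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
    context_window=3000,
    max_new_tokens=512,                                        # assumed generation budget
    generate_kwargs={"temperature": 0.1, "do_sample": True},   # assumed sampling settings
    device_map="auto",                                         # let transformers place the model
)

Unlike the commented-out HuggingFaceInferenceAPI path, which calls a hosted inference endpoint, HuggingFaceLLM downloads and runs the model weights inside the Space itself, so the hardware assigned to the Space must be able to hold Meta-Llama-3-8B-Instruct.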