Update app.py
app.py CHANGED
@@ -66,6 +66,7 @@ RAG_CHAIN_PROMPT = PromptTemplate(input_variables = ["context", "question"],
 #Get the platform keys for this Space from the secrets
 HUGGINGFACEHUB_API_TOKEN = os.getenv("HF_ACCESS_READ")
 OAI_API_KEY=os.getenv("OPENAI_API_KEY")
+HEADERS = {"Authorization": f"Bearer {HUGGINGFACEHUB_API_TOKEN}"}


 #Path where docs/images/videos can be stored - locally, i.e. here in the HF Space (otherwise on your own machine)
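The new HEADERS constant is what later authenticates the call to the Hugging Face Inference API. A minimal sketch of the same setup with an explicit fail-fast check that the Space secrets actually resolved (the secret names match the diff; the RuntimeError guard is only part of this sketch, not of the commit):

import os

#Read the Space secrets exactly as app.py does
HUGGINGFACEHUB_API_TOKEN = os.getenv("HF_ACCESS_READ")
OAI_API_KEY = os.getenv("OPENAI_API_KEY")

#Fail fast with a readable message instead of a later 401 from the API
if not HUGGINGFACEHUB_API_TOKEN or not OAI_API_KEY:
    raise RuntimeError("HF_ACCESS_READ and OPENAI_API_KEY must be set as secrets of this Space")

#Bearer-token header for the Hugging Face Inference API
HEADERS = {"Authorization": f"Bearer {HUGGINGFACEHUB_API_TOKEN}"}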
@@ -111,6 +112,8 @@ repo_id = "HuggingFaceH4/zephyr-7b-alpha" #the model is really good!!! From MIT
 #HuggingFace Model name--------------------------------
 MODEL_NAME_HF = "mistralai/Mixtral-8x7B-Instruct-v0.1"
 MODEL_NAME_OAI_ZEICHNEN = "dall-e-3"
+#Alternative way to draw: Stable Diffusion from HF:
+API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2-1"

 ################################################
 #Enable access to the HF Hub
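API_URL points at the hosted inference endpoint for stabilityai/stable-diffusion-2-1, which returns raw image bytes for a text prompt. A minimal sketch of how that endpoint is queried with the HEADERS defined above (the helper name and the output file are only illustrations, not part of the commit):

import requests

API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2-1"

def draw_with_hf(prompt, headers):
    #POST the prompt as JSON; on success the response body is the generated image (bytes)
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
    response.raise_for_status()
    return response.content

#Example usage (hypothetical):
#image_bytes = draw_with_hf("Bild zeichnen: ein Haus am See", HEADERS)
#with open("bild.png", "wb") as f:
#    f.write(image_bytes)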
@@ -386,8 +389,9 @@ def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3
         if (model_option == "OpenAI"):
             #Request to OpenAI ----------------------------
             if (prompt.find('Bild zeichnen') != -1):
-                print("OpenAI zeichnen.......................")
-                llm = ChatOpenAI(model_name = MODEL_NAME_OAI_ZEICHNEN, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
+                #print("OpenAI zeichnen.......................")
+                #llm = ChatOpenAI(model_name = MODEL_NAME_OAI_ZEICHNEN, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
+                data = {"inputs": prompt}
             else:
                 print("OpenAI normal.......................")
                 llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
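In this hunk the DALL-E setup is commented out: when the prompt contains 'Bild zeichnen', only the JSON payload for the Stable Diffusion endpoint is prepared and no llm is created, so the later code has to take the same 'Bild zeichnen' branch again (which the next hunk does). A small sketch of the same decision as a helper, assuming the ChatOpenAI import app.py already uses (the helper name and tuple return are only illustrative):

from langchain.chat_models import ChatOpenAI

def select_backend(prompt, openai_api_key, temperature, model_name):
    #Drawing request: no chat model is needed, only the payload for the Stable Diffusion endpoint
    if prompt.find('Bild zeichnen') != -1:
        return None, {"inputs": prompt}
    #Normal request: build the chat model just like the unchanged else-branch
    llm = ChatOpenAI(model_name=model_name,
                     openai_api_key=openai_api_key,
                     temperature=temperature)
    return llm, None

#llm, data = select_backend(prompt, openai_api_key, temperature, MODEL_NAME)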
@@ -416,8 +420,12 @@ def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3
             result = rag_chain(llm, history_text_und_prompt, db)
         else:
             print("LLM aufrufen ohne RAG: ...........")
-            result = llm_chain(llm, history_text_und_prompt)
-
+            if (prompt.find('Bild zeichnen') != -1):
+                response = requests.post(API_URL, headers=HEADERS, json=data)
+                result = response.content
+            else:
+                result = llm_chain(llm, history_text_und_prompt)
+

     except Exception as e:
         raise gr.Error(e)
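The new branch posts the previously prepared data payload to the Stable Diffusion endpoint and returns response.content as the result. One caveat: when the model is still loading or the request is rejected, the Inference API answers with JSON instead of image bytes, so response.content would then hold an error document rather than a picture. A minimal sketch of the same branch with that case handled and the bytes converted to a PIL image that Gradio can display (the helper name, the content-type check and the PIL conversion are additions of this sketch; llm_chain, data, API_URL and HEADERS are the names from app.py):

import io
import requests
from PIL import Image

def answer_without_rag(prompt, history_text_und_prompt, llm, data, api_url, headers):
    #Drawing request: call the Stable Diffusion Inference API
    if prompt.find('Bild zeichnen') != -1:
        response = requests.post(api_url, headers=headers, json=data)
        #A JSON body signals an error (e.g. the model is still loading), not an image
        if response.headers.get("content-type", "").startswith("application/json"):
            raise RuntimeError(f"Inference API error: {response.json()}")
        #Raw image bytes -> PIL image, which Gradio can display directly
        return Image.open(io.BytesIO(response.content))
    #Text request: unchanged path through the plain LLM chain from app.py
    return llm_chain(llm, history_text_und_prompt)

#result = answer_without_rag(prompt, history_text_und_prompt, llm, data, API_URL, HEADERS)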