update demo
- configs.py +0 -2
- helper.py +5 -2
- main.py +3 -9
- model_main.py +3 -2
- static/script.js +4 -0
- static/style.css +8 -4
- utils.py +3 -4
configs.py DELETED
@@ -1,2 +0,0 @@
-OPENAI_KEY = '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'
-DEBUG_PRINT = False
helper.py CHANGED
@@ -1,6 +1,7 @@
 from utils import get_embeddings, search_document_annoy, \
     answer_with_gpt3_with_function_calls, transform_user_question, debug_print
 
+# add input parameter: need api_key for demo
 def get_response_from_model(user_input, top_k=3, annoy_metric='dot', model_name="gpt-3.5-turbo", user_query_preprocess=False):
 
     assert top_k > 0, 'k must be an integer greater than 0'
@@ -15,7 +16,9 @@ def get_response_from_model(user_input, top_k=3, annoy_metric='dot', model_name=
         user_q_embedding = get_embeddings(chatgpt_question)
         document = search_document_annoy(user_q_embedding, top_k=top_k, metric=annoy_metric)
         reply = answer_with_gpt3_with_function_calls(document, user_input, model_name)
+        print(f"returning reply: {reply}")
         return reply
     except Exception as e:
-        print(e)
-        return
+        print(f"returning error: {e}")
+        return e._message
+        # return "Error when trying to get embedding for the user query. Please try with a shorter question."
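Note on the new except branch: `_message` is a private attribute of the legacy (pre-1.0) openai SDK's error classes and is not defined on ordinary exceptions, so `return e._message` can itself raise AttributeError. A minimal, hedged sketch of a safer accessor (the helper name is hypothetical, not part of the commit):

    def describe_error(e: Exception) -> str:
        # _message exists on legacy openai.error.OpenAIError instances,
        # but not on arbitrary exceptions; fall back to str(e).
        return getattr(e, "_message", None) or str(e)

The except branch could then return describe_error(e) instead of e._message.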
main.py CHANGED
@@ -4,6 +4,7 @@ from typing import List
 from fastapi.responses import HTMLResponse
 from fastapi.staticfiles import StaticFiles
 from pydantic import BaseModel
+import openai
 
 from helper import get_response_from_model
 
@@ -31,15 +32,8 @@ def chat(input_data: InputData):
     user_input = input_data.user_input
     api_key = input_data.api_key
 
-
-
-    # if not validate_api_key(api_key):
-    #     raise HTTPException(status_code=400, detail="Invalid API key")
-
-    # Tokenize the user input and get model's response
-    # input_ids = tokenizer.encode(user_input, return_tensors="pt")
-    # output = model.generate(input_ids)
-    # response = tokenizer.decode(output[0], skip_special_tokens=True)
+    openai.api_key = api_key
+
     response = get_response_from_model(user_input)
 
     return {"response": response}
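Taken together, the main.py changes route the user-supplied key into the openai module before calling the helper, replacing the hard-coded OPENAI_KEY deleted from configs.py. A minimal sketch of how the pieces fit (the route path and the exact InputData field declarations are assumptions; only user_input, api_key, and the chat handler body appear in the diff):

    from fastapi import FastAPI
    from pydantic import BaseModel
    import openai

    from helper import get_response_from_model

    app = FastAPI()

    class InputData(BaseModel):
        user_input: str
        api_key: str

    @app.post("/chat")  # assumed path; the diff only shows the handler body
    def chat(input_data: InputData):
        # Per-request key instead of a key baked into the repo.
        openai.api_key = input_data.api_key
        response = get_response_from_model(input_data.user_input)
        return {"response": response}

Because openai.api_key is module-level state in the legacy SDK, concurrent requests with different keys can overwrite each other; the demo presumably accepts that trade-off.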
model_main.py CHANGED
@@ -33,8 +33,9 @@ def main(args):
         reply = answer_with_gpt3_with_function_calls(document, user_input, args.model)
         print(reply)
     except Exception as e:
-        print(e)
-
+        print(f"error: {e}")
+        return e
+        # print("Error when trying to get embedding for the user query. Please try with a shorter question.")
 
 
 if __name__ == "__main__":
static/script.js CHANGED
@@ -2,6 +2,7 @@ const sendBtn = document.getElementById('send-btn');
 const userInput = document.getElementById('user-input');
 const apiKeyInput = document.getElementById('api-key-input');
 const chatBox = document.getElementById('chat-box');
+
 const questionButtons = document.querySelectorAll('.question_btn');
 questionButtons.forEach(btn => {
     console.log(btn)
@@ -14,6 +15,7 @@ questionButtons.forEach(btn => {
 });
 
 sendBtn.addEventListener('click', () => {
+    console.log("send clicked")
     const message = userInput.value.trim();
     const apiKey = apiKeyInput.value.trim();
     console.log("message: ", message)
@@ -39,11 +41,13 @@ sendBtn.addEventListener('click', () => {
     .then(response => {
         console.log("response: ", response)
         if (!response.ok) {
+            print(response)
             throw new Error('Network response was not ok');
         }
         return response.json();
     })
     .then(data => {
+        console.log("data: ", data)
         console.log("reply: ", data.response)
         // Display chatbot's response
         const botMessageDiv = document.createElement('div');
static/style.css CHANGED
@@ -26,11 +26,12 @@ body {
 }
 
 .demo {
-    display: flex;
+    /* display: flex;
     justify-content: flex-end;
-    align-items: center;
-    width:
-    flex-direction: column;
+    align-items: center; */
+    width: 30%;
+    /* flex-direction: column; */
+    margin-right: auto;
 }
 
 button {
@@ -40,6 +41,9 @@ button {
     padding: 10px 15px;
     border-radius: 5px;
     cursor: pointer;
+    width: 100%;
+    margin: 10px;
+
 }
 
 .user-message, .bot-message {
utils.py CHANGED
@@ -11,10 +11,9 @@ from tqdm import tqdm
 from annoy import AnnoyIndex
 
 from openai_function_utils.openai_function_interface import OPENAI_AVAILABLE_FUNCTIONS, OPENAI_FUNCTIONS_DEFINITIONS
-
-
-openai.api_key = OPENAI_KEY
-openai.organization = 'org-dsEkob5KeBBq3lbBLhnCXcJt'
+DEBUG_PRINT = False
+# openai.api_key = OPENAI_KEY
+# openai.organization = 'org-dsEkob5KeBBq3lbBLhnCXcJt'
 
 
 def get_embeddings(input):
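With the module-level key assignment commented out here, get_embeddings now relies on the key set per request in main.py. For context, a hedged sketch of what an embedding call looks like with the legacy (pre-1.0) openai SDK, not necessarily the repo's exact implementation (the embedding model name is an assumption):

    import openai

    def get_embeddings(input):
        # Assumes openai.api_key has already been set (per request in main.py).
        response = openai.Embedding.create(input=input, model="text-embedding-ada-002")
        return response["data"][0]["embedding"]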