Update app.py
app.py CHANGED

@@ -99,17 +99,18 @@ def predict(input, history=[]):
 
 def sqlquery(input, conversation_history):
 
-    input_text = " ".join(conversation_history) + " " + input
-    sql_encoding = sql_tokenizer(table=table, query=
+    #input_text = " ".join(conversation_history) + " " + input
+    sql_encoding = sql_tokenizer(table=table, query=input + sql_tokenizer.eos_token, return_tensors="pt")
     sql_outputs = sql_model.generate(**sql_encoding)
     sql_response = sql_tokenizer.batch_decode(sql_outputs, skip_special_tokens=True)
 
-    global conversation_history
+    #global conversation_history
 
     # Maintain the conversation history
-    conversation_history.append(
+    conversation_history.append("User: " + input)
+    conversation_history.append("Bot: " + sql_response)
 
-    return
+    return conversation_history
 
 
 chat_interface = gr.Interface(
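For context, a minimal, self-contained sketch of what the updated sqlquery() amounts to after this change. The model and tokenizer setup is not part of the diff, so the TAPEX checkpoint (microsoft/tapex-base-finetuned-wtq), the toy table, and the module-level conversation_history list below are assumptions chosen only to make the snippet runnable; adjust them to match the rest of app.py.

# Hypothetical setup, not taken from app.py: a TAPEX table-QA checkpoint and a
# toy pandas table. TAPEX expects every cell value to be a string.
import pandas as pd
from transformers import TapexTokenizer, BartForConditionalGeneration

sql_tokenizer = TapexTokenizer.from_pretrained("microsoft/tapex-base-finetuned-wtq")
sql_model = BartForConditionalGeneration.from_pretrained("microsoft/tapex-base-finetuned-wtq")

table = pd.DataFrame({"year": ["2020", "2021"], "city": ["Tokyo", "Paris"]})
conversation_history = []


def sqlquery(input, conversation_history):
    # Encode only the current question against the table; the previous version
    # prepended the whole history (the line now commented out in the diff).
    sql_encoding = sql_tokenizer(
        table=table, query=input + sql_tokenizer.eos_token, return_tensors="pt"
    )
    sql_outputs = sql_model.generate(**sql_encoding)
    # batch_decode returns a list of strings, one per generated sequence.
    sql_response = sql_tokenizer.batch_decode(sql_outputs, skip_special_tokens=True)

    # Maintain the conversation history. Note: the diff concatenates the list
    # sql_response directly to "Bot: ", which raises a TypeError at runtime;
    # joining it first is one way to keep the intent while storing plain strings.
    conversation_history.append("User: " + input)
    conversation_history.append("Bot: " + " ".join(sql_response))
    return conversation_history

The returned history can then be passed to whatever output component the gr.Interface call is configured with; the diff only shows the opening chat_interface = gr.Interface( line, so the exact inputs and outputs wiring is left as in app.py.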