import os
import logging

import gradio as gr
from omegaconf import OmegaConf

from query import VectaraQuery
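
# Interpret an environment-variable value as a boolean ("true", case-insensitive).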
def isTrue(x) -> bool:
    if isinstance(x, bool):
        return x
    return x.strip().lower() == 'true'
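
# Build the application configuration from environment variables.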
corpus_ids = str(os.environ['corpus_ids']).split(',')
cfg = OmegaConf.create({
    'customer_id': str(os.environ['customer_id']),
    'corpus_ids': corpus_ids,
    'api_key': str(os.environ['api_key']),
    'title': os.environ['title'],
    'description': os.environ['description'],
    'source_data_desc': os.environ['source_data_desc'],
    'streaming': isTrue(os.environ.get('streaming', False)),
    'prompt_name': os.environ.get('prompt_name', None),
    'examples': os.environ.get('examples', None)
})
logging.basicConfig(level=logging.DEBUG)
logging.debug(f'examples: {cfg.examples} of type: {type(cfg.examples)}')
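
# Query client for the configured Vectara customer, corpora, and optional prompt.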
vq = VectaraQuery(cfg.api_key, cfg.customer_id, cfg.corpus_ids, cfg.prompt_name)
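
# Chat callback for the Gradio interface: streams partial output as it arrives when
# streaming is enabled, otherwise yields the full response once.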
def respond(message, history):
    if cfg.streaming:
        # Submit a streaming query and yield the accumulated output as chunks arrive
        stream = vq.submit_query_streaming(message)
        outputs = ""
        for output in stream:
            outputs += output
            yield outputs
    else:
        # Submit a non-streaming query and yield the complete response
        response = vq.submit_query(message)
        yield response
cfg.title = f'''<center> <img src="https://github.com/david-oplatka/chatbot-streamlit/blob/main/Vectara-logo.png?raw=true" width="200px" height="40px">
<h1>{cfg.title}</h1> </center>
'''
cfg.description = f'''<center> <h2>{cfg.description}</h2>
<br>
This demo uses Retrieval Augmented Generation to answer questions about {cfg.source_data_desc}.</center>
'''
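
# Parse the optional comma-separated example prompts into a list.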
if cfg.examples:
    app_examples = [example.strip() for example in cfg.examples.split(",")]
else:
    app_examples = None
logging.debug(f'Examples before function call: {app_examples}; type: {type(app_examples)}')
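
# Build the Gradio chat interface with an initial greeting and optional example prompts.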
demo = gr.ChatInterface(respond, title=cfg.title, description=cfg.description,
                        chatbot=gr.Chatbot(value=[[None, "How may I help you?"]], scale=3),
                        examples=app_examples)
if __name__ == "__main__":
    demo.launch()