import gradio as gr
from openai import OpenAI
import os
from fpdf import FPDF
from docx import Document
# Page-level CSS: widen the app container, center the title, hide the footer.
css = '''
.gradio-container{max-width: 1000px !important}
h1{text-align:center}
footer {
visibility: hidden
}
'''
# Hugging Face Inference API token; expected in the HF_TOKEN environment variable.
ACCESS_TOKEN = os.getenv("HF_TOKEN")
# OpenAI-compatible client pointed at the HF serverless Inference API endpoint.
client = OpenAI(
base_url="https://api-inference.huggingface.co/v1/",
api_key=ACCESS_TOKEN,
)
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    file_format=None,
):
    """Stream a chat completion for *message* given the prior *history*.

    Args:
        message: Latest user message.
        history: Prior turns as (user, assistant) pairs; empty halves skipped.
        system_message: System prompt prepended to the conversation.
        max_tokens: Cap on newly generated tokens.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.
        file_format: Value of the "Save As" radio; unused here, accepted so
            the extra additional_input from the UI doesn't break the call.

    Yields:
        The accumulated response text after each streamed chunk.
    """
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    # Renamed loop variable (was `message`, shadowing the parameter).
    for chunk in client.chat.completions.create(
        model="meta-llama/Meta-Llama-3.1-8B-Instruct",
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
        messages=messages,
    ):
        token = chunk.choices[0].delta.content
        # delta.content is None on some stream events (e.g. role-only or
        # final chunks); the original `response += token` raised TypeError.
        if token:
            response += token
        yield response
def save_to_file(history, file_format):
    """Serialize a chat history to disk in the requested format.

    Args:
        history: List of (user_message, assistant_message) pairs.
        file_format: One of "PDF", "DOCX", or "TXT".

    Returns:
        The name of the file written in the current working directory.

    Raises:
        ValueError: If *file_format* is not a supported format. (The
            original fell through with `file_name` unbound and raised
            UnboundLocalError at the return.)
    """
    if file_format == "PDF":
        pdf = FPDF()
        pdf.add_page()
        pdf.set_auto_page_break(auto=True, margin=15)
        # NOTE(review): the core "Arial" font in fpdf is Latin-1 only;
        # non-Latin chat content may raise — confirm or register a
        # Unicode TTF font if that matters.
        pdf.set_font("Arial", size=12)
        for user_message, assistant_message in history:
            pdf.multi_cell(0, 10, f"User: {user_message}")
            pdf.multi_cell(0, 10, f"Assistant: {assistant_message}")
        file_name = "chat_history.pdf"
        pdf.output(file_name)
    elif file_format == "DOCX":
        doc = Document()
        for user_message, assistant_message in history:
            doc.add_paragraph(f"User: {user_message}")
            doc.add_paragraph(f"Assistant: {assistant_message}")
        file_name = "chat_history.docx"
        doc.save(file_name)
    elif file_format == "TXT":
        file_name = "chat_history.txt"
        # Explicit encoding so output doesn't depend on the platform default.
        with open(file_name, "w", encoding="utf-8") as file:
            for user_message, assistant_message in history:
                file.write(f"User: {user_message}\n")
                file.write(f"Assistant: {assistant_message}\n")
    else:
        raise ValueError(f"Unsupported file format: {file_format}")
    return file_name
def save_conversation(history, file_format):
    """Persist the chat history via save_to_file and return the file name."""
    return save_to_file(history, file_format)
# Chat UI: streams respond() and exposes generation knobs as extra inputs.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        # Fixed syntax error: `maximum 1.0` -> `maximum=1.0`.
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P"),
        gr.Radio(["PDF", "DOCX", "TXT"], label="Save As"),
    ],
    # NOTE(review): the original passed `button_fn=save_conversation`, but
    # gr.ChatInterface accepts no such keyword and raises TypeError at
    # startup. To restore saving, wrap this in gr.Blocks and wire a
    # gr.Button(...).click(save_conversation, ...) handler instead.
    css=css,
    theme="allenai/gradio-theme",
)
if __name__ == "__main__":
    # Removed stray trailing `|` (copy/paste residue) that made this line
    # a syntax error. Launch the Gradio app when run as a script.
    demo.launch()