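"""Gradio chat app for Hugging Face Spaces: streams responses from
Meta-Llama-3.1-8B-Instruct via the Hugging Face Inference API
(OpenAI-compatible endpoint) and lets the user export the conversation
as PDF, DOCX, or TXT."""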
import os
import tempfile

import gradio as gr
from openai import OpenAI
from fpdf import FPDF  # For PDF export
from docx import Document  # For DOCX export
css = '''
.gradio-container{max-width: 1000px !important}
h1{text-align:center}
footer {
visibility: hidden
}
'''
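# HF_TOKEN is expected to be set as a Space secret; the OpenAI client talks to
# the Hugging Face serverless Inference API through its OpenAI-compatible route.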
ACCESS_TOKEN = os.getenv("HF_TOKEN")

client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1/",
    api_key=ACCESS_TOKEN,
)
def respond(
    message,
    history,
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion, yielding the partial response as it grows."""
    history = history or []

    # Rebuild the message list from the system prompt and prior turns.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    # The loop variable must not shadow `message`; gr.ChatInterface records
    # the yielded text in its own history, so nothing is appended here.
    for chunk in client.chat.completions.create(
        model="meta-llama/Meta-Llama-3.1-8B-Instruct",
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
        messages=messages,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final streamed chunk may carry no content
            response += token
            yield response
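# Export helpers: `history` arrives as a list of (user, assistant) pairs,
# the value format of the Chatbot component backing gr.ChatInterface.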
def save_as_file(history, conversion_type):
    """Serialize the conversation to PDF, DOCX, or TXT; return the file path."""
    # Interleave turns so the export reads in conversation order rather than
    # all user messages followed by all assistant messages.
    transcript = "\n\n".join(
        f"User: {user_msg}\nAssistant: {assistant_msg}"
        for user_msg, assistant_msg in history
    )
    file_name = None
    if conversion_type == "PDF":
        pdf = FPDF()
        pdf.add_page()
        pdf.set_font("Arial", size=12)
        pdf.multi_cell(0, 10, transcript)
        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf").name
        pdf.output(file_name)
    elif conversion_type == "DOCX":
        doc = Document()
        doc.add_heading("Conversation", 0)
        doc.add_paragraph(transcript)
        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".docx").name
        doc.save(file_name)
    elif conversion_type == "TXT":
        file_name = tempfile.NamedTemporaryFile(delete=False, suffix=".txt").name
        with open(file_name, "w") as f:
            f.write(transcript)
    return file_name
def convert_and_download(history, conversion_type):
    if not history:
        return None
    return save_as_file(history, conversion_type)
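# Build the UI: generation controls on top, the chat interface in the middle,
# and an export row (format picker + download) at the bottom.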
with gr.Blocks(css=css) as demo:
    with gr.Row():
        system_message = gr.Textbox(value="", label="System message")
        max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
        temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
        top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-P")

    chatbot = gr.ChatInterface(
        fn=respond,
        additional_inputs=[system_message, max_tokens, temperature, top_p],
    )

    with gr.Row():
        conversion_type = gr.Dropdown(choices=["PDF", "DOCX", "TXT"], value="PDF", label="Conversion Type")
        download_button = gr.Button("Convert and Download")
        file_output = gr.File()

    # gr.ChatInterface has no `state` parameter; it keeps history in its own
    # Chatbot component, so read the conversation back from `chatbot.chatbot`.
    download_button.click(
        convert_and_download,
        inputs=[chatbot.chatbot, conversion_type],
        outputs=file_output,
    )

if __name__ == "__main__":
    demo.launch()