# NOTE(review): this file originally began with scraped Hugging Face file-viewer
# residue (Space status lines, git blame hashes, and column numbers) that is not
# Python and would raise a SyntaxError; it has been removed.
from huggingface_hub import InferenceClient
from resume import data
import markdowm as md
import gradio as gr
# Hosted inference endpoint for the Zephyr-7B chat model on the HF Inference API.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
# Chatbot response function with integrated system message
def respond(
    message,
    history: list[tuple[str, str]],
    max_tokens=1024,
    temperature=0.7,
    top_p=0.95,
):
    """Stream a chat reply impersonating SARATH, grounded in the resume data.

    Args:
        message: The current user message.
        history: Prior (user, assistant) message pairs from the Gradio chat.
        max_tokens: Maximum number of tokens to generate.
        temperature: Sampling temperature passed to the API.
        top_p: Nucleus-sampling cutoff passed to the API.

    Yields:
        The accumulated response text after each streamed token, so the
        Gradio UI updates live.
    """
    # System message defining assistant behavior; grounds answers in `data`.
    # (Typos in the original prompt — "strightly", "irreverent", "polity" —
    # fixed so the model receives the intended instructions.)
    system_message = {
        "role": "system",
        "content": (
            "Chat as SARATH to the user question. SARATH is a professional "
            "fresher seeking a job and pursuing Btech-4-1. "
            f"Here is about SARATH:```{data}```. You should answer questions "
            "based on this information only and strictly ignore any other "
            "context. If the question is irrelevant to the context=[about "
            "sarath], politely reply the user."
        ),
    }
    messages = [system_message]
    # Replay the conversation history in the chat-completion message format.
    for user_turn, bot_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if bot_turn:
            messages.append({"role": "assistant", "content": bot_turn})
    # The current user input goes last.
    messages.append({"role": "user", "content": message})
    response = ""
    # Stream the response from the API. Loop variable renamed from `message`
    # (the original shadowed the function parameter).
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # The final stream chunk can carry delta.content=None; guard so the
        # concatenation does not raise TypeError.
        if token:
            response += token
        yield response
# Gradio UI: a themed Blocks app with an intro/resume tab and a chat tab.
# (The original comment mentioned "additional sliders", but none are defined.)
with gr.Blocks(theme=gr.themes.Ocean(font=[gr.themes.GoogleFont("Roboto Mono")])) as main:
    # Page title rendered from the markdowm module (md.title).
    gr.Markdown(md.title)
    with gr.Tabs():
        with gr.TabItem("Intro"):
            # Static intro text plus the raw resume contents.
            gr.Markdown(md.description)
            gr.Markdown("# Resume:")
            gr.Markdown(data)
        with gr.TabItem("My2.0"):
            # Chat tab backed by the streaming `respond` generator.
            gr.ChatInterface(respond)
if __name__ == "__main__":
    # share=True publishes a temporary public Gradio link in addition to the
    # local server.
    main.launch(share=True)
|