import gradio as gr
from huggingface_hub import InferenceClient
from googletrans import Translator
from langdetect import detect

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
translator = Translator()
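
# NOTE (version assumption): the synchronous Translator.translate() calls below
# match googletrans 4.0.0rc1; some newer googletrans releases expose an async
# API instead and would need asyncio/await here.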

def detect_and_translate(text: str, target_lang: str = 'en') -> tuple[str, str]:
    """
    Detect the language of `text` and translate it to `target_lang` if needed.
    Returns a tuple of (translated_text, detected_language).
    """
    try:
        detected_lang = detect(text)
        if detected_lang != target_lang:
            translation = translator.translate(text, dest=target_lang)
            return translation.text, detected_lang
        return text, detected_lang
    except Exception:
        return text, 'en'  # Fall back to the original text if detection/translation fails
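
# Example (illustrative only; actual results depend on langdetect/googletrans):
#   detect_and_translate("Bonjour tout le monde")  ->  ("Hello everyone", "fr")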

def translate_to_original(text: str, original_lang: str) -> str:
    """Translate the model's response back to the original language if needed."""
    if original_lang != 'en':
        try:
            translation = translator.translate(text, dest=original_lang)
            return translation.text
        except Exception:
            return text
    return text

def check_custom_responses(message: str) -> str | None:
    """Check the message for hard-coded patterns and return a canned response, or None."""
    message_lower = message.lower()
    custom_responses = {
        "what is ur name?": "xylaria",
        "what is your name?": "xylaria",
        "what's your name?": "xylaria",
        "whats your name": "xylaria",
        "how many 'r' is in strawberry?": "3",
        "who is your developer?": "sk md saad amin",
        "how many r is in strawberry": "3",
        "who is ur dev": "sk md saad amin",
        "who is ur developer": "sk md saad amin",
    }
    for pattern, response in custom_responses.items():
        if pattern in message_lower:
            return response
    return None
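
# Matching is substring-based, so any message that merely contains one of the
# patterns short-circuits to a canned reply before the model is called, e.g.:
#   check_custom_responses("Hey, what is your name?")     -> 'xylaria'
#   check_custom_responses("Tell me about strawberries")  -> None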

def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # First check for custom (hard-coded) responses
    custom_response = check_custom_responses(message)
    if custom_response:
        yield custom_response
        return

    # Detect the language and translate the message to English if needed
    translated_msg, detected_lang = detect_and_translate(message)

    # Prepare the conversation history
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            # Translate user messages from the history if needed
            trans_user_msg, _ = detect_and_translate(user_msg)
            messages.append({"role": "user", "content": trans_user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": translated_msg})

    # Stream the response from the model
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if not token:  # the final chunk's delta may carry no content
            continue
        response += token
        # Translate the accumulated response if the original message wasn't in English
        if detected_lang != 'en':
            yield translate_to_original(response, detected_lang)
        else:
            yield response
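
# Illustrative variant (an assumption, not part of the original Space): the loop
# above re-translates the accumulated reply on every streamed chunk, i.e. one
# googletrans request per token for non-English conversations. If that overhead
# matters, the chunks can be buffered and translated once at the end, at the
# cost of losing incremental streaming:
def _translate_final_only(stream, detected_lang: str) -> str:
    """Collect every streamed chunk, then translate the finished reply once."""
    full = "".join(chunk.choices[0].delta.content or "" for chunk in stream)
    return translate_to_original(full, detected_lang)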

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(
            value="You are a friendly Chatbot.",
            label="System message",
        ),
        gr.Slider(
            minimum=1,
            maximum=2048,
            value=512,
            step=1,
            label="Max new tokens",
        ),
        gr.Slider(
            minimum=0.1,
            maximum=4.0,
            value=0.7,
            step=0.1,
            label="Temperature",
        ),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)
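
# share=True additionally asks Gradio for a temporary public link when the app
# is run locally; use demo.launch() to keep it local-only.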
if __name__ == "__main__":
    demo.launch(share=True)
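
# Dependencies implied by the imports above (exact versions are an assumption):
#   gradio, huggingface_hub, googletrans==4.0.0rc1, langdetect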