import gradio as gr
from huggingface_hub import InferenceClient
from googletrans import Translator
from langdetect import detect

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
translator = Translator()


def detect_and_translate(text: str, target_lang: str = 'en') -> tuple[str, str]:
    """
    Detect the language of `text` and translate it to `target_lang` if needed.
    Returns a tuple of (translated_text, detected_language).
    """
    try:
        detected_lang = detect(text)
        if detected_lang != target_lang:
            translation = translator.translate(text, dest=target_lang)
            return translation.text, detected_lang
        return text, detected_lang
    except Exception:
        # Fall back to the original text if detection or translation fails
        return text, 'en'


def translate_to_original(text: str, original_lang: str) -> str:
    """Translate the response back to the original language if needed."""
    if original_lang != 'en':
        try:
            translation = translator.translate(text, dest=original_lang)
            return translation.text
        except Exception:
            return text
    return text


def check_custom_responses(message: str) -> str | None:
    """Check for specific patterns and return a custom response, or None."""
    message_lower = message.lower()
    custom_responses = {
        "what is ur name?": "xylaria",
        "what is your name?": "xylaria",
        "what's your name?": "xylaria",
        "whats your name": "xylaria",
        "how many 'r' is in strawberry?": "3",
        "who is your developer?": "sk md saad amin",
        "how many r is in strawberry": "3",
        "who is ur dev": "sk md saad amin",
        "who is ur developer": "sk md saad amin",
    }
    for pattern, response in custom_responses.items():
        if pattern in message_lower:
            return response
    return None


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # First check for custom responses
    custom_response = check_custom_responses(message)
    if custom_response:
        yield custom_response
        return

    # Detect language and translate the message to English if needed
    translated_msg, detected_lang = detect_and_translate(message)

    # Prepare conversation history
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            # Translate user messages from history if needed
            trans_user_msg, _ = detect_and_translate(user_msg)
            messages.append({"role": "user", "content": trans_user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    messages.append({"role": "user", "content": translated_msg})

    # Stream the response from the model
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if not token:
            # Some stream chunks (e.g. the final one) carry no content
            continue
        response += token

        # Translate the accumulated response back if the original message
        # wasn't in English; otherwise yield the English text directly
        if detected_lang != 'en':
            yield translate_to_original(response, detected_lang)
        else:
            yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(
            value="You are a friendly Chatbot.",
            label="System message",
        ),
        gr.Slider(
            minimum=1,
            maximum=2048,
            value=512,
            step=1,
            label="Max new tokens",
        ),
        gr.Slider(
            minimum=0.1,
            maximum=4.0,
            value=0.7,
            step=0.1,
            label="Temperature",
        ),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

if __name__ == "__main__":
    demo.launch(share=True)