import gradio as gr
from huggingface_hub import InferenceClient
from deep_translator import GoogleTranslator
from indic_transliteration import sanscript
from indic_transliteration.detect import detect as detect_script
from indic_transliteration.sanscript import transliterate
import langdetect
import re

# Initialize clients
text_client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
image_client = InferenceClient("SG161222/RealVisXL_V3.0")
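# Both clients call the hosted Hugging Face Inference API. Depending on model
# gating and rate limits you may need to authenticate, e.g.
#   InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=os.environ.get("HF_TOKEN"))
# (the HF_TOKEN env-var name is just a convention; any valid access token works).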

def detect_language_script(text: str) -> tuple[str, str]:
    """Detect language and script of the input text.
    Returns (language_code, script_type)"""
    try:
        # Use confidence threshold to avoid false detections
        lang_detect = langdetect.detect_langs(text)
        if lang_detect[0].prob > 0.8:
            # Only accept high confidence detections
            lang = lang_detect[0].lang
        else:
            lang = 'en'  # Default to English if unsure
        
        script = None
        try:
            script = detect_script(text)
        except Exception:
            # Script detection may fail for some inputs; fall back to None.
            pass
        return lang, script
    except Exception:
        # If language detection itself fails, default to English.
        return 'en', None

def is_romanized_indic(text: str) -> bool:
    """Check if text appears to be romanized Indic language.
    More strict pattern matching."""
    # Common Bengali romanized patterns with word boundaries
    bengali_patterns = [
        r'\b(ami|tumi|apni)\b',  # Common pronouns
        r'\b(ache|achen|thako|thaken)\b',  # Common verbs
        r'\b(kemon|bhalo|kharap)\b',  # Common adjectives
        r'\b(ki|kothay|keno)\b'  # Common question words
    ]
    
    # Require multiple matches to confirm it's actually Bengali
    text_lower = text.lower()
    matches = sum(1 for pattern in bengali_patterns if re.search(pattern, text_lower))
    return matches >= 2  # Require at least 2 matches to consider it Bengali
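
# Example: is_romanized_indic("ami bhalo achi") matches the 'ami' and 'bhalo'
# patterns (two hits), so it returns True; an English message like "hello there"
# matches nothing and returns False.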

def translate_text(text: str, target_lang='en') -> tuple[str, str, bool]:
    """Translate text to target language, with more conservative translation logic."""
    # Skip translation for very short inputs or basic greetings
    if len(text.split()) <= 2 or text.lower() in ['hello', 'hi', 'hey']:
        return text, 'en', False
    
    original_lang, script = detect_language_script(text)
    is_transliterated = False
    
    # Only process if confident it's non-English
    if original_lang != 'en' and len(text.split()) > 2:
        try:
            translator = GoogleTranslator(source='auto', target=target_lang)
            translated = translator.translate(text)
            return translated, original_lang, is_transliterated
        except Exception as e:
            print(f"Translation error: {e}")
            return text, 'en', False
    
    # Check for romanized Indic text only if it's a longer input
    if original_lang == 'en' and len(text.split()) > 2 and is_romanized_indic(text):
        text = romanized_to_bengali(text)
        return translate_text(text, target_lang)  # Recursive call with Bengali script
    
    return text, 'en', False
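
# Rough illustration (actual output depends on Google Translate): a French input
# such as "bonjour tout le monde, comment allez-vous" is detected as 'fr' and
# returned as something like ("hello everyone, how are you", "fr", False), while
# a short greeting like "hi" is passed through unchanged as ("hi", "en", False).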

def check_custom_responses(message: str) -> str:
    """Check for specific patterns and return custom responses."""
    message_lower = message.lower()
    custom_responses = {
        "what is ur name?": "xylaria",
        "what is your name?": "xylaria",
        "what's your name?": "xylaria",
        "whats your name": "xylaria",
        "how many 'r' is in strawberry?": "3",
        "who is your developer?": "sk md saad amin",
        "how many r is in strawberry": "3",
        "who is ur dev": "sk md saad amin",
        "who is ur developer": "sk md saad amin",
    }
    for pattern, response in custom_responses.items():
        if pattern in message_lower:
            return response
    return None
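
# Example: check_custom_responses("hey, what is your name?") returns "xylaria"
# because the lowercased message contains the "what is your name?" pattern;
# messages that match no pattern return None.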

def is_image_request(message: str) -> bool:
    """Detect if the message is requesting image generation."""
    image_triggers = [
        "generate an image",
        "create an image",
        "draw",
        "make a picture",
        "generate a picture",
        "create a picture",
        "generate art",
        "create art",
        "make art",
        "visualize",
        "show me",
    ]
    message_lower = message.lower()
    return any(trigger in message_lower for trigger in image_triggers)
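
# Example: is_image_request("can you draw a sunset over the sea") is True (the
# "draw" trigger matches); note that broad triggers like "show me" will also
# route ordinary requests such as "show me an example" to image generation.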

def generate_image(prompt: str):
    """Generate an image using the RealVisXL text-to-image model."""
    try:
        # InferenceClient.text_to_image takes generation settings as keyword
        # arguments and returns a PIL image.
        response = image_client.text_to_image(
            prompt,
            negative_prompt="blurry, bad quality, nsfw",
            num_inference_steps=30,
            guidance_scale=7.5,
        )
        # Saving or encoding the image for display is left to the caller
        # (see the save_generated_image sketch after this function).
        return response
    except Exception as e:
        print(f"Image generation error: {e}")
        return None
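
# A minimal sketch (not wired into the app) of one way to persist the PIL image
# that InferenceClient.text_to_image returns, so the chat UI can be handed a file
# path. The helper name and the tempfile approach are illustrative assumptions.
def save_generated_image(image):
    """Save a PIL image to a temporary PNG file and return its path (or None)."""
    import tempfile
    if image is None:
        return None
    tmp = tempfile.NamedTemporaryFile(suffix=".png", delete=False)
    tmp.close()
    image.save(tmp.name)  # PIL.Image.Image.save writes the file to disk
    return tmp.name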

def romanized_to_bengali(text: str) -> str:
    """Convert romanized Bengali text to Bengali script."""
    bengali_mappings = {
        'ami': 'আমি',
        'tumi': 'তুমি',
        'apni': 'আপনি',
        'kemon': 'কেমন',
        'achen': 'আছেন',
        'acchen': 'আছেন',
        'bhalo': 'ভালো',
        'achi': 'আছি',
        'ki': 'কি',
        'kothay': 'কোথায়',
        'keno': 'কেন',
    }
    
    text_lower = text.lower()
    for roman, bengali in bengali_mappings.items():
        text_lower = re.sub(r'\b' + roman + r'\b', bengali, text_lower)
    
    if text_lower == text.lower():
        try:
            return transliterate(text, sanscript.ITRANS, sanscript.BENGALI)
        except Exception:
            return text
            
    return text_lower
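
# Example: romanized_to_bengali("ami bhalo achi") maps every word via the table
# above and returns "আমি ভালো আছি"; text with no mapped words falls back to
# ITRANS transliteration (or is returned unchanged if that raises).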

def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    # First check for custom responses
    custom_response = check_custom_responses(message)
    if custom_response:
        yield custom_response
        return

    # Check if this is an image generation request
    if is_image_request(message):
        try:
            image = generate_image(message)
            if image:
                yield f"Here's your generated image based on: {message}"
                # You'll need to implement the actual image display logic
                # depending on your Gradio interface requirements
                return
            else:
                yield "Sorry, I couldn't generate the image. Please try again."
                return
        except Exception as e:
            yield f"An error occurred while generating the image: {str(e)}"
            return

    # Handle translation with more conservative approach
    translated_msg, original_lang, was_transliterated = translate_text(message)

    # Prepare conversation history - only translate if necessary
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if val[0]:
            # Only translate longer messages
            if len(val[0].split()) > 2:
                trans_user_msg, _, _ = translate_text(val[0])
                messages.append({"role": "user", "content": trans_user_msg})
            else:
                messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    
    messages.append({"role": "user", "content": translated_msg})

    # Get response from model
    response = ""
    for chunk in text_client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # The loop variable must not shadow the `message` argument,
        # which is reused below to decide whether to translate back.
        token = chunk.choices[0].delta.content
        if token:  # some stream chunks carry an empty/None delta
            response += token

    # Only translate back if the original was definitely non-English
    if original_lang != 'en' and len(message.split()) > 2:
        try:
            translator = GoogleTranslator(source='en', target=original_lang)
            translated_response = translator.translate(response)
            yield translated_response
        except Exception:
            yield response
    else:
        yield response
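
# Design note: the loop above accumulates the full completion before yielding so
# that non-English conversations can be translated back as a single string. For
# token-by-token streaming of English replies you could instead yield the growing
# `response` inside the loop, at the cost of skipping the back-translation step.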

# Gradio chat interface (image requests are currently acknowledged in text only;
# see the notes in respond/generate_image for wiring up actual image display)
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(
            value="You are a friendly Chatbot who always responds in English unless the user specifically uses another language.",
            label="System message"
        ),
        gr.Slider(
            minimum=1,
            maximum=2048,
            value=512,
            step=1,
            label="Max new tokens"
        ),
        gr.Slider(
            minimum=0.1,
            maximum=4.0,
            value=0.7,
            step=0.1,
            label="Temperature"
        ),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)"
        ),
    ]
)

if __name__ == "__main__":
    demo.launch(share=True)