# Import the Gradio library for creating the web interface
import gradio as gr
# Import the InferenceClient from huggingface_hub to interact with the language model
from huggingface_hub import InferenceClient

# --- Configuration Constants ---
# Hard cap on the number of tokens requested per completion.
FIXED_MAX_TOKENS = 1000

# --- Initialize the InferenceClient ---
# NOTE(review): this base_url already includes the '/v1/chat/completions'
# path; InferenceClient usually expects just the API root and appends the
# route itself — confirm the server behind this tunnel expects the full path.
API_BASE_URL = "https://vulture-awake-probably.ngrok-free.app/v1/chat/completions"

try:
    client = InferenceClient(base_url=API_BASE_URL)
    print(f"InferenceClient initialized with base_url: {API_BASE_URL}")
except Exception as e:
    print(f"Error initializing InferenceClient with base_url '{API_BASE_URL}': {e}")
    raise RuntimeError(
        "Could not initialize InferenceClient. "
        f"Please check the API base URL ('{API_BASE_URL}') and ensure the server is accessible. "
        f"Error: {e}"
    )


# --- Core Chatbot Logic ---
def respond(message, history):
    """Stream a chat completion for *message*, given the prior *history*.

    Args:
        message: The latest user message as a string.
        history: Prior conversation turns. Both Gradio history formats are
            accepted: a list of ``(user, assistant)`` pairs (older Gradio),
            or a list of ``{"role": ..., "content": ...}`` dicts (newer
            Gradio "messages" format).

    Yields:
        The progressively accumulated assistant response text. On failure,
        a single human-readable error message is yielded instead.
    """
    messages = []
    for turn in history:
        if isinstance(turn, dict):
            # OpenAI-style message dict — forward it if it has content.
            if turn.get("content"):
                messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            # (user, assistant) tuple — either side may be None/empty.
            user_message, ai_message = turn
            if user_message:
                messages.append({"role": "user", "content": user_message})
            if ai_message:
                messages.append({"role": "assistant", "content": ai_message})
    messages.append({"role": "user", "content": message})

    response_text = ""
    try:
        stream = client.chat_completion(
            messages=messages,
            max_tokens=FIXED_MAX_TOKENS,
            stream=True,
        )
        for chunk in stream:
            # Guard against keep-alive / empty delta chunks in the stream.
            if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content is not None:
                token = chunk.choices[0].delta.content
                response_text += token
                yield response_text
    except Exception as e:
        # Surface inference failures to the UI rather than crashing the app.
        error_message = f"An error occurred during model inference: {e}"
        print(error_message)
        yield error_message


# --- Gradio Interface Definition ---

# URL for the header image
header_image_path = "https://cdn-uploads.huggingface.co/production/uploads/6540a02d1389943fef4d2640/j61iZTDaK9g0UW3aWGwWi.gif"

# Ko-fi widget script (original)
kofi_script_original = """ """

# Ko-fi button HTML
kofi_button_html = """
Buy Me a Coffee at ko-fi.com
"""

# HTML, CSS, and JavaScript for Matrix Rain
matrix_rain_html_script = """ """

# Combine Ko-fi script with Matrix Rain script for the 'head'
combined_head_script = kofi_script_original + matrix_rain_html_script

# Create a Gradio Blocks layout.
# Use a dark theme and add the combined scripts to the head.
# theme = gr.themes.Soft()  # Original theme
theme = gr.themes.Base(primary_hue=gr.themes.colors.green, neutral_hue=gr.themes.colors.slate).dark()  # Example dark theme

with gr.Blocks(theme=theme, head=combined_head_script) as demo:
    # Display an image at the top
    gr.Image(
        value=header_image_path,
        label="Chatbot Header",
        show_label=False,
        interactive=False,
        height=150,
        elem_id="chatbot-logo",
    )

    # Create the chat interface component
    gr.ChatInterface(
        fn=respond,
        chatbot=gr.Chatbot(
            height=650,
            # elem_id="chatbot_messages_container"  # You can add an ID for more specific CSS
        ),
        # title="XORTRON AI",  # You can set a title
        # description="Engage with the digital rain."  # And a description
    )

    # Add the Ko-fi button at the bottom
    gr.HTML(kofi_button_html)

# --- Application Entry Point ---
if __name__ == "__main__":
    try:
        demo.launch(show_api=False, share=False)
    except NameError as ne:
        print(f"Gradio demo could not be launched. 'client' might not have been initialized: {ne}")
    except RuntimeError as rt_err:  # renamed from 're' to avoid shadowing the stdlib module name
        print(f"Gradio demo could not be launched due to an error during client initialization: {rt_err}")
    except Exception as e:
        print(f"An unexpected error occurred when trying to launch Gradio demo: {e}")