import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Load your fine-tuned model and tokenizer
model = AutoModelForCausalLM.from_pretrained(
    "hackergeek/gemma-finetuned",
    torch_dtype=torch.float16,
    device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained("hackergeek/gemma-finetuned")
tokenizer.pad_token = tokenizer.eos_token


def format_prompt(message, history):
    """Format the prompt with conversation history"""
    system_prompt = (
        "You are a knowledgeable space expert assistant. Answer questions about "
        "astronomy, space exploration, and related topics in a clear and engaging manner."
    )
    prompt = f"{system_prompt}\n"
    for user_msg, bot_msg in history:
        prompt += f"{user_msg}\n{bot_msg}\n"
    prompt += f"{message}\n"
    return prompt


def respond(message, history):
    # Format the prompt with conversation history
    full_prompt = format_prompt(message, history)

    # Tokenize input
    inputs = tokenizer(full_prompt, return_tensors="pt", add_special_tokens=False).to(model.device)

    # Generate response
    outputs = model.generate(
        **inputs,
        max_new_tokens=1024,
        temperature=0.7,
        top_p=0.9,
        repetition_penalty=1.1,
        do_sample=True
    )

    # Decode and extract only the new response
    response = tokenizer.decode(outputs[0][inputs.input_ids.shape[1]:], skip_special_tokens=True)

    return response


# Custom CSS for space theme
space_css = """
.gradio-container {
    background: linear-gradient(45deg, #000000, #1a1a2e);
    color: white;
}
.chatbot {
    background-color: rgba(0, 0, 0, 0.7) !important;
    border: 1px solid #4a4a4a !important;
}
"""

# Create the interface
with gr.Blocks(css=space_css, theme=gr.themes.Default(primary_hue="blue", secondary_hue="purple")) as demo:
    gr.Markdown("# 🚀 Space Explorer Chatbot 🌌")
    gr.Markdown("Ask me anything about space! Planets, stars, galaxies, or space exploration!")

    chatbot = gr.ChatInterface(
        respond,
        examples=[
            "Explain black holes in simple terms",
            "What's the latest news about Mars exploration?",
            "How do stars form?",
            "Tell me about the James Webb Space Telescope"
        ],
        retry_btn=None,
        undo_btn=None,
        clear_btn="Clear History",
    )
    chatbot.chatbot.height = 600

if __name__ == "__main__":
    demo.launch(share=True)