# Hugging Face Space: side-by-side comparison of OpenRouter-hosted chat models.
import gradio as gr
import requests
import json
import os
# OpenRouter API key, supplied through the Space's secret store.
# NOTE(review): if the secret is missing this is None and every request
# will be sent with "Bearer None" — confirm the secret is configured.
API_KEY = os.getenv("OpenRouter_API_KEY")

# Model identifiers offered in the UI; the ":free" suffixed entries are
# OpenRouter's free-tier variants.
MODEL_OPTIONS = [
    "openai/gpt-4o-mini-2024-07-18",
    "meta-llama/llama-3.1-405b-instruct",
    "nvidia/llama-3.1-nemotron-70b-instruct",
    "qwen/qwen-2.5-7b-instruct",
    "mistralai/mistral-large-2411",
    "microsoft/phi-3-medium-128k-instruct",
    "meta-llama/llama-3.1-405b-instruct:free",
    "nousresearch/hermes-3-llama-3.1-405b:free",
    "mistralai/mistral-7b-instruct:free",
    "microsoft/phi-3-medium-128k-instruct:free",
    "liquid/lfm-40b:free",
]

# Module-level log of every interaction (input, selected models, outputs).
history = []
def generate_comparisons_with_history(input_text, selected_models, history_state):
    """Send *input_text* to each selected OpenRouter model and log the exchange.

    Args:
        input_text: The user prompt forwarded verbatim to every model.
        selected_models: Iterable of OpenRouter model identifiers to query.
        history_state: Gradio state value; ignored on input and replaced by
            the updated module-level history on output.

    Returns:
        A ``(results, history)`` tuple: ``results`` maps each model id to the
        response text (or an ``"Error: ..."`` message), and ``history`` is the
        full interaction log.
    """
    global history
    results = {}
    for model in selected_models:
        payload = {
            "model": model,  # query one model per request
            "messages": [{"role": "user", "content": input_text}],
            "top_p": 1,
            "temperature": 1,
            "frequency_penalty": 0,
            "presence_penalty": 0,
            "repetition_penalty": 1,
            "top_k": 0,
        }
        try:
            # Fix: add a timeout so one hung model cannot freeze the whole
            # app, and use `json=` (which also sets Content-Type) instead of
            # hand-rolled json.dumps + header.
            response = requests.post(
                url="https://openrouter.ai/api/v1/chat/completions",
                headers={"Authorization": f"Bearer {API_KEY}"},
                json=payload,
                timeout=60,
            )
        except requests.RequestException as exc:
            # Fix: connection/timeout errors previously crashed the handler;
            # report them per-model and keep going.
            results[model] = f"Error: request failed ({exc})"
            continue
        if response.status_code == 200:
            try:
                response_json = response.json()
                results[model] = (
                    response_json.get("choices", [{}])[0]
                    .get("message", {})
                    .get("content", "No content returned.")
                )
            except ValueError:
                # ValueError covers both json.JSONDecodeError and
                # requests.exceptions.JSONDecodeError (requests >= 2.27).
                results[model] = "Error: Unable to parse response."
        else:
            results[model] = f"Error: {response.status_code}, {response.text}"
    # Record this interaction in the shared log and hand it back as the
    # new Gradio state.
    history.append(
        {
            "input": input_text,
            "selected_models": selected_models,
            "outputs": results,
        }
    )
    history_state = history
    return results, history_state
def clear_history():
    """Wipe the shared interaction log and return the now-empty list."""
    global history
    history = []
    return history
# Gradio interface: pick models, run one prompt against all of them, and
# keep a browsable history of previous comparisons.
with gr.Blocks() as demo:
    query_box = gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here")
    model_picker = gr.CheckboxGroup(
        choices=MODEL_OPTIONS,
        label="Select Models",
        value=[MODEL_OPTIONS[0]],
    )

    # JSON panels; the elem_ids are targeted by the CSS injected below to
    # make them fixed-height and scrollable.
    comparisons_panel = gr.JSON(label="Model Comparisons", elem_id="output-comparisons")
    history_panel = gr.JSON(label="History", elem_id="output-history")

    # Button that empties the stored history.
    wipe_button = gr.Button("Clear History")
    wipe_button.click(clear_history, outputs=history_panel)

    # Button that fans the prompt out to every selected model.
    run_button = gr.Button("Generate Comparisons")
    run_button.click(
        generate_comparisons_with_history,
        inputs=[query_box, model_picker, gr.State()],
        outputs=[comparisons_panel, history_panel],
    )

    # Inject custom CSS via gr.HTML() to style both JSON panels.
    gr.HTML("""
    <style>
        #output-comparisons {
            height: 300px;
            overflow: auto;
            border: 1px solid #ddd;
            padding: 10px;
        }
        #output-history {
            height: 300px;
            overflow: auto;
            border: 1px solid #ddd;
            padding: 10px;
        }
    </style>
    """)

demo.launch()