import gradio as gr
import requests
import json
import os

# OpenRouter API key, read from the environment.
API_KEY = os.getenv("OPENROUTER_API_KEY")

# OpenRouter model IDs to compare; adjust this list as models are added or retired.
MODEL_OPTIONS = [
    "openai/gpt-4o-mini-2024-07-18",
    "openai/gpt-4",
    "anthropic/claude-2",
    "cohere/command-xlarge-nightly",
]


def generate_comparisons(input_text, selected_models):
    """Send the same prompt to each selected model and return the replies keyed by model ID."""
    results = {}
    for model in selected_models:
        response = requests.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {API_KEY}",
                "Content-Type": "application/json",
            },
            data=json.dumps({
                "model": model,
                "messages": [{"role": "user", "content": input_text}],
                "top_p": 1,
                "temperature": 1,
                "frequency_penalty": 0,
                "presence_penalty": 0,
                "repetition_penalty": 1,
                "top_k": 0,
            }),
            timeout=60,  # fail rather than hang if OpenRouter does not respond
        )

        if response.status_code == 200:
            try:
                response_json = response.json()
                results[model] = (
                    response_json.get("choices", [{}])[0]
                    .get("message", {})
                    .get("content", "No content returned.")
                )
            except json.JSONDecodeError:
                results[model] = "Error: Unable to parse response."
        else:
            results[model] = f"Error: {response.status_code}, {response.text}"

    return results
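
# A quick manual check (assumes OPENROUTER_API_KEY is set and the model IDs in
# MODEL_OPTIONS are still served by OpenRouter):
#     generate_comparisons("Say hello in French.", ["openai/gpt-4o-mini-2024-07-18"])
#     # -> {"openai/gpt-4o-mini-2024-07-18": "<model reply>"}
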
# Gradio UI: a prompt box and a model selector; results are shown as JSON keyed by model.
iface = gr.Interface(
    fn=generate_comparisons,
    inputs=[
        gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here"),
        gr.CheckboxGroup(choices=MODEL_OPTIONS, label="Select Models", value=[MODEL_OPTIONS[0]]),
    ],
    outputs=gr.JSON(label="Model Comparisons"),
    title="Compare Outputs from Multiple Models",
)

# launch() serves the app locally by default; pass share=True for a temporary public URL.
iface.launch()