# LLM Conversation Transfer Tool — Modal + Gradio app.
# Transfers a pasted Human/Assistant transcript to another LLM provider.
import modal
import gradio as gr
import os
import requests
import json
from fastapi import Request
from dotenv import load_dotenv
load_dotenv()
# Create the Modal application that owns every remote function below.
app = modal.App("llm-conversation-transfer")
# Container image for the remote functions: Debian slim plus the packages
# needed by the UI (gradio), the provider HTTP calls (requests), and the
# ASGI wrapper (fastapi).
image = modal.Image.debian_slim().pip_install([
"gradio==4.44.1",
"requests",
"fastapi"
])
# Function to check API keys
@app.function(
image=image,
secrets=[
modal.Secret.from_name("anthropic-api-key"),
modal.Secret.from_name("mistral-api-key"),
modal.Secret.from_name("hyperbolic-api-key")
]
)
def check_api_keys():
    """Report which provider API keys are present in the environment.

    Returns:
        A JSON string mapping provider name to a status emoji
        ('✅' if the key env var is set, '❌' otherwise).
    """
    # NOTE(review): the original literals were mojibake-garbled (a broken
    # multi-byte emoji split across lines, making this a syntax error);
    # restored to the evidently intended check/cross marks.
    status = {
        'Anthropic': '✅' if os.getenv('ANTHROPIC_API_KEY') else '❌',
        'Mistral': '✅' if os.getenv('MISTRAL_API_KEY') else '❌',
        'Hyperbolic': '✅' if os.getenv('HYPERBOLIC_API_KEY') else '❌'
    }
    return json.dumps(status)
# Function to handle conversation transfer
@app.function(
image=image,
secrets=[
modal.Secret.from_name("anthropic-api-key"),
modal.Secret.from_name("mistral-api-key"),
modal.Secret.from_name("hyperbolic-api-key")
]
)
def transfer_conversation_backend(conversation_text: str, source_provider: str, target_provider: str, source_model: str, target_model: str):
    """Parse a pasted transcript and replay it against the target provider.

    Args:
        conversation_text: Raw 'Human:/Assistant:' style transcript.
        source_provider: Provider the conversation came from (not used by the
            transfer itself; kept for the UI contract).
        target_provider: Provider to send the parsed conversation to.
        source_model: Source model name (unused here; kept for the UI contract).
        target_model: Model identifier for the target provider.

    Returns:
        Tuple of (status_message, ai_response); on failure the response is "".
    """
    # NOTE(review): status-emoji literals were mojibake-garbled in the
    # original (broken multi-byte characters); restored to ✅/❌.
    try:
        if not conversation_text.strip():
            return "❌ Error: Please provide conversation text", ""
        messages = parse_conversation_text(conversation_text)
        response = get_ai_response(messages, target_provider, target_model)
        return "✅ Transfer successful!", response
    except Exception as e:
        return f"❌ Error: {str(e)}", ""
def parse_conversation_text(text: str) -> list:
    """Parse a plain-text transcript into chat-API message dicts.

    Lines beginning with 'Human:'/'User:' start a user message; lines
    beginning with 'Assistant:'/'AI:' start an assistant message.  Other
    non-empty lines continue the current message (joined with a space).

    Fix over the original: text appearing before any role marker used to be
    emitted as a message with an empty role (invalid for every chat API);
    such leading text is now ignored.

    Args:
        text: Raw transcript text.

    Returns:
        List of {'role': ..., 'content': ...} dicts, in transcript order.
    """
    messages = []
    current = {"role": "", "content": ""}
    for raw_line in text.strip().split('\n'):
        line = raw_line.strip()
        if not line:
            continue
        if line.startswith(('Human:', 'User:')):
            if current["role"] and current["content"]:
                messages.append(current)
            current = {"role": "user", "content": line.split(':', 1)[1].strip()}
        elif line.startswith(('Assistant:', 'AI:')):
            if current["role"] and current["content"]:
                messages.append(current)
            current = {"role": "assistant", "content": line.split(':', 1)[1].strip()}
        elif current["role"]:
            # Continuation of the current message; only valid once a role
            # marker has been seen.
            current["content"] += " " + line
    if current["role"] and current["content"]:
        messages.append(current)
    return messages
def get_ai_response(messages: list, provider: str, model: str) -> str:
    """Send a chat-completion request to the target provider.

    Args:
        messages: Chat messages as [{'role': ..., 'content': ...}, ...].
        provider: One of 'Anthropic', 'Mistral', 'Hyperbolic'.
        model: Provider-specific model identifier.

    Returns:
        The assistant reply text, a simulated reply for Anthropic (not yet
        implemented), or an 'Unsupported provider' message.

    Raises:
        Exception: Wrapping any transport/HTTP/parse failure so callers get a
            single error type carrying the provider name.
    """
    # Mistral and Hyperbolic expose the same OpenAI-compatible endpoint
    # shape, so one code path serves both (the original duplicated it).
    openai_compatible = {
        "Mistral": ("https://api.mistral.ai/v1/chat/completions", "MISTRAL_API_KEY"),
        "Hyperbolic": ("https://api.hyperbolic.xyz/v1/chat/completions", "HYPERBOLIC_API_KEY"),
    }
    try:
        if provider in openai_compatible:
            url, key_env = openai_compatible[provider]
            headers = {
                "Content-Type": "application/json",
                "Authorization": f"Bearer {os.getenv(key_env)}"
            }
            data = {
                "model": model,
                "messages": messages,
                "max_tokens": 1000
            }
            # Explicit timeout: requests.post never times out by default,
            # which could hang the worker indefinitely on a stuck provider.
            response = requests.post(url, headers=headers, json=data, timeout=60)
            response.raise_for_status()
            return response.json()["choices"][0]["message"]["content"]
        elif provider == "Anthropic":
            # Placeholder: real Anthropic integration not implemented yet.
            return "Simulated Claude response."
        else:
            return f"Unsupported provider: {provider}"
    except Exception as e:
        raise Exception(f"Failed to get response from {provider}: {str(e)}")
# Create the Gradio interface
def create_interface():
    """Build and return the Gradio Blocks UI for the transfer tool.

    Wires the two Modal backend functions (key check, conversation transfer)
    to the buttons through thin async handlers.
    """
    with gr.Blocks() as demo:
        gr.Markdown("# π LLM Conversation Transfer Tool")

        with gr.Row():
            status_box = gr.Textbox(label="π API Keys Status", interactive=False)
            refresh_btn = gr.Button("π Check Status")

        with gr.Row():
            src_provider_dd = gr.Dropdown(
                choices=["Anthropic", "Mistral", "Hyperbolic"],
                value="Anthropic",
                label="Source Provider",
            )
            src_model_box = gr.Textbox(value="claude-3-sonnet-20240229", label="Source Model")
            tgt_provider_dd = gr.Dropdown(
                choices=["Anthropic", "Mistral", "Hyperbolic"],
                value="Mistral",
                label="Target Provider",
            )
            tgt_model_box = gr.Textbox(value="mistral-large-latest", label="Target Model")

        convo_box = gr.Textbox(lines=12, label="Conversation Text")
        run_btn = gr.Button("π Transfer Conversation")
        transfer_status_box = gr.Textbox(label="π Transfer Status", interactive=False)
        reply_box = gr.Textbox(label="π€ AI Response", interactive=False)

        async def on_check_keys():
            # Invoke the Modal function remotely; returns its JSON status string.
            return await check_api_keys.remote.aio()

        async def on_transfer(text, src_prov, tgt_prov, src_model, tgt_model):
            # Forward all UI values to the Modal backend; yields (status, reply).
            return await transfer_conversation_backend.remote.aio(
                text, src_prov, tgt_prov, src_model, tgt_model
            )

        refresh_btn.click(fn=on_check_keys, outputs=status_box)
        run_btn.click(
            fn=on_transfer,
            inputs=[convo_box, src_provider_dd, tgt_provider_dd, src_model_box, tgt_model_box],
            outputs=[transfer_status_box, reply_box],
        )
        # Populate the key-status box once when the page first loads.
        demo.load(fn=on_check_keys, outputs=status_box)
    return demo
# ASGI app implementation
@app.function(image=image, timeout=300)
@modal.asgi_app()
def fastapi_app():
    """Serve the Gradio UI as an ASGI application on Modal."""
    from fastapi import FastAPI

    # Named web_app to avoid shadowing the module-level Modal `app`.
    web_app = FastAPI()
    # Mount the Gradio Blocks UI at the web root and hand the ASGI app to Modal.
    return gr.mount_gradio_app(web_app, create_interface(), path="/")