# llm_chat_transfer/modal_app.py
import modal
import gradio as gr
import os
import requests
import json
from dotenv import load_dotenv
load_dotenv()
# Create Modal app
app = modal.App("llm-conversation-transfer")
# Define image with dependencies
image = modal.Image.debian_slim().pip_install([
    "gradio==4.44.1",
    "requests",
    "fastapi",
    "python-dotenv",  # required: this module imports dotenv at load time inside the container
])
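# The three Modal secrets referenced below are assumed to each expose one
# environment variable (ANTHROPIC_API_KEY, MISTRAL_API_KEY, HYPERBOLIC_API_KEY).
# A sketch of how they might be created with the Modal CLI:
#
#   modal secret create anthropic-api-key ANTHROPIC_API_KEY=sk-ant-...
#   modal secret create mistral-api-key MISTRAL_API_KEY=...
#   modal secret create hyperbolic-api-key HYPERBOLIC_API_KEY=...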
# Function to check API keys
@app.function(
    image=image,
    secrets=[
        modal.Secret.from_name("anthropic-api-key"),
        modal.Secret.from_name("mistral-api-key"),
        modal.Secret.from_name("hyperbolic-api-key"),
    ],
)
def check_api_keys():
    status = {
        "Anthropic": "βœ…" if os.getenv("ANTHROPIC_API_KEY") else "❌",
        "Mistral": "βœ…" if os.getenv("MISTRAL_API_KEY") else "❌",
        "Hyperbolic": "βœ…" if os.getenv("HYPERBOLIC_API_KEY") else "❌",
    }
    return json.dumps(status)
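# Example (illustrative) return value when only the Hyperbolic key is missing:
#   '{"Anthropic": "βœ…", "Mistral": "βœ…", "Hyperbolic": "❌"}'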
# Function to handle conversation transfer
@app.function(
    image=image,
    secrets=[
        modal.Secret.from_name("anthropic-api-key"),
        modal.Secret.from_name("mistral-api-key"),
        modal.Secret.from_name("hyperbolic-api-key"),
    ],
)
def transfer_conversation_backend(
    conversation_text: str,
    source_provider: str,
    target_provider: str,
    source_model: str,
    target_model: str,
):
    try:
        if not conversation_text.strip():
            return "❌ Error: Please provide conversation text", ""
        messages = parse_conversation_text(conversation_text)
        response = get_ai_response(messages, target_provider, target_model)
        return "βœ… Transfer successful!", response
    except Exception as e:
        return f"❌ Error: {str(e)}", ""
def parse_conversation_text(text: str) -> list:
    """Parse 'Human:/User:' and 'Assistant:/AI:' prefixed text into chat messages."""
    lines = text.strip().split("\n")
    messages = []
    current_message = {"role": "", "content": ""}
    for line in lines:
        line = line.strip()
        if not line:
            continue
        if line.startswith(("Human:", "User:")):
            # Only flush the previous message if it has a role; this skips any
            # stray text that appears before the first role marker.
            if current_message["role"] and current_message["content"]:
                messages.append(current_message.copy())
            current_message = {"role": "user", "content": line.split(":", 1)[1].strip()}
        elif line.startswith(("Assistant:", "AI:")):
            if current_message["role"] and current_message["content"]:
                messages.append(current_message.copy())
            current_message = {"role": "assistant", "content": line.split(":", 1)[1].strip()}
        else:
            # Continuation line: append to the message currently being built
            current_message["content"] += " " + line
    if current_message["role"] and current_message["content"]:
        messages.append(current_message)
    return messages
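# Example (illustrative) behavior of the parser above:
#   parse_conversation_text("Human: Hi\nAssistant: Hello!")
#   -> [{"role": "user", "content": "Hi"},
#       {"role": "assistant", "content": "Hello!"}]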
def get_ai_response(messages: list, provider: str, model: str) -> str:
    """Send the parsed messages to the target provider and return its reply."""
    # Mistral and Hyperbolic both expose OpenAI-compatible chat-completions
    # endpoints, so they share a single request path.
    openai_compatible = {
        "Mistral": ("https://api.mistral.ai/v1/chat/completions", "MISTRAL_API_KEY"),
        "Hyperbolic": ("https://api.hyperbolic.xyz/v1/chat/completions", "HYPERBOLIC_API_KEY"),
    }
    try:
        if provider in openai_compatible:
            url, key_name = openai_compatible[provider]
            headers = {
                "Content-Type": "application/json",
                "Authorization": f"Bearer {os.getenv(key_name)}",
            }
            data = {"model": model, "messages": messages, "max_tokens": 1000}
            response = requests.post(url, headers=headers, json=data, timeout=60)
            response.raise_for_status()
            return response.json()["choices"][0]["message"]["content"]
        elif provider == "Anthropic":
            # Placeholder: the Anthropic call is not implemented yet.
            return "Simulated Claude response."
        else:
            return f"Unsupported provider: {provider}"
    except Exception as e:
        raise RuntimeError(f"Failed to get response from {provider}: {e}") from e
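# A minimal sketch of what a real Anthropic branch could look like, using the
# public Messages API. The header names, the "anthropic-version" value, and the
# response shape below follow Anthropic's documented REST API; max_tokens=1000
# mirrors the other providers. Untested here, so treat it as a starting point.
#
# def get_anthropic_response(messages: list, model: str) -> str:
#     headers = {
#         "x-api-key": os.getenv("ANTHROPIC_API_KEY"),
#         "anthropic-version": "2023-06-01",
#         "content-type": "application/json",
#     }
#     data = {"model": model, "max_tokens": 1000, "messages": messages}
#     response = requests.post(
#         "https://api.anthropic.com/v1/messages", headers=headers, json=data, timeout=60
#     )
#     response.raise_for_status()
#     return response.json()["content"][0]["text"]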
# Create the Gradio interface
def create_interface():
    with gr.Blocks() as demo:
        gr.Markdown("# πŸ”„ LLM Conversation Transfer Tool")
        with gr.Row():
            api_status_display = gr.Textbox(label="πŸ”‘ API Keys Status", interactive=False)
            check_status_btn = gr.Button("πŸ”„ Check Status")
        with gr.Row():
            source_provider = gr.Dropdown(
                choices=["Anthropic", "Mistral", "Hyperbolic"],
                value="Anthropic",
                label="Source Provider",
            )
            source_model = gr.Textbox(value="claude-3-sonnet-20240229", label="Source Model")
            target_provider = gr.Dropdown(
                choices=["Anthropic", "Mistral", "Hyperbolic"],
                value="Mistral",
                label="Target Provider",
            )
            target_model = gr.Textbox(value="mistral-large-latest", label="Target Model")
        conversation_input = gr.Textbox(lines=12, label="Conversation Text")
        transfer_btn = gr.Button("πŸ”„ Transfer Conversation")
        status_output = gr.Textbox(label="πŸ“Š Transfer Status", interactive=False)
        response_output = gr.Textbox(label="πŸ€– AI Response", interactive=False)

        # Both callbacks invoke the Modal backend functions remotely
        async def check_keys():
            return await check_api_keys.remote.aio()

        async def transfer_convo(conv_text, src_prov, tgt_prov, src_model, tgt_model):
            return await transfer_conversation_backend.remote.aio(
                conv_text, src_prov, tgt_prov, src_model, tgt_model
            )

        check_status_btn.click(fn=check_keys, outputs=api_status_display)
        transfer_btn.click(
            fn=transfer_convo,
            inputs=[conversation_input, source_provider, target_provider, source_model, target_model],
            outputs=[status_output, response_output],
        )
        demo.load(fn=check_keys, outputs=api_status_display)
    return demo
# ASGI app implementation
@app.function(image=image, timeout=300)
@modal.asgi_app()
def fastapi_app():
    from fastapi import FastAPI

    # Use a distinct name so the FastAPI instance does not shadow the Modal app
    web_app = FastAPI()
    gradio_app = create_interface()
    return gr.mount_gradio_app(web_app, gradio_app, path="/")
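# To try this out (standard Modal CLI commands):
#
#   modal serve modal_app.py    # live-reloading dev server
#   modal deploy modal_app.py   # persistent deployment
#
# Modal prints the public URL for fastapi_app once the app is running.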