import streamlit as st
from huggingface_hub import InferenceClient

# Hugging Face API token stored in the Space's Streamlit secrets
HF_TOKEN = st.secrets["HF_API_KEY"]


def convert_with_codegen2(code):
    """Convert C# code to Java, falling back through several hosted models."""
    prompt = f"Convert the following C# code to Java:\n\n{code}\n\nJava Code:"
    try:
        # Primary model: StarCoder
        client = InferenceClient("bigcode/starcoder", token=HF_TOKEN)
        response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
        return response.strip()
    except Exception as e1:
        print("❌ StarCoder failed:", e1)
        try:
            # First fallback: DeepSeek-Coder
            client = InferenceClient("deepseek-ai/deepseek-coder-1.3b-instruct", token=HF_TOKEN)
            response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
            return "[⚠️ Using fallback: DeepSeek-Coder]\n" + response.strip()
        except Exception as e2:
            print("❌ DeepSeek-Coder failed:", e2)
            try:
                # Second fallback: CodeGen2-1B
                client = InferenceClient("Salesforce/codegen2-1B", token=HF_TOKEN)
                response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
                return "[⚠️ Using fallback: CodeGen2-1B]\n" + response.strip()
            except Exception as e3:
                print("❌ CodeGen2-1B failed:", e3)
                return "💥 All model servers are currently unavailable. Please try again later!"