import streamlit as st
from huggingface_hub import InferenceClient

HF_TOKEN = st.secrets["HF_API_KEY"]

def convert_with_codegen2(code):
    """Convert C# source to Java via hosted models, trying each fallback in turn."""
    prompt = f"Convert the following C# code to Java:\n\n{code}\n\nJava Code:"
    # Primary model: StarCoder.
    try:
        client = InferenceClient("bigcode/starcoder", token=HF_TOKEN)
        response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
        return response.strip()
    except Exception as e1:
        print("❌ StarCoder failed:", e1)
        # First fallback: DeepSeek-Coder.
        try:
            client = InferenceClient("deepseek-ai/deepseek-coder-1.3b-instruct", token=HF_TOKEN)
            response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
            return "[⚠️ Using fallback: DeepSeek-Coder]\n" + response.strip()
        except Exception as e2:
            print("❌ DeepSeek-Coder failed:", e2)
            # Second fallback: CodeGen2-1B.
            try:
                client = InferenceClient("Salesforce/codegen2-1B", token=HF_TOKEN)
                response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
                return "[⚠️ Using fallback: CodeGen2-1B]\n" + response.strip()
            except Exception as e3:
                print("❌ CodeGen2-1B failed:", e3)
                return "🔥 All model servers are currently unavailable. Please try again later!"