|
import streamlit as st |
|
from huggingface_hub import InferenceClient |
|
|
|
# Hugging Face API token pulled from Streamlit's secrets store
# (.streamlit/secrets.toml or the deployed app's Secrets settings).
HF_TOKEN = st.secrets["HF_API_KEY"]
|
|
|
def convert_with_codegen2(code):
    """Convert C# source code to Java via the Hugging Face Inference API.

    Tries a sequence of hosted code models in order (StarCoder first, then
    two fallbacks) and returns the first successful generation. Fallback
    results are prefixed with a warning banner so the UI can show which
    model actually answered.

    Args:
        code: C# source code as a string.

    Returns:
        The generated Java code (stripped), possibly prefixed with a
        fallback notice, or a user-facing error message if every model
        attempt fails. Never raises.
    """
    prompt = f"Convert the following C# code to Java:\n\n{code}\n\nJava Code:"

    # (model id, label used in the failure log, prefix added to the result).
    # The primary model gets no prefix; fallbacks announce themselves.
    # NOTE: despite the function name, codegen2 is the *last* fallback.
    candidates = [
        ("bigcode/starcoder", "StarCoder", ""),
        (
            "deepseek-ai/deepseek-coder-1.3b-instruct",
            "DeepSeek-Coder",
            "[β οΈ Using fallback: DeepSeek-Coder]\n",
        ),
        (
            "Salesforce/codegen2-1B",
            "CodeGen2-1B",
            "[β οΈ Using fallback: CodeGen2-1B]\n",
        ),
    ]

    for model_id, label, prefix in candidates:
        try:
            client = InferenceClient(model_id, token=HF_TOKEN)
            response = client.text_generation(
                prompt, max_new_tokens=512, temperature=0.3, top_p=0.95
            )
            return prefix + response.strip()
        except Exception as exc:
            # Best-effort fallback chain: log and move on to the next model.
            print(f"β {label} failed:", exc)

    return "π₯ All model servers are currently unavailable. Please try again later!"