AGayathriiii committed · verified
Commit 6ca36e0 · 1 Parent(s): 130c066

Upload 8 files
Procfile ADDED
@@ -0,0 +1 @@
+ web: streamlit run app.py
README.md CHANGED
@@ -1,12 +1,20 @@
- ---
- title: DotNetJavaCodeConversion
- emoji: 📚
- colorFrom: green
- colorTo: yellow
- sdk: streamlit
- sdk_version: 1.44.1
- app_file: app.py
- pinned: false
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # 🦄 The Enchanted Code Converter: .NET to Java
+
+ A Streamlit app that uses Hugging Face LLMs to convert C# code into Java.
+
+ ## How it Works
+
+ 1. Choose a model (Mistral, Mixtral, CodeGen2).
+ 2. Paste your C# code.
+ 3. Let the model convert it to Java using magic ✨
+
+ ## Secrets
+
+ This app uses an `HF_API_KEY` token stored in Hugging Face Spaces Secrets. To add it, go to:
+ - `Settings > Secrets > Add new secret`
+ - Key: `HF_API_KEY`
+ - Value: *your Hugging Face token*
+
+ ## Made with ❤️ by Aarvii (CEO AAYUVV)
+
+ ---
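The backend modules in this commit read that secret with `st.secrets["HF_API_KEY"]`, which only resolves once the key above is configured (on Spaces, or in a local `.streamlit/secrets.toml`). Below is a minimal sketch of an equivalent lookup with an environment-variable fallback for local development; the helper `get_hf_token` is illustrative and not part of this commit.

```python
import os

import streamlit as st


def get_hf_token() -> str:
    """Return the Hugging Face token from Streamlit secrets, else the environment (sketch)."""
    try:
        # On Spaces, `Settings > Secrets` exposes the value through st.secrets;
        # locally the same key can live in .streamlit/secrets.toml.
        return st.secrets["HF_API_KEY"]
    except Exception:
        # The exact exception raised when no secrets file exists varies by
        # Streamlit version, so fall back broadly to the environment.
        token = os.environ.get("HF_API_KEY")
        if not token:
            raise RuntimeError("HF_API_KEY is not configured") from None
        return token
```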
app.py ADDED
@@ -0,0 +1,53 @@
+ import streamlit as st
+ from backend.model_router import route_model_conversion
+
+ st.set_page_config(
+     page_title="C# to Java with Large Language Models 🌈",
+     layout="wide",
+     page_icon="✨"
+ )
+
+ st.markdown("""
+ <div style="text-align: center; padding: 20px 10px;">
+     <h1 style="color: #6C63FF; font-size: 3em;">🦄 The Enchanted Code Converter: .NET to Java</h1>
+     <p style="color: #999; font-size: 1.2em;">✨ "Code it like a CEO. Convert it like a GEN_AI Innovator." ✨</p>
+     <p style="color: #FF69B4;">The cutest way to switch your code from serious C# to jazzy Java 💃</p>
+ </div>
+ """, unsafe_allow_html=True)
+
+ st.markdown("<hr style='border-top: 2px dashed #eee;'>", unsafe_allow_html=True)
+
+ st.subheader("🪄 Choose your Magic Model:")
+ model_option = st.selectbox(
+     "✨ Your model:",
+     options=[
+         "mistralai/Mistral-7B-Instruct-v0.2",
+         "mistralai/Mixtral-8x7B-Instruct-v0.1",
+         "Salesforce/codegen2-1B"
+     ],
+     index=0
+ )
+
+ st.markdown("#### 🖊️ Paste your <span style='color:#6C63FF;'>C# Code</span> below:", unsafe_allow_html=True)
+ cs_code = st.text_area("📥 Your C# Code", height=250, placeholder="// Type some cool C# code...")
+
+ convert = st.button("✨ Convert with LLMs ✨")
+
+ if convert:
+     with st.spinner("🧠 PowerBrain is thinking... sprinkling AI dust... 💭✨"):
+         result = route_model_conversion(model_option, cs_code)
+     if result:
+         st.balloons()
+         st.success("💃 Tadaa! Here's your Java code, boss!")
+         st.code(result, language="java")
+     else:
+         st.error("🚨 Oopsie daisy! Something went wrong... maybe the code got shy? 🥺")
+
+ st.markdown("""
+ <hr style="border-top: 2px solid #eee;">
+ <div style='text-align: center; color: #aaa; font-size: 0.9em;'>
+     🛠️ Made with Large Language Models by <br>
+     <span style="font-size: 1.3em; font-weight: bold; color: #6C63FF;">Apoorva Vutukuru</span> ✨ <br>
+     <i>Crafted for GEN_AI Innovators everywhere!</i>
+ </div>
+ """, unsafe_allow_html=True)
backend/codegen2.py ADDED
@@ -0,0 +1,30 @@
+ import streamlit as st
+ from huggingface_hub import InferenceClient
+
+ HF_TOKEN = st.secrets["HF_API_KEY"]
+
+ def convert_with_codegen2(code):
+     prompt = f"Convert the following C# code to Java:\n\n{code}\n\nJava Code:"
+
+     try:
+         client = InferenceClient("bigcode/starcoder", token=HF_TOKEN)
+         response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
+         return response.strip()
+     except Exception as e1:
+         print("❌ StarCoder failed:", e1)
+
+     try:
+         client = InferenceClient("deepseek-ai/deepseek-coder-1.3b-instruct", token=HF_TOKEN)
+         response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
+         return "[⚠️ Using fallback: DeepSeek-Coder]\n" + response.strip()
+     except Exception as e2:
+         print("❌ DeepSeek-Coder failed:", e2)
+
+     try:
+         client = InferenceClient("Salesforce/codegen2-1B", token=HF_TOKEN)
+         response = client.text_generation(prompt, max_new_tokens=512, temperature=0.3, top_p=0.95)
+         return "[⚠️ Using fallback: CodeGen2-1B]\n" + response.strip()
+     except Exception as e3:
+         print("❌ CodeGen2-1B failed:", e3)
+
+     return "💥 All model servers are currently unavailable. Please try again later!"
backend/mistral7b.py ADDED
@@ -0,0 +1,15 @@
+ import streamlit as st
+ from huggingface_hub import InferenceClient
+
+ HF_TOKEN = st.secrets["HF_API_KEY"]
+
+ def convert_with_mistral_7b(code):
+     client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2", token=HF_TOKEN)
+     prompt = f"Convert the following C# code to Java:\n\n{code}\n\nJava Code:"
+     response = client.text_generation(
+         prompt,
+         max_new_tokens=512,
+         temperature=0.3,
+         top_p=0.95
+     )
+     return response.strip()
backend/mistrallarge2.py ADDED
@@ -0,0 +1,15 @@
+ import streamlit as st
+ from huggingface_hub import InferenceClient
+
+ HF_TOKEN = st.secrets["HF_API_KEY"]
+
+ def convert_with_mistral_large2(code):
+     client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1", token=HF_TOKEN)
+     prompt = f"Convert the following C# code to Java:\n\n{code}\n\nJava Code:"
+     response = client.text_generation(
+         prompt,
+         max_new_tokens=512,
+         temperature=0.3,
+         top_p=0.95
+     )
+     return response.strip()
backend/model_router.py ADDED
@@ -0,0 +1,13 @@
+ from backend.mistral7b import convert_with_mistral_7b
+ from backend.mistrallarge2 import convert_with_mistral_large2
+ from backend.codegen2 import convert_with_codegen2
+
+ def route_model_conversion(model_name, cs_code):
+     if "Mistral-7B" in model_name:
+         return convert_with_mistral_7b(cs_code)
+     elif "Mixtral" in model_name:
+         return convert_with_mistral_large2(cs_code)
+     elif "codegen2" in model_name:
+         return convert_with_codegen2(cs_code)
+     else:
+         return "Unsupported model selected."
requirements.txt ADDED
@@ -0,0 +1,58 @@
+ altair==5.5.0
+ attrs==25.1.0
+ blinker==1.9.0
+ cachetools==5.5.2
+ certifi==2025.1.31
+ charset-normalizer==3.4.1
+ click==8.1.8
+ colorama==0.4.6
+ filelock==3.18.0
+ fsspec==2025.3.2
+ gitdb==4.0.12
+ GitPython==3.1.44
+ huggingface-hub==0.30.2
+ idna==3.10
+ Jinja2==3.1.6
+ joblib==1.4.2
+ jsonschema==4.23.0
+ jsonschema-specifications==2024.10.1
+ markdown-it-py==3.0.0
+ MarkupSafe==3.0.2
+ mdurl==0.1.2
+ narwhals==1.29.1
+ nltk==3.9.1
+ numpy==1.26.4
+ packaging==23.2
+ pandas==2.2.3
+ pillow==10.4.0
+ protobuf==4.25.6
+ pyarrow==19.0.1
+ pydeck==0.9.1
+ Pygments==2.19.1
+ python-dateutil==2.9.0.post0
+ python-dotenv==1.1.0
+ pytz==2025.1
+ PyYAML==6.0.2
+ referencing==0.36.2
+ regex==2024.11.6
+ requests==2.31.0
+ rich==13.9.4
+ rpds-py==0.23.1
+ safetensors==0.5.3
+ scikit-learn==1.6.1
+ scipy==1.15.2
+ six==1.17.0
+ smmap==5.0.2
+ streamlit==1.32.2
+ tenacity==8.5.0
+ textblob==0.19.0
+ threadpoolctl==3.5.0
+ tokenizers==0.19.1
+ toml==0.10.2
+ tornado==6.4.2
+ tqdm==4.67.1
+ transformers==4.40.0
+ typing_extensions==4.12.2
+ tzdata==2025.1
+ urllib3==2.3.0
+ watchdog==6.0.0