Hugging Face Space page header (Space status: Sleeping) — not part of the application code.
# app.py
import os

from transformers import AutoTokenizer, AutoModelForCausalLM
import requests
import gradio as gr

# Load the Hugging Face model and tokenizer once at startup.
# NOTE: this is heavy — it downloads the 8B-parameter weights on first run.
model_name = "SakanaAI/Llama-3-8B-Instruct-Coding-Expert"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Groq API configuration.
# SECURITY: the API key was previously hardcoded in this file (and therefore
# leaked). Supply it via the GROQ_API_KEY environment variable and revoke the
# old key.
GROQ_API_KEY = os.environ.get("GROQ_API_KEY", "")
# Groq exposes an OpenAI-compatible API under the /openai/v1 prefix; the bare
# /v1/completions path used previously is not a valid Groq endpoint.
GROQ_API_URL = "https://api.groq.com/openai/v1/completions"
# Function to query the Groq API.
def query_groq(prompt):
    """Send *prompt* to the Groq completions endpoint and return the generated text.

    Parameters
    ----------
    prompt : str
        The text to complete.

    Returns
    -------
    str
        The first completion choice's text.

    Raises
    ------
    requests.HTTPError
        On a non-2xx response (bad key, quota, wrong endpoint).
    requests.Timeout
        If the service does not answer within 60 seconds.
    """
    headers = {
        "Authorization": f"Bearer {GROQ_API_KEY}",
        "Content-Type": "application/json",
    }
    data = {
        # NOTE(review): Groq's OpenAI-compatible completions API normally also
        # requires a "model" field — confirm against the account's model list.
        "prompt": prompt,
        "max_tokens": 150,
    }
    # timeout= prevents the Gradio worker from hanging forever on a dead API.
    response = requests.post(GROQ_API_URL, headers=headers, json=data, timeout=60)
    # Surface auth/quota/endpoint errors explicitly instead of letting the
    # ["choices"] lookup below fail with an opaque KeyError.
    response.raise_for_status()
    return response.json()["choices"][0]["text"]
# Function to generate smart-contract code.
def generate_smart_contract(language, requirements):
    """Draft a smart contract with the local model, then refine it via Groq.

    Parameters
    ----------
    language : str
        Target contract language (e.g. "Solidity").
    requirements : str
        Free-text description of the required contract features.

    Returns
    -------
    str
        The Groq-enhanced contract text.
    """
    # Build the instruction prompt for the local model.
    prompt = f"Generate a {language} smart contract with the following requirements: {requirements}"
    inputs = tokenizer(prompt, return_tensors="pt")
    # Bug fix: max_length=500 capped prompt + completion together, so a long
    # prompt silently starved the completion; max_new_tokens bounds only the
    # newly generated tokens.
    outputs = model.generate(**inputs, max_new_tokens=500)
    generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Enhance the code using the Groq API. NOTE(review): the decoded text
    # still contains the instruction prompt as a prefix — consider stripping
    # it before sending.
    enhanced_code = query_groq(generated_code)
    return enhanced_code
# Gradio interface for the app | |
def generate_contract(language, requirements):
    """Gradio callback: delegate directly to generate_smart_contract."""
    contract_text = generate_smart_contract(language, requirements)
    return contract_text
# Wire the callback into a simple two-field Gradio UI.
interface = gr.Interface(
    fn=generate_contract,
    title="Smart Contract Generator",
    description="Generate smart contracts using AI.",
    inputs=["text", "text"],
    outputs="text",
)

# Start the web UI only when executed as a script (not when imported).
if __name__ == "__main__":
    interface.launch()