from flask import Flask, render_template, request, jsonify
from huggingface_hub import InferenceClient
import traceback
import json
import random
import os

# Flask application instance; module-level so it can serve as the WSGI entry point.
app = Flask(__name__)

def safe_json_dumps(obj):
    """Serialize *obj* to a JSON string, coercing non-serializable values via str()."""
    serialized = json.dumps(obj, default=str)
    return serialized

@app.errorhandler(Exception)
def handle_exception(e):
    """Catch-all error handler: log the traceback and return a JSON error payload.

    Fix: ``errorhandler(Exception)`` also intercepts werkzeug HTTPExceptions
    (404, 405, ...), and the original always returned 500 for them. We echo
    the exception's own HTTP status code when it carries one (HTTPExceptions
    expose ``.code``), falling back to 500 for genuine server errors.
    """
    tb = traceback.format_exc()
    print(f"An error occurred: {str(e)}\n{tb}")
    # HTTPExceptions carry their intended status in `.code`; plain exceptions do not.
    status = getattr(e, "code", None) or 500
    # NOTE(review): returning the full traceback to clients leaks internal
    # details (paths, library versions) -- consider logging it server-side only.
    return jsonify({
        "error": str(e),
        "traceback": tb
    }), status

@app.route('/', methods=['GET', 'POST'])
def index():
    """Serve the submission page and handle inference requests.

    GET  -> render the ``index.html`` form.
    POST -> call the configured Hugging Face text-generation endpoint and
            return ``{'result': ...}`` as JSON, or a 500 JSON payload with
            error details on failure.
    """
    if request.method == 'POST':
        try:
            code = request.form['code']
            # NOTE(review): `code` is read but never interpolated into the
            # prompt below -- confirm whether HF_PROMPT is meant to embed it.

            # Rotate across the configured API tokens to spread rate limits.
            # Fix: unset env vars yielded None entries that random.choice
            # could pick even when real tokens existed; skip them. If no
            # token is configured at all, fall back to anonymous access.
            tokens = [t for t in (os.environ.get('HF_TOKEN1'),
                                  os.environ.get('HF_TOKEN2'),
                                  os.environ.get('HF_TOKEN3'),
                                  os.environ.get('HF_TOKEN4')) if t]
            client = InferenceClient(
                os.environ.get('HF_MODEL_ID'),
                token=random.choice(tokens) if tokens else None
            )
            prompt = f"""{os.environ.get('HF_PROMPT')}"""
            res = client.text_generation(prompt, max_new_tokens=1024, stop=["```", "</Code>"])
            # Drop the trailing (possibly truncated) line of the generation.
            # Fix: rfind() returns -1 when there is no newline, and the
            # original res[:-1] then silently chopped the last character
            # off single-line output; only trim when a newline exists.
            cut = res.rfind('\n')
            if cut != -1:
                res = res[:cut]
            return jsonify({'result': res})
        except Exception as e:
            tb = traceback.format_exc()
            print(f"An error occurred while processing the code: {str(e)}\n{tb}")
            return jsonify({
                "error": f"An error occurred while processing the code: {str(e)}",
                "traceback": tb
            }), 500
    return render_template('index.html')

if __name__ == '__main__':
    # Development server only. NOTE(review): debug=True enables the Werkzeug
    # interactive debugger (arbitrary code execution) -- never use in production;
    # deploy behind a real WSGI server instead.
    app.run(debug=True)