from flask import Flask, render_template, request, jsonify, Response, stream_with_context
from google import genai
from google.genai import types
import os
from PIL import Image
import io
import base64
import json

app = Flask(__name__)

# Read the Gemini API key from the environment.
GOOGLE_API_KEY = os.environ.get("GEMINI_API_KEY")

# Create a Gemini client pinned to the v1alpha API version.
client = genai.Client(
    api_key=GOOGLE_API_KEY,
    http_options={'api_version': 'v1alpha'},
)

@app.route('/')
def index():
    return render_template('index.html')

@app.route('/solve', methods=['POST'])
def solve():
    try:
        # Read the uploaded image, normalise it to PNG, and base64-encode it
        # for inline transmission to the model.
        image_data = request.files['image'].read()
        img = Image.open(io.BytesIO(image_data))

        buffered = io.BytesIO()
        img.save(buffered, format="PNG")
        img_str = base64.b64encode(buffered.getvalue()).decode()

        def generate():
            mode = 'starting'
            try:
                # Stream the model's output; include_thoughts exposes the
                # model's reasoning parts alongside the final answer parts.
                response = client.models.generate_content_stream(
                    model="gemini-2.0-flash-thinking-exp-01-21",
                    config=types.GenerateContentConfig(
                        temperature=0.4,
                        thinking_config=types.ThinkingConfig(include_thoughts=True),
                    ),
                    contents=[
                        {'inline_data': {'mime_type': 'image/png', 'data': img_str}},
                        "Solve this exercise. The answer must be well presented and well spaced."
                    ]
                )

                for chunk in response:
                    # Some streamed chunks arrive without content parts; skip them.
                    candidate = chunk.candidates[0] if chunk.candidates else None
                    if candidate is None or candidate.content is None or not candidate.content.parts:
                        continue
                    for part in candidate.content.parts:
                        # Emit a mode-switch event whenever the stream moves between
                        # thought parts and answer parts, then forward the text.
                        if part.thought:
                            if mode != "thinking":
                                yield f'data: {json.dumps({"mode": "thinking"})}\n\n'
                                mode = "thinking"
                        else:
                            if mode != "answering":
                                yield f'data: {json.dumps({"mode": "answering"})}\n\n'
                                mode = "answering"

                        yield f'data: {json.dumps({"content": part.text})}\n\n'

            except Exception as e:
                print(f"Error during generation: {e}")
                yield f'data: {json.dumps({"error": str(e)})}\n\n'

        # Stream the generator as server-sent events; disable caching and
        # proxy buffering so each chunk is flushed to the browser immediately.
        return Response(
            stream_with_context(generate()),
            mimetype='text/event-stream',
            headers={
                'Cache-Control': 'no-cache',
                'X-Accel-Buffering': 'no'
            }
        )

    except Exception as e:
        return jsonify({'error': str(e)}), 500

if __name__ == '__main__':
    app.run(debug=True)
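
# --- Usage sketch (not part of the original app) ---
# A minimal client for the /solve endpoint, assuming the server is running on
# localhost:5000 and a file named "exercise.png" exists locally. It POSTs the
# image under the "image" field expected above and prints the server-sent
# events as they stream back.
#
#   import requests
#
#   with open("exercise.png", "rb") as f:
#       resp = requests.post(
#           "http://127.0.0.1:5000/solve",
#           files={"image": f},
#           stream=True,
#       )
#   for line in resp.iter_lines(decode_unicode=True):
#       # SSE payloads are prefixed with "data: "; each one is a JSON object
#       # carrying either a "mode" switch, a "content" chunk, or an "error".
#       if line and line.startswith("data: "):
#           print(line[len("data: "):])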