Update app.py
app.py CHANGED
@@ -1,15 +1,17 @@
-from flask import Flask, render_template, request, jsonify
+from flask import Flask, render_template, request, jsonify, Response, stream_with_context
 from google import genai
 from google.genai import types
 import os
 from PIL import Image
 import io
 import base64
+import time
+import json
 
 app = Flask(__name__)
 
 # Replace with your real API key
-GOOGLE_API_KEY = "
+GOOGLE_API_KEY = "YOUR_API_KEY"
 
 client = genai.Client(
     api_key=GOOGLE_API_KEY,
@@ -24,34 +26,36 @@ def index():
 @app.route('/solve', methods=['POST'])
 def solve():
     try:
         image_data = request.files['image'].read()
         img = Image.open(io.BytesIO(image_data))
 
-        # Convert the image to base64 to send it to the Gemini API
         buffered = io.BytesIO()
         img.save(buffered, format="PNG")
         img_str = base64.b64encode(buffered.getvalue()).decode()
 
-
-        response = client.models.generate_content(
+        response_stream = client.models.generate_content_stream(
             model="gemini-2.0-flash-thinking-exp-01-21",
             config={'thinking_config': {'include_thoughts': True}},
             contents=[
                 {'inline_data': {'mime_type': 'image/png', 'data': img_str}},
-                "
+                "Résous ce problème?"
             ]
         )
 
-
-
-
-
-
-
-
-
-
-
+        def generate():
+            try:
+                for chunk in response_stream:
+                    for part in chunk.candidates[0].content.parts:
+                        if part.thought:
+                            yield f"data: {json.dumps({'thought': part.text})}\n\n"
+                        else:
+                            yield f"data: {json.dumps({'answer': part.text})}\n\n"
+                        time.sleep(0.05)  # Control the streaming speed
+            except Exception as e:
+                print(f"Error during generation: {e}")
+                yield f"data: {{ \"error\": \"{e}\" }}\n\n"
+
+        return Response(generate(), mimetype='text/event-stream')
 
     except Exception as e:
         return jsonify({'error': str(e)}), 500
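
For reference, here is a minimal sketch of how a script could consume the streaming /solve endpoint introduced above. The endpoint path and the thought / answer / error event fields come from the diff; the local URL, the use of the requests library, and the problem.png file name are assumptions made purely for illustration.

# Hypothetical SSE client for the /solve route above (sketch, not part of the commit).
# Assumes the Flask app is running on http://127.0.0.1:5000 and that requests is installed.
import json
import requests

def consume_solve(image_path, url="http://127.0.0.1:5000/solve"):
    with open(image_path, "rb") as f:
        # stream=True keeps the connection open so events are read as the server yields them
        with requests.post(url, files={"image": f}, stream=True) as resp:
            resp.raise_for_status()
            for raw in resp.iter_lines(decode_unicode=True):
                if not raw or not raw.startswith("data: "):
                    continue  # skip the blank separator lines between SSE events
                event = json.loads(raw[len("data: "):])
                if "thought" in event:
                    print("[thought]", event["thought"])
                elif "answer" in event:
                    print(event["answer"], end="", flush=True)
                elif "error" in event:
                    print("\n[error]", event["error"])

if __name__ == "__main__":
    consume_solve("problem.png")  # problem.png is a placeholder test image

Each frame on the wire is a single "data: <JSON>" line followed by a blank line, which is the text/event-stream framing the generate() handler produces, so a browser front end could parse the same stream from a fetch() response.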