devlim committed on
Commit efb95c5 • 1 Parent(s): 5b09e83

Upload 3 files

Files changed (3)
  1. app.py +91 -0
  2. requirements.txt +3 -0
  3. templates/index.html +154 -0
app.py ADDED
@@ -0,0 +1,91 @@
+ from flask import Flask, request, jsonify, render_template, send_from_directory
+ import base64
+ import re
+ import os
+ from datetime import datetime
+ import requests
+
+ # OpenAI API key, read from the environment rather than hard-coded in source
+ api_key = os.environ.get("OPENAI_API_KEY")
+
+ app = Flask(__name__)
+
+ # Encode an image file as a base64 string
+ def encode_image(image_path):
+     with open(image_path, "rb") as image_file:
+         return base64.b64encode(image_file.read()).decode('utf-8')
+
+ @app.route('/')
+ def index():
+     return render_template('index.html')
+
+ @app.route('/save_image', methods=['POST'])
+ def save_image():
+     data = request.get_json()
+     image_data = data['image']
+
+     # Decode the base64 image data (strip the data-URL prefix first)
+     image_data = re.sub('^data:image/.+;base64,', '', image_data)
+     image_data = base64.b64decode(image_data)
+
+     # Create a unique file name
+     timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
+     file_path = f'captured_image_{timestamp}.png'
+
+     # Save the image to a file
+     with open(file_path, 'wb') as f:
+         f.write(image_data)
+
+     # Re-encode the saved image as base64 for the API request
+     base64_image = encode_image(file_path)
+
+     headers = {
+         "Content-Type": "application/json",
+         "Authorization": f"Bearer {api_key}"
+     }
+
+     payload = {
+         "model": "gpt-4o",
+         "messages": [
+             {
+                 "role": "user",
+                 "content": [
+                     {
+                         "type": "text",
+                         "text": "Given an input image, output only the sugar content in grams, in the format of the example.\nExample) Sugar: 10g\nIf the image is not a nutrition facts label, output 'error'."
+                     },
+                     {
+                         "type": "image_url",
+                         "image_url": {
+                             "url": f"data:image/jpeg;base64,{base64_image}"
+                         }
+                     }
+                 ]
+             }
+         ],
+         "max_tokens": 300
+     }
+
+     response = requests.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
+
+     if response.status_code == 200:
+         result = response.json()
+         analysis_result = result['choices'][0]['message']['content']
+     else:
+         analysis_result = "Error: the sugar content could not be found."
+
+     return jsonify({'message': 'Analysis complete.', 'image_url': file_path, 'analysis_result': analysis_result})
+
+ @app.route('/images/<filename>')
+ def get_image(filename):
+     return send_from_directory('.', filename)
+
+ if __name__ == '__main__':
+     app.run(host='0.0.0.0', port=7860, debug=True)
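For reference, a minimal sketch of how a client could exercise the /save_image endpoint outside the browser. It assumes the app is running locally on port 7860 (as set in app.run above) and that OPENAI_API_KEY is set in the environment; the file name sample_label.png is a hypothetical placeholder for any nutrition-label photo on disk.

import base64
import requests

# Build the same data-URL payload the browser sends from the canvas
with open("sample_label.png", "rb") as f:  # hypothetical sample image
    data_url = "data:image/png;base64," + base64.b64encode(f.read()).decode("utf-8")

resp = requests.post(
    "http://localhost:7860/save_image",  # local Flask server started by app.py
    json={"image": data_url},
    timeout=120,
)
resp.raise_for_status()
body = resp.json()
print(body["message"])           # "Analysis complete."
print(body["analysis_result"])   # e.g. "Sugar: 10g", or "error" for other images
print(body["image_url"])         # file name of the capture saved on the server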
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ Flask
+ requests
+ openai
templates/index.html ADDED
@@ -0,0 +1,154 @@
+ <!DOCTYPE html>
+ <html lang="en">
+
+ <head>
+     <meta charset="UTF-8">
+     <meta name="viewport" content="width=device-width, initial-scale=1.0">
+     <title>Nutrition Label Analyzer</title>
+     <link href="https://fonts.googleapis.com/css2?family=Roboto:wght@400;700&display=swap" rel="stylesheet">
+     <style>
+         body {
+             font-family: 'Roboto', sans-serif;
+             background-color: #f0f2f5;
+             display: flex;
+             justify-content: center;
+             align-items: center;
+             height: 100vh;
+             margin: 0;
+         }
+
+         .container {
+             background: #fff;
+             padding: 20px;
+             border-radius: 8px;
+             box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1);
+             text-align: center;
+             max-width: 700px;
+             width: 100%;
+         }
+
+         h1 {
+             color: #333;
+             margin-bottom: 20px;
+         }
+
+         video,
+         canvas,
+         img {
+             border-radius: 8px;
+             width: 100%;
+             max-width: 640px;
+             height: auto;
+             margin-bottom: 20px;
+         }
+
+         button {
+             background-color: #4CAF50;
+             color: white;
+             padding: 10px 20px;
+             border: none;
+             border-radius: 5px;
+             cursor: pointer;
+             font-size: 16px;
+             transition: background-color 0.3s, transform 0.3s;
+         }
+
+         button:hover {
+             background-color: #45a049;
+             transform: scale(1.05);
+         }
+
+         .result {
+             margin-top: 20px;
+             font-size: 18px;
+             font-weight: bold;
+         }
+
+         .good {
+             color: green;
+         }
+
+         .normal {
+             color: orange;
+         }
+
+         .dangerous {
+             color: red;
+         }
+     </style>
+ </head>
+
+ <body>
+     <div class="container">
+         <h1>Nutrition Label Analyzer</h1>
+         <video id="video" autoplay></video>
+         <button id="snap">Take Photo</button>
+         <canvas id="canvas"></canvas>
+         <p id="analysisResult" class="result"></p>
+     </div>
+
+     <script>
+         const video = document.getElementById('video');
+         const canvas = document.getElementById('canvas');
+         const snap = document.getElementById('snap');
+         const context = canvas.getContext('2d');
+         const analysisResult = document.getElementById('analysisResult');
+
+         // Show the webcam stream in the <video> element
+         navigator.mediaDevices.getUserMedia({ video: true })
+             .then(stream => {
+                 video.srcObject = stream;
+             })
+             .catch(err => {
+                 console.error("Error accessing webcam: " + err);
+             });
+
+         // Extract the sugar content (in grams) from the server reply and colour-code it
+         function processAnalysisResult(result) {
+             const match = result.match(/\d+/);
+             if (match) {
+                 const sugarContent = parseInt(match[0], 10);
+                 let message = '';
+                 let className = '';
+
+                 if (sugarContent >= 0 && sugarContent <= 20) {
+                     message = 'good';
+                     className = 'good';
+                 } else if (sugarContent >= 21 && sugarContent <= 50) {
+                     message = 'normal';
+                     className = 'normal';
+                 } else if (sugarContent >= 51) {
+                     message = 'dangerous';
+                     className = 'dangerous';
+                 }
+
+                 analysisResult.textContent = message;
+                 analysisResult.className = className;
+             } else {
+                 analysisResult.textContent = 'Error: Could not analyze image.';
+                 analysisResult.className = '';
+             }
+         }
+
+         // Capture the current video frame and send it to the Flask backend
+         snap.addEventListener('click', () => {
+             context.drawImage(video, 0, 0, canvas.width, canvas.height);
+             const dataURL = canvas.toDataURL('image/png');
+             fetch('/save_image', {
+                 method: 'POST',
+                 headers: {
+                     'Content-Type': 'application/json',
+                 },
+                 body: JSON.stringify({ image: dataURL })
+             }).then(response => response.json())
+                 .then(data => {
+                     if (data.analysis_result) {
+                         processAnalysisResult(data.analysis_result);
+                     } else {
+                         analysisResult.textContent = 'Error: the nutrition label could not be read.';
+                         analysisResult.className = '';
+                     }
+                     alert(data.message);
+                 });
+         });
+     </script>
+ </body>
+
+ </html>
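The colour-coding thresholds (0-20 g "good", 21-50 g "normal", 51 g and above "dangerous") live only in the front-end script above. As a minimal sketch, the same classification can be mirrored in Python for server-side checks of the parsing logic; the function name classify_sugar is hypothetical and not part of this upload.

import re

def classify_sugar(analysis_result: str) -> str:
    """Bucket the model reply using the thresholds from templates/index.html."""
    match = re.search(r"\d+", analysis_result)
    if not match:
        return "error"        # mirrors the front end's error branch
    grams = int(match.group())
    if grams <= 20:
        return "good"         # 0-20 g
    if grams <= 50:
        return "normal"       # 21-50 g
    return "dangerous"        # 51 g and above

print(classify_sugar("Sugar: 10g"))   # good
print(classify_sugar("Sugar: 42g"))   # normal
print(classify_sugar("error"))        # error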