<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Candy Label Scanner</title>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd"></script>
<script src="https://cdn.jsdelivr.net/npm/tesseract.js"></script>
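<!-- Note: TensorFlow.js and the coco-ssd model are loaded above but are not referenced by the script below; only Tesseract.js is used for OCR. -->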
<style>
#output {
font-size: 20px;
margin-top: 20px;
}
.red {
color: red;
}
.yellow {
color: yellow;
}
.green {
color: green;
}
video {
width: 100%;
height: auto;
}
</style>
</head>
<body>
<h1>Candy Label Scanner</h1>
<video id="video" autoplay playsinline muted></video>
<button id="capture">Capture</button>
<canvas id="canvas" style="display: none;"></canvas>
<div id="output"></div>
<script>
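// Flow: stream the camera into the <video> element, capture a frame onto the hidden
// <canvas>, preprocess the pixels, run OCR with Tesseract.js, then parse the sugar content.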
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
const output = document.getElementById('output');
const captureButton = document.getElementById('capture');
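// Request camera access; getUserMedia only works in a secure context (HTTPS or localhost)
// and after the user grants permission.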
navigator.mediaDevices.getUserMedia({
video: {
// Prefer the rear camera; "ideal" (rather than "exact") avoids an OverconstrainedError on devices without one
facingMode: { ideal: "environment" },
width: { ideal: 2000 },
height: { ideal: 2000 },
// Advanced constraints are ignored where unsupported; focusMode is not available in every browser
advanced: [{ focusMode: "continuous" }]
}
})
.then(stream => {
video.srcObject = stream;
})
.catch(err => {
console.error("Error accessing the camera: ", err);
});
captureButton.addEventListener('click', () => {
// Draw the video frame to the canvas
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
const context = canvas.getContext('2d');
context.drawImage(video, 0, 0, canvas.width, canvas.height);
// Preprocess the image
const imageData = context.getImageData(0, 0, canvas.width, canvas.height);
preprocess(imageData);
// Draw the preprocessed image on the canvas
context.putImageData(imageData, 0, 0);
// Convert the canvas to data URL
const dataURL = canvas.toDataURL('image/png');
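// Tesseract.js also accepts the canvas element directly; a data URL is used here for simplicity.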
// Recognize text using Tesseract
Tesseract.recognize(
dataURL,
'kor+eng', // Recognize both Korean and English
{
logger: m => console.log(m)
}
).then(({ data: { text } }) => {
console.log(text);
analyzeNutrition(text);
}).catch(err => {
console.error("Tesseract error: ", err);
});
});
function preprocess(imageData) {
// Binarization
binarization(imageData);
// Noise reduction
noiseReduction(imageData);
// Border enhancement
borderEnhancement(imageData);
// Resize
resize(imageData);
}
function binarization(imageData) {
// Simple global threshold: grayscale each pixel, then map it to pure black or white.
// A fixed cutoff of 128 is a rough default; an adaptive method (e.g. Otsu) may work better on unevenly lit labels.
const d = imageData.data;
for (let i = 0; i < d.length; i += 4) {
const v = (0.299 * d[i] + 0.587 * d[i + 1] + 0.114 * d[i + 2]) > 128 ? 255 : 0;
d[i] = d[i + 1] = d[i + 2] = v;
}
}
function noiseReduction(imageData) {
// Not implemented yet; see the medianFilter3x3 sketch below for one possible approach.
}
function borderEnhancement(imageData) {
// Not implemented yet; a sharpening (edge-enhancement) kernel could be applied here.
}
function resize(imageData) {
// ImageData cannot be resized in place; scaling is better done when drawing to the canvas.
}
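// Illustrative sketch (not called by default), assuming the frame has already been
// binarized so that R = G = B: a 3x3 median filter that noiseReduction() could delegate
// to in order to remove isolated speckles before OCR. The 1-pixel border is left untouched.
function medianFilter3x3(imageData) {
const { data, width, height } = imageData;
const src = new Uint8ClampedArray(data); // copy so reads are not affected by in-place writes
for (let y = 1; y < height - 1; y++) {
for (let x = 1; x < width - 1; x++) {
const neighbors = [];
for (let dy = -1; dy <= 1; dy++) {
for (let dx = -1; dx <= 1; dx++) {
neighbors.push(src[((y + dy) * width + (x + dx)) * 4]); // red channel only
}
}
neighbors.sort((a, b) => a - b);
const i = (y * width + x) * 4;
data[i] = data[i + 1] = data[i + 2] = neighbors[4]; // median of the 9 samples
}
}
}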
function analyzeNutrition(text) {
// Extract sugar content from the recognized text
const sugarMatch = text.match(/(당[^\d]*)(\d+(\.\d+)?)(\s*(g|grams|그램))/i);
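// "당" means "sugar" in Korean; e.g. a label line like "당류 15 g" matches, with sugarMatch[2] === "15".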
if (sugarMatch) {
const sugarContent = parseFloat(sugarMatch[2]);
let message = `Sugar content: ${sugarContent}g - `;
if (sugarContent <= 20) {
message += 'Good';
output.className = 'green';
} else if (sugarContent <= 50) {
message += 'Normal';
output.className = 'yellow';
} else {
message += 'Dangerous';
output.className = 'red';
}
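// Note: the 20 g and 50 g cutoffs above are this app's own rating thresholds, not an official guideline.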
output.textContent = message;
} else {
output.textContent = 'Sugar content not found';
output.className = '';
}
}
</script>
</body>
</html>