import gradio as gr
import numpy as np
import cv2
from scipy.spatial.distance import cosine
from keras_facenet import FaceNet


# Load the FaceNet model and apply the fine-tuned embedding weights
def load_facenet_model():
    facenet = FaceNet()
    model = facenet.model  # Access the underlying Keras model inside FaceNet
    return model


embedding_model = load_facenet_model()
embedding_model.load_weights('v4_facenet_siamese_network_embedding.h5')

# In-memory "database" of embeddings and their user IDs
user_embeddings = []
user_ids = []

# Maximum cosine distance at which a face is considered a match
RECOGNITION_THRESHOLD = 0.3  # Adjust as needed


# Preprocess the image for FaceNet
def preprocess_image(image):
    image = cv2.resize(image, (160, 160))  # FaceNet expects 160x160 inputs
    image = image.astype('float32')
    mean, std = image.mean(), image.std()
    image = (image - mean) / std  # Standardize pixel values
    return np.expand_dims(image, axis=0)  # Add batch dimension


# Generate an embedding vector for a single face image
def generate_embedding(image):
    preprocessed_image = preprocess_image(image)
    return embedding_model.predict(preprocessed_image)[0]


# Register a new user by storing their embedding and ID
def register_user(image, user_id):
    try:
        # Basic input validation
        if image is None or not user_id:
            return "Please provide both an image and a user ID."
        embedding = generate_embedding(image)
        user_embeddings.append(embedding)
        user_ids.append(user_id)
        return f"User {user_id} registered successfully."
    except Exception as e:
        return f"Error during registration: {str(e)}"


# Recognize a user by finding the closest stored embedding
def recognize_user(image):
    try:
        if image is None:
            return "Please provide an image."
        if not user_embeddings:
            return "No users registered yet."
        new_embedding = generate_embedding(image)
        closest_user_id = None
        closest_distance = float('inf')
        for user_id, embedding in zip(user_ids, user_embeddings):
            distance = cosine(new_embedding, embedding)
            print(f"Distance for {user_id}: {distance}")  # Debug: distance to each user
            if distance < closest_distance:
                closest_distance = distance
                closest_user_id = user_id
        print(f"Min distance: {closest_distance}")  # Debug: minimum distance found
        if closest_distance <= RECOGNITION_THRESHOLD:
            return f"Recognized User: {closest_user_id}"
        else:
            return f"User not recognized. Closest Distance: {closest_distance}"
    except Exception as e:
        return f"Error during recognition: {str(e)}"


def main():
    with gr.Blocks() as demo:
        gr.Markdown("Facial Recognition System")
        with gr.Tab("Register"):
            with gr.Row():
                img_register = gr.Image()
                user_id = gr.Textbox(label="User ID")
            register_button = gr.Button("Register")
            register_output = gr.Textbox()
            register_button.click(register_user, inputs=[img_register, user_id], outputs=register_output)
        with gr.Tab("Recognize"):
            with gr.Row():
                img_recognize = gr.Image()
            recognize_button = gr.Button("Recognize")
            recognize_output = gr.Textbox()
            recognize_button.click(recognize_user, inputs=[img_recognize], outputs=recognize_output)
    demo.launch(share=True)


if __name__ == "__main__":
    main()