ibrahim321123 committed on
Commit
18ef570
·
verified ·
1 Parent(s): 937d1e1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -35
app.py CHANGED
@@ -1,11 +1,9 @@
1
  import streamlit as st
2
- from fer import FER
3
- import cv2
4
- import numpy as np
5
  import random
6
- import moviepy.editor
7
 
8
- # Example ayahs
9
  ayahs = {
10
  "sad": [
11
  {
@@ -33,43 +31,57 @@ ayahs = {
33
  ]
34
  }
35
 
36
# Legacy FER-based flow: decode the selfie with OpenCV, run the local FER
# detector, then show an ayah matched to the dominant emotion.
st.title("🌙 Qur’an Healing Soul - FER Version")

uploaded_file = st.file_uploader("Upload your selfie", type=["jpg", "png", "jpeg"])

if uploaded_file is not None:
    # Convert the uploaded bytes into an OpenCV (BGR) image.
    file_bytes = np.asarray(bytearray(uploaded_file.read()), dtype=np.uint8)
    image = cv2.imdecode(file_bytes, 1)
    st.image(image, channels="BGR")

    # Detect the dominant emotion (MTCNN face detector backend).
    detector = FER(mtcnn=True)
    result = detector.top_emotion(image)
    st.write(f"Detected: {result}")

    # FER.top_emotion may report "no face" either as None or as a
    # (None, None) tuple. The original only checked `is not None`, so the
    # (None, None) case crashed on `round(score * 100, 2)`. Guard both so
    # they fall through to the warning branch.
    if result is not None and result[0] is not None:
        emotion, score = result
        st.write(f"Dominant Emotion: **{emotion}** ({round(score * 100, 2)}%)")

        # Map the detector's label onto one of our ayah categories;
        # "sad" is the default bucket for neutral/unknown labels.
        if emotion in ["sad", "fear"]:
            key = "sad"
        elif emotion in ["happy", "surprise"]:
            key = "happy"
        elif emotion in ["angry", "disgust"]:
            key = "angry"
        else:
            key = "sad"

        ayah = random.choice(ayahs[key])
        st.markdown(f"""
**📖 Ayah ({ayah['ayah']})**

Arabic: *{ayah['arabic']}*

Translation: {ayah['translation']}

Tafsir: {ayah['tafsir']}
""")
    else:
        st.warning("No face detected.")
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
+ from gradio_client import Client, file
3
+ import tempfile
 
4
  import random
 
5
 
6
+ # --- Your Qur’an ayahs ---
7
  ayahs = {
8
  "sad": [
9
  {
 
31
  ]
32
  }
33
 
34
import os  # temp-file cleanup; local import keeps this block self-contained

# --- Streamlit UI ---
st.set_page_config(page_title="Qur’an Healing Soul - Image Emotion", page_icon="🌙")

st.title("🌙 Qur’an Healing Soul - Emotion from Image")

uploaded_file = st.file_uploader("Upload your selfie", type=["jpg", "jpeg", "png"])

if uploaded_file:
    st.image(uploaded_file, caption="Uploaded Image")

    # gradio_client needs a path on disk, so spill the upload to a temp file.
    # Keep the original extension — remote endpoints may sniff the type from it.
    suffix = os.path.splitext(uploaded_file.name)[1]
    with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tmp_file:
        tmp_file.write(uploaded_file.read())
        tmp_file_path = tmp_file.name

    st.info("Detecting emotion... please wait...")

    try:
        # Call the remote FER model hosted on Hugging Face Spaces.
        client = Client("ElenaRyumina/Facial_Expression_Recognition")
        result = client.predict(
            inp=file(tmp_file_path),
            api_name="/preprocess_image_and_predict",
        )
    finally:
        # The original leaked one temp file per upload (delete=False, never
        # removed). Always clean up, even when the remote call fails.
        os.remove(tmp_file_path)

    # The endpoint returns a 3-tuple; only the last element (the
    # label -> probability mapping) is used here.
    _, _, confidences = result

    st.subheader("Emotion Probabilities")
    st.json(confidences)

    # Dominant emotion = label with the highest probability.
    dominant = max(confidences, key=confidences.get)
    st.success(f"Dominant Emotion: **{dominant}**")

    # Map the model's label onto one of our ayah categories; "sad" is the
    # default bucket for neutral/unknown labels. Lowercase once instead of
    # per comparison.
    label = dominant.lower()
    if label in ("sad", "fear"):
        key = "sad"
    elif label in ("happy", "surprise"):
        key = "happy"
    elif label in ("angry", "disgust"):
        key = "angry"
    else:
        key = "sad"

    ayah = random.choice(ayahs[key])
    st.markdown(f"""
### 📖 Ayah ({ayah['ayah']})

**Arabic:** *{ayah['arabic']}*

**Translation:** {ayah['translation']}

**Tafsir:** {ayah['tafsir']}
""")