willwade committed on
Commit
7e47c96
·
1 Parent(s): 428c083

add more debug

Browse files
Files changed (1) hide show
  1. app.py +18 -2
app.py CHANGED
@@ -69,10 +69,11 @@ class ChaplinGradio:
69
 
70
  # Add initial debug info
71
  debug_log.append(f"Current time: {current_time}")
 
72
 
73
  if current_time - self.last_frame_time < self.frame_interval:
74
  debug_log.append("Skipping frame - too soon")
75
- return self.last_prediction, "\n".join(debug_log) # Make sure we return both values
76
 
77
  self.last_frame_time = current_time
78
 
@@ -121,6 +122,12 @@ class ChaplinGradio:
121
  debug_log.append(f"Wrote {i+1} frames to video")
122
  out.release()
123
 
 
 
 
 
 
 
124
  # Clear buffer but keep last few frames for continuity
125
  self.frame_buffer = self.frame_buffer[-8:] # Keep last 0.5 seconds
126
  debug_log.append(f"Cleared buffer, kept {len(self.frame_buffer)} frames")
@@ -129,26 +136,35 @@ class ChaplinGradio:
129
  # Process the video file using the pipeline
130
  debug_log.append("Starting model inference...")
131
  predicted_text = self.vsr_model(temp_video)
132
- debug_log.append(f"Model prediction: {predicted_text}")
133
  if predicted_text:
134
  self.last_prediction = predicted_text
 
 
 
135
  return (self.last_prediction or "Waiting for speech..."), "\n".join(debug_log)
136
 
137
  except Exception as e:
138
  error_msg = f"Error during inference: {str(e)}"
139
  debug_log.append(error_msg)
 
 
140
  return f"Error processing frames: {str(e)}", "\n".join(debug_log)
141
  finally:
142
  # Clean up temp file
143
  if os.path.exists(temp_video):
144
  os.remove(temp_video)
145
  debug_log.append("Cleaned up temp video file")
 
 
146
 
147
  return (self.last_prediction or "Waiting for speech..."), "\n".join(debug_log)
148
 
149
  except Exception as e:
150
  error_msg = f"Error processing: {str(e)}"
151
  debug_log.append(error_msg)
 
 
152
  return f"Error processing: {str(e)}", "\n".join(debug_log)
153
 
154
 
 
69
 
70
  # Add initial debug info
71
  debug_log.append(f"Current time: {current_time}")
72
+ debug_log.append(f"Last prediction: {self.last_prediction}")
73
 
74
  if current_time - self.last_frame_time < self.frame_interval:
75
  debug_log.append("Skipping frame - too soon")
76
+ return self.last_prediction, "\n".join(debug_log)
77
 
78
  self.last_frame_time = current_time
79
 
 
122
  debug_log.append(f"Wrote {i+1} frames to video")
123
  out.release()
124
 
125
+ # Verify video was created
126
+ if not os.path.exists(temp_video):
127
+ debug_log.append("Error: Video file was not created!")
128
+ else:
129
+ debug_log.append(f"Video file created successfully, size: {os.path.getsize(temp_video)} bytes")
130
+
131
  # Clear buffer but keep last few frames for continuity
132
  self.frame_buffer = self.frame_buffer[-8:] # Keep last 0.5 seconds
133
  debug_log.append(f"Cleared buffer, kept {len(self.frame_buffer)} frames")
 
136
  # Process the video file using the pipeline
137
  debug_log.append("Starting model inference...")
138
  predicted_text = self.vsr_model(temp_video)
139
+ debug_log.append(f"Raw model prediction: '{predicted_text}'")
140
  if predicted_text:
141
  self.last_prediction = predicted_text
142
+ debug_log.append(f"Updated last prediction to: '{self.last_prediction}'")
143
+ else:
144
+ debug_log.append("Model returned empty prediction")
145
  return (self.last_prediction or "Waiting for speech..."), "\n".join(debug_log)
146
 
147
  except Exception as e:
148
  error_msg = f"Error during inference: {str(e)}"
149
  debug_log.append(error_msg)
150
+ import traceback
151
+ debug_log.append(f"Full error: {traceback.format_exc()}")
152
  return f"Error processing frames: {str(e)}", "\n".join(debug_log)
153
  finally:
154
  # Clean up temp file
155
  if os.path.exists(temp_video):
156
  os.remove(temp_video)
157
  debug_log.append("Cleaned up temp video file")
158
+ else:
159
+ debug_log.append("No temp file to clean up")
160
 
161
  return (self.last_prediction or "Waiting for speech..."), "\n".join(debug_log)
162
 
163
  except Exception as e:
164
  error_msg = f"Error processing: {str(e)}"
165
  debug_log.append(error_msg)
166
+ import traceback
167
+ debug_log.append(f"Full error: {traceback.format_exc()}")
168
  return f"Error processing: {str(e)}", "\n".join(debug_log)
169
 
170