reab5555 committed · verified
Commit 353d877 · Parent(s): 1a445f9

Update app.py

Files changed (1): app.py (+28 −8)
app.py CHANGED
@@ -385,9 +385,9 @@ def process_video(video_path, num_anomalies, num_components, desired_fps, batch_
         zip(anomaly_scores_all[top_indices_all], df['Timecode'].iloc[top_indices_all].values)])
 
     progress(1.0, "Complete")
-    return results, anomaly_plot, *emotion_plots
-
-# Gradio interface
+    return results, anomaly_plot, emotion_plots[0], emotion_plots[1], emotion_plots[2]
+
+# Updated Gradio interface
 iface = gr.Interface(
     fn=process_video,
     inputs=[
@@ -399,13 +399,33 @@ iface = gr.Interface(
     ],
     outputs=[
         gr.Textbox(label="Anomaly Detection Results"),
-        gr.Plot(label="Anomaly Scores"),
-        gr.Plot(label="Fear Scores"),
-        gr.Plot(label="Sad Scores"),
-        gr.Plot(label="Angry Scores")
+        gr.Plot(label="Anomaly Scores").style(full_width=True, height=500),
+        gr.Plot(label="Fear Scores").style(full_width=True, height=500),
+        gr.Plot(label="Sad Scores").style(full_width=True, height=500),
+        gr.Plot(label="Angry Scores").style(full_width=True, height=500)
     ],
     title="Facial Expressions Anomaly Detection",
-    description="Upload a video to detect anomalies in facial expressions and emotions. Adjust parameters as needed."
+    description="""
+    This application detects anomalies in facial expressions and emotions from a video input.
+    It focuses on the most frequently appearing person in the video for analysis.
+
+    How it works:
+    1. The app extracts faces from the video frames.
+    2. It identifies the most frequent person (face) in the video.
+    3. For this person, it analyzes facial expressions and emotions over time.
+    4. It then detects anomalies in these expressions and emotions.
+
+    The graphs show anomaly scores and emotion intensities over time.
+    Click on any graph to view it in full size.
+
+    Adjust the parameters as needed:
+    - Number of Anomalies: How many top anomalies to detect
+    - Number of Components: Complexity of the facial expression model
+    - Desired FPS: Frames per second to analyze (lower for faster processing)
+    - Batch Size: Affects processing speed and memory usage
+
+    Upload a video and click 'Submit' to start the analysis.
+    """
 )
 
 if __name__ == "__main__":
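
For context on the changed return statement: gr.Interface maps the values returned by fn, in order, onto the components listed in outputs, so three gr.Plot outputs need exactly three plot values back. A minimal standalone sketch of that mapping (not the repository's code; the function, labels, and figures below are placeholders):

import gradio as gr
import matplotlib.pyplot as plt

def analyze(text):
    # Build two placeholder figures; gr.Plot accepts Matplotlib figures directly.
    fig1, fig2 = plt.figure(), plt.figure()
    fig1.gca().plot([0, 1, 2], [1, 0, 1])
    fig2.gca().plot([0, 1, 2], [0, 1, 0])
    # One return value per output component, in the same order as `outputs`.
    return f"Processed: {text}", fig1, fig2

demo = gr.Interface(
    fn=analyze,
    inputs=gr.Textbox(label="Input"),
    outputs=[
        gr.Textbox(label="Results"),
        gr.Plot(label="Plot A"),
        gr.Plot(label="Plot B"),
    ],
)

if __name__ == "__main__":
    demo.launch()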
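
The parameters named in the new description (Number of Anomalies, Number of Components, Desired FPS, Batch Size) are declared in the collapsed inputs=[...] block that the diff does not show. A hypothetical sketch of how such inputs could be declared in Gradio; the component choices, ranges, and defaults are assumptions, not the values in app.py:

import gradio as gr

# Hypothetical input components; the real definitions sit in the collapsed lines of app.py.
example_inputs = [
    gr.Video(label="Input Video"),
    gr.Slider(minimum=1, maximum=20, step=1, value=5, label="Number of Anomalies"),
    gr.Slider(minimum=1, maximum=20, step=1, value=5, label="Number of Components"),
    gr.Slider(minimum=1, maximum=30, step=1, value=10, label="Desired FPS"),
    gr.Slider(minimum=1, maximum=64, step=1, value=8, label="Batch Size"),
]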