AnsenH committed
Commit ec35ab9 · 1 Parent(s): 9313499

bugfix: short video index issue

app.py CHANGED
@@ -103,7 +103,7 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
         start=lbhd_predictions[i][0],
         end=lbhd_predictions[i][1],
         output_file=f'{i}_lbhd.mp4'
-    ) for i in range(10)]
+    ) for i in range(min(10, len(lbhd_predictions)))]
 
     return {
         output_videos: output_files,
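
Why the change: a short video can yield fewer than ten predictions, so indexing lbhd_predictions[i] for i up to 9 raised an IndexError; capping the range at len(lbhd_predictions) avoids that. A minimal sketch of the before/after behaviour, not taken from the repo: clip_video below is a hypothetical stand-in for the call that receives start=, end= and output_file= in app.py, and the two-element list simulates a short video.

# Hypothetical stand-in for the (unshown) call in app.py that cuts one clip.
def clip_video(start, end, output_file):
    return output_file

# A short video may produce fewer than ten highlight predictions.
lbhd_predictions = [(0.0, 2.0), (2.0, 4.0)]  # (start, end) pairs, illustrative values

# Old: range(10) walks past the end of the list and raises IndexError.
# New: the range is capped to however many predictions actually exist.
output_files = [clip_video(
    start=lbhd_predictions[i][0],
    end=lbhd_predictions[i][1],
    output_file=f'{i}_lbhd.mp4',
) for i in range(min(10, len(lbhd_predictions)))]
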
lbhd/__pycache__/infer.cpython-311.pyc CHANGED
Binary files a/lbhd/__pycache__/infer.cpython-311.pyc and b/lbhd/__pycache__/infer.cpython-311.pyc differ
 
lbhd/infer.py CHANGED
@@ -20,6 +20,7 @@ scoringModel = checkpoint['model']
 
 
 def sample_clips(x, frames_per_clip: int):
+    print("x shape", x.shape, "frames_per_clip", frames_per_clip)
     x = torch.stack( [ x[i:i+frames_per_clip] for i in range(len(x) - frames_per_clip + 1) ] )
     return x
 
@@ -43,8 +44,8 @@ def sample_frames(file_name, fps):
 def videofile_to_frames(filename, sample_every_second=True):
     clip = VideoFileClip(filename)
     frames = clip.iter_frames()
-    frames = [Image.fromarray(frame) for index, frame in enumerate(frames) if index % clip.fps == 0]
-    print('Number of frames in video is:', len(frames))
+    frames = [Image.fromarray(frame) for index, frame in enumerate(frames) if index % int(clip.fps) == 0]
+    print('clip.fps', clip.fps, 'Number of frames in video is:', len(frames))
     return frames
 
 
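
Why the change: the added print calls are debug logging; the functional fix is the int() cast. clip.fps from MoviePy can be a non-integer float (for example 29.97), and with a float modulus the test index % clip.fps == 0 only holds at index 0, so effectively a single frame was kept, which likely starved sample_clips of frames on short videos. A small sketch of the difference, using an illustrative frame rate that is not taken from the repo:

# Illustrative only: 29.97 fps is a common non-integer frame rate.
fps = 29.97
kept_float = [i for i in range(90) if i % fps == 0]       # [0] -- only the first frame survives
kept_int = [i for i in range(90) if i % int(fps) == 0]    # [0, 29, 58, 87] -- roughly one frame per second
print(kept_float, kept_int)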