Nicous committed (verified)
Commit 7b59b1a · Parent(s): 78143c4

Update app.py

Files changed (1)
  1. app.py +51 -60
app.py CHANGED
@@ -104,7 +104,8 @@ notice_html = """
  <li>- Due to hardware limitations on this demo page, we recommend users only try 10-second videos.</li>
  <li>- The demo model is used for the egocentric video captioning step for the EgoRAG framework. The recommended prompt includes:</li>
  <ul style="padding-left: 20px; margin-top: 10px; color: #333;">
- <li>Can you help me log everything I do and the key things I see, like a personal journal? Describe them in a natural style. Please provide your response using the first person, with "I" as the subject. Make sure the descriptions are detailed and natural.</li>
+ <li>Can you help me log everything I do and the key things I see, like a personal journal? Describe them in a natural style.
+ <li>Please provide your response using the first person, with "I" as the subject. Make sure the descriptions are detailed and natural.</li>
  <li>Can you write down important things I notice or interact with? Please respond in the first person, using "I" as the subject. Describe them in a natural style.</li>
  </ul>
  </ul>
@@ -361,101 +362,91 @@ button.lg.secondary.svelte-1gz44hr span {
  <script>
  // Function to initialize controls once elements are found
  function initializeControls() {
-     const textbox = document.querySelector('[data-testid="textbox"]');
      const video = document.querySelector('[data-testid="Video-player"]');
      const waveform = document.getElementById('waveform');
-
-     // Ensure the elements exist before proceeding
-     if (!textbox || !video || !waveform) {
-         console.log("Textbox, video, or waveform element not found.");
+
+     // If the elements are not ready yet, return right away
+     if (!video || !waveform) {
          return;
      }
-
-     // Example of setting up interactions if textbox and other elements are found
-     console.log("Elements found:", { textbox, video, waveform });
 
-     // Your logic for controlling the video and audio goes here
+     // Try to get the audio element
      const audio = waveform.querySelector('div')?.shadowRoot?.querySelector('audio');
      if (!audio) {
-         console.log("Audio element not found.");
          return;
      }
 
-     video.addEventListener("play", () => {
-         if (audio.paused) {
-             audio.play();
-         }
-     });
+     console.log('Elements found:', { video, audio });
+
+     // Listen for video playback
+     video.addEventListener("play", () => {
+         if (audio.paused) {
+             audio.play(); // If the audio is paused, start playing it
+         }
+     });
 
-     audio.addEventListener("play", () => {
-         if (video.paused) {
-             video.play();
-         }
-     });
+     // Listen for audio playback
+     audio.addEventListener("play", () => {
+         if (video.paused) {
+             video.play(); // If the video is paused, start playing it
+         }
+     });
 
-     video.addEventListener("timeupdate", () => {
-         if (Math.abs(video.currentTime - audio.currentTime) > 0.1) {
-             audio.currentTime = video.currentTime;
-         }
-     });
+     // Keep the video and audio playback progress in sync
+     video.addEventListener("timeupdate", () => {
+         if (Math.abs(video.currentTime - audio.currentTime) > 0.1) {
+             audio.currentTime = video.currentTime; // Sync if the time difference exceeds 0.1 s
+         }
+     });
 
-     audio.addEventListener("timeupdate", () => {
-         if (Math.abs(audio.currentTime - video.currentTime) > 0.1) {
-             video.currentTime = audio.currentTime;
-         }
-     });
+     audio.addEventListener("timeupdate", () => {
+         if (Math.abs(audio.currentTime - video.currentTime) > 0.1) {
+             video.currentTime = audio.currentTime; // Sync if the time difference exceeds 0.1 s
+         }
+     });
 
-     video.addEventListener("pause", () => {
-         if (!audio.paused) {
-             audio.pause();
-         }
-     });
+     // Listen for pause events so that the video and the audio pause together
+     video.addEventListener("pause", () => {
+         if (!audio.paused) {
+             audio.pause(); // Pause the audio if it is not already paused
+         }
+     });
 
-     audio.addEventListener("pause", () => {
-         if (!video.paused) {
-             video.pause();
-         }
-     });
+     audio.addEventListener("pause", () => {
+         if (!video.paused) {
+             video.pause(); // Pause the video if it is not already paused
+         }
+     });
  }
 
- // Create an observer to listen for DOM changes
+ // Create an observer to watch for DOM changes
  const observer = new MutationObserver((mutations) => {
      for (const mutation of mutations) {
          if (mutation.addedNodes.length) {
-             // When new nodes are added, try to initialize
-             const textbox = document.querySelector('[data-testid="textbox"]');
-             if (textbox) {
-                 console.log("Textbox element detected");
+             // When new nodes are added, try to initialize
+             const waveform = document.getElementById('waveform');
+             if (waveform?.querySelector('div')?.shadowRoot?.querySelector('audio')) {
+                 console.log('Audio element detected');
                  initializeControls();
-                 // Optionally disconnect the observer after initialization
-                 observer.disconnect();
+                 // Optional: disconnect the observer if it no longer needs to watch
+                 // observer.disconnect();
              }
          }
      }
  });
 
- // Start observing DOM changes
+ // Start observing
  observer.observe(document.body, {
      childList: true,
      subtree: true
  });
 
- // Ensure initialization happens after the DOM is fully loaded
+ // Also try to initialize when the page has finished loading
  document.addEventListener('DOMContentLoaded', () => {
      console.log('DOM Content Loaded');
      initializeControls();
  });
 
- // Example of delayed execution to ensure all elements have loaded
- window.addEventListener("load", () => {
-     const textbox = document.querySelector('[data-testid="textbox"]');
-     if (textbox) {
-         console.log("Textbox element found on window load.");
-         initializeControls();
-     }
- });
-
-
  </script>
  """