rizky nindra committed
Commit d3c48a0 · 1 Parent(s): 480c756
.idea/workspace.xml ADDED
@@ -0,0 +1,62 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+   <component name="AutoImportSettings">
+     <option name="autoReloadType" value="SELECTIVE" />
+   </component>
+   <component name="ChangeListManager">
+     <list default="true" id="82a48ce3-b0fe-4383-a5c5-1857889128ea" name="Changes" comment="" />
+     <option name="SHOW_DIALOG" value="false" />
+     <option name="HIGHLIGHT_CONFLICTS" value="true" />
+     <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
+     <option name="LAST_RESOLUTION" value="IGNORE" />
+   </component>
+   <component name="FileTemplateManagerImpl">
+     <option name="RECENT_TEMPLATES">
+       <list>
+         <option value="Dockerfile" />
+       </list>
+     </option>
+   </component>
+   <component name="Git.Settings">
+     <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
+   </component>
+   <component name="ProjectColorInfo"><![CDATA[{
+   "associatedIndex": 3
+ }]]></component>
+   <component name="ProjectId" id="33zr2jYSXA7j2m2NetACLNED8HB" />
+   <component name="ProjectViewState">
+     <option name="hideEmptyMiddlePackages" value="true" />
+     <option name="showLibraryContents" value="true" />
+   </component>
+   <component name="PropertiesComponent"><![CDATA[{
+   "keyToString": {
+     "ModuleVcsDetector.initialDetectionPerformed": "true",
+     "RunOnceActivity.ShowReadmeOnStart": "true",
+     "RunOnceActivity.git.unshallow": "true",
+     "git-widget-placeholder": "main",
+     "last_opened_file_path": "/Users/rizkynindra.sukma/PycharmProjects/squadrone"
+   }
+ }]]></component>
+   <component name="RecentsManager">
+     <key name="CopyFile.RECENT_KEYS">
+       <recent name="$PROJECT_DIR$" />
+     </key>
+   </component>
+   <component name="SharedIndexes">
+     <attachedChunks>
+       <set>
+         <option value="bundled-python-sdk-82724e2b1abb-e2d783800521-com.jetbrains.pycharm.community.sharedIndexes.bundled-PC-251.28293.52" />
+       </set>
+     </attachedChunks>
+   </component>
+   <component name="TaskManager">
+     <task active="true" id="Default" summary="Default task">
+       <changelist id="82a48ce3-b0fe-4383-a5c5-1857889128ea" name="Changes" comment="" />
+       <created>1760330537074</created>
+       <option name="number" value="Default" />
+       <option name="presentableId" value="Default" />
+       <updated>1760330537074</updated>
+     </task>
+     <servers />
+   </component>
+ </project>
Dockerfile CHANGED
@@ -11,11 +11,11 @@ RUN pip install --no-cache-dir -r requirements.txt
  # Copy your Flask app
  COPY . .

- # Expose port 7860 (Hugging Face default)
- EXPOSE 7860
+ # Expose port 5000 (the port app2.py listens on)
+ EXPOSE 5000

  # Set environment variable for Flask
- ENV PORT=7860
+ ENV PORT=5000

  # Run Flask app
  CMD ["python", "app2.py"]
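Note that the new ENV PORT=5000 line is never read by the application: app2.py (added below) hardcodes port=5000 in app.run(). A minimal sketch of how the entry point could honor the variable instead, assuming the rest of app2.py stays as committed (the os import is an addition for illustration, not part of this commit):

import os

if __name__ == '__main__':
    # Fall back to 5000 when PORT is unset, matching the EXPOSE line above
    port = int(os.environ.get('PORT', 5000))
    app.run(debug=True, host='0.0.0.0', port=port, threaded=True)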
app2.py ADDED
@@ -0,0 +1,164 @@
+ from flask import Flask, render_template, request, jsonify, send_from_directory
+ import base64
+ import cv2
+ import numpy as np
+ from ultralytics import YOLO
+ import io
+ from PIL import Image
+ import time
+ import torch
+
+ print(f"PyTorch CUDA available: {torch.cuda.is_available()}")
+ print(f"CUDA device count: {torch.cuda.device_count()}")
+ if torch.cuda.is_available():
+     print(f"CUDA device name: {torch.cuda.get_device_name(0)}")
+
+ app = Flask(__name__)
+
+ @app.route('/audio/<path:filename>')
+ def serve_audio(filename):
+     return send_from_directory('audio', filename)
+
+ # Load YOLO model
+ model = YOLO('model/best_100_fix.pt')
+
+ # Use the GPU when available; fall back to CPU so the app still starts on CPU-only hosts
+ model.to('cuda' if torch.cuda.is_available() else 'cpu')
+ print(f"YOLO model loaded on device: {model.device}")
+
+ # Model configuration
+ CONFIDENCE_THRESHOLD = 0.3  # Lower confidence threshold for better detection
+ IMAGE_SIZE = 480  # Smaller inference size can improve performance
+
+ # Human detection tracking
+ human_detection_state = {
+     'first_detected_at': None,
+     'is_alarm_active': False,
+     'last_detection_time': 0,
+     'detection_threshold': 2.0  # 2 seconds
+ }
+
+
+ @app.route('/')
+ def index():
+     return render_template('screen_share_bck.html')
+
+
+ @app.route('/detect', methods=['POST'])
+ def detect():
+     start_time = time.time()
+     try:
+         # Get image data from request
+         data = request.json
+         image_data = data['image']
+
+         # Remove the prefix from base64 data
+         if 'data:image/jpeg;base64,' in image_data:
+             image_data = image_data.replace('data:image/jpeg;base64,', '')
+         elif 'data:image/png;base64,' in image_data:
+             image_data = image_data.replace('data:image/png;base64,', '')
+
+         # Decode base64 to image
+         image_bytes = base64.b64decode(image_data)
+         image = Image.open(io.BytesIO(image_bytes))
+
+         # Convert to OpenCV format
+         frame = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
+
+         # Run detection with optimized parameters
+         results = model.predict(
+             source=frame,
+             conf=CONFIDENCE_THRESHOLD,
+             verbose=False,
+             imgsz=IMAGE_SIZE,  # Use smaller size for faster inference
+             iou=0.5
+         )
+
+         # Process results
+         detections = []
+         human_detected = False
+
+         for result in results:
+             boxes = result.boxes.xyxy.cpu().numpy()
+             scores = result.boxes.conf.cpu().numpy()
+             classes = result.boxes.cls.cpu().numpy()
+
+             for box, score, cls in zip(boxes, scores, classes):
+                 x1, y1, x2, y2 = map(int, box)
+                 class_name = model.names[int(cls)]
+
+                 # Check if this is a human/person detection
+                 if class_name.lower() in ['person', 'human']:
+                     human_detected = True
+
+                 detections.append({
+                     'box': [x1, y1, x2, y2],
+                     'class': class_name,
+                     'confidence': float(score)
+                 })
+
+         # Update human detection state
+         current_time = time.time()
+         alarm_status = check_human_detection(human_detected, current_time)
+
+         processing_time = time.time() - start_time
+         return jsonify({
+             'success': True,
+             'detections': detections,
+             'processing_time_ms': round(processing_time * 1000, 2),
+             'alarm': alarm_status
+         })
+
+     except Exception as e:
+         print(f"Error processing image: {str(e)}")
+         # Reset human detection on error
+         reset_human_detection()
+         return jsonify({
+             'success': False,
+             'error': str(e)
+         }), 500
+
+
+ def check_human_detection(human_detected, current_time):
+     """Track human detection and determine if alarm should be triggered"""
+     global human_detection_state
+
+     if human_detected:
+         # If this is the first human detection or there was a gap in detection
+         if human_detection_state['first_detected_at'] is None:
+             human_detection_state['first_detected_at'] = current_time
+             human_detection_state['is_alarm_active'] = False
+             return {'active': False}
+
+         # Check if human has been detected for the threshold duration
+         elapsed_time = current_time - human_detection_state['first_detected_at']
+         if elapsed_time >= human_detection_state['detection_threshold']:
+             # Trigger the alarm if not already triggered
+             human_detection_state['is_alarm_active'] = True
+             return {'active': True, 'duration': elapsed_time}
+
+         # Human detected but threshold not reached
+         return {'active': False, 'progress': elapsed_time / human_detection_state['detection_threshold']}
+     else:
+         # No human detected, reset the tracking
+         reset_human_detection()
+         return {'active': False}
+
+
+ def reset_human_detection():
+     """Reset human detection tracking"""
+     global human_detection_state
+     human_detection_state['first_detected_at'] = None
+     human_detection_state['is_alarm_active'] = False
+
+
+ @app.route('/reset_alarm', methods=['POST'])
+ def reset_alarm():
+     """Endpoint to manually reset the alarm"""
+     reset_human_detection()
+     return jsonify({'success': True})
+
+
+ if __name__ == '__main__':
+     # Use threaded mode for better performance
+     app.run(debug=True, host='0.0.0.0', port=5000, threaded=True)
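For reference, a small client for the /detect endpoint defined above. This is a sketch, not part of the commit: it assumes the server is reachable at http://localhost:5000 and that a local test image named frame.jpg exists; requests is already pinned in requirements.txt.

import base64
import requests

# Encode a local frame with the data-URL prefix that /detect strips off
with open('frame.jpg', 'rb') as f:
    image_b64 = 'data:image/jpeg;base64,' + base64.b64encode(f.read()).decode()

resp = requests.post('http://localhost:5000/detect', json={'image': image_b64})
resp.raise_for_status()
result = resp.json()

print(f"{len(result['detections'])} detections in {result['processing_time_ms']} ms")
for det in result['detections']:
    print(det['class'], round(det['confidence'], 3), det['box'])
print('alarm:', result['alarm'])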
audio/enemy_spotted.mp3 ADDED
Binary file (28.5 kB).
 
model/best (1).pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af5a0a7f1436251cc471db30d5f40caafc8af639bba58f396279f383a38e1169
+ size 5444371
model/best.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:889a947d31042a6198ec9ad3c70efb9555e03b40789fce9698303558df1fabe6
+ size 5445523
model/best2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:24814a8738c47ded7f1bbffad09987f641454830dd449fa257537cd91914bdc0
+ size 5450643
model/best5.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8420a7d77cc16e14164ca6eb1dc2e0e8887752c074b13979bade652c783b300
+ size 5448147
model/best_100.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e59b8996303a92da0351907dbb32b75a3d347ad1cdf888a43be41bf32d842a42
+ size 5457107
model/best_100_fix.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:994df22b8e4c4ca91b297e3293b8a4ac8bacba822df094c96f4fabd314680853
+ size 5457043
model/best_4.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0bbff7d27045c9585a55a4ee3b19a495e2cf07c5339d1eedd3cfed0aebe38852
+ size 5448147
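Each .pt entry above is stored as a Git LFS pointer (spec v1): oid is the SHA-256 of the real weight file and size is its byte count. Below is a quick integrity check for the checkpoint app2.py actually loads, model/best_100_fix.pt, using the digest from its pointer above; the helper function is illustrative and not part of the repo.

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file and return its hex SHA-256 digest
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk_size), b''):
            digest.update(block)
    return digest.hexdigest()

# Expected digest copied from the LFS pointer for model/best_100_fix.pt
EXPECTED = '994df22b8e4c4ca91b297e3293b8a4ac8bacba822df094c96f4fabd314680853'
assert sha256_of('model/best_100_fix.pt') == EXPECTED, 'weights do not match the LFS pointer'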
requirements.txt ADDED
@@ -0,0 +1,117 @@
+ altair==5.5.0
+ altgraph==0.17.4
+ appnope==0.1.4
+ asttokens==3.0.0
+ attrs==25.1.0
+ backcall==0.2.0
+ beautifulsoup4==4.13.3
+ bleach==6.2.0
+ blinker==1.9.0
+ bottle==0.13.2
+ cachetools==5.5.2
+ certifi==2025.1.31
+ cffi==1.17.1
+ charset-normalizer==3.4.1
+ click==8.1.8
+ clr_loader==0.2.7.post0
+ colorama==0.4.6
+ contourpy==1.3.1
+ cycler==0.12.1
+ decorator==5.2.1
+ defusedxml==0.7.1
+ docopt==0.6.2
+ executing==2.2.0
+ fastjsonschema==2.21.1
+ filelock==3.17.0
+ fonttools==4.56.0
+ fsspec==2025.3.0
+ gitdb==4.0.12
+ GitPython==3.1.44
+ idna==3.10
+ ipython==8.12.3
+ jedi==0.19.2
+ Jinja2==3.1.6
+ jsonschema==4.23.0
+ jsonschema-specifications==2024.10.1
+ jupyter_client==8.6.3
+ jupyter_core==5.7.2
+ jupyterlab_pygments==0.3.0
+ kiwisolver==1.4.8
+ lap==0.5.12
+ macholib==1.16.3
+ MarkupSafe==3.0.2
+ matplotlib==3.10.1
+ matplotlib-inline==0.1.7
+ mistune==3.1.2
+ mpmath==1.3.0
+ narwhals==1.29.1
+ nbclient==0.10.2
+ nbconvert==7.16.6
+ nbformat==5.10.4
+ networkx==3.4.2
+ numpy==2.1.1
+ opencv-python==4.11.0.86
+ packaging==24.2
+ pandas==2.2.3
+ pandocfilters==1.5.1
+ parso==0.8.4
+ pefile==2023.2.7
+ pexpect==4.9.0
+ pickleshare==0.7.5
+ pillow==11.1.0
+ pipreqs==0.5.0
+ platformdirs==4.3.6
+ prompt_toolkit==3.0.50
+ protobuf==5.29.3
+ proxy_tools==0.1.0
+ psutil==7.0.0
+ ptyprocess==0.7.0
+ pure_eval==0.2.3
+ py-cpuinfo==9.0.0
+ pyarrow==19.0.1
+ pycparser==2.22
+ pydeck==0.9.1
+ Pygments==2.19.1
+ pyinstaller==6.12.0
+ pyinstaller-hooks-contrib==2025.1
+ pyobjc-core==11.1
+ pyobjc-framework-Cocoa==11.1
+ pyobjc-framework-Quartz==11.1
+ pyobjc-framework-Security==11.1
+ pyobjc-framework-WebKit==11.1
+ pyparsing==3.2.1
+ python-dateutil==2.9.0.post0
+ pythonnet==3.0.5
+ pytz==2025.1
+ pywebview==5.4
+ PyYAML==6.0.2
+ pyzmq==26.2.1
+ referencing==0.36.2
+ requests==2.32.3
+ rpds-py==0.23.1
+ scipy==1.15.2
+ seaborn==0.13.2
+ six==1.17.0
+ smmap==5.0.2
+ soupsieve==2.6
+ stack-data==0.6.3
+ streamlit==1.43.1
+ streamlit-desktop-app==0.3.4
+ sympy==1.13.1
+ tenacity==9.0.0
+ tinycss2==1.4.0
+ toml==0.10.2
+ torch==2.6.0
+ torchvision==0.21.0
+ tornado==6.4.2
+ tqdm==4.67.1
+ traitlets==5.14.3
+ typing_extensions==4.12.2
+ tzdata==2025.1
+ ultralytics==8.3.85
+ ultralytics-thop==2.0.14
+ urllib3==2.3.0
+ watchdog==6.0.0
+ wcwidth==0.2.13
+ webencodings==0.5.1
+ yarg==0.1.9
templates/screen_share.html ADDED
@@ -0,0 +1,533 @@
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Squadrone</title>
7
+ <style>
8
+ body {
9
+ font-family: Arial, sans-serif;
10
+ margin: 0;
11
+ padding: 20px;
12
+ background-color: #f0f0f0;
13
+ }
14
+ .container {
15
+ max-width: 1000px;
16
+ margin: 0 auto;
17
+ background-color: white;
18
+ padding: 20px;
19
+ border-radius: 8px;
20
+ box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
21
+ }
22
+ h1 {
23
+ color: #333;
24
+ text-align: center;
25
+ }
26
+ .video-container {
27
+ margin: 20px 0;
28
+ text-align: center;
29
+ position: relative;
30
+ }
31
+ video {
32
+ max-width: 100%;
33
+ border: 1px solid #ddd;
34
+ border-radius: 4px;
35
+ }
36
+ canvas {
37
+ position: absolute;
38
+ left: 0;
39
+ top: 0;
40
+ max-width: 100%;
41
+ }
42
+ button {
43
+ background-color: #4CAF50;
44
+ border: none;
45
+ color: white;
46
+ padding: 10px 20px;
47
+ text-align: center;
48
+ text-decoration: none;
49
+ display: inline-block;
50
+ font-size: 16px;
51
+ margin: 10px 5px;
52
+ cursor: pointer;
53
+ border-radius: 4px;
54
+ }
55
+ button:hover {
56
+ background-color: #45a049;
57
+ }
58
+ button:disabled {
59
+ background-color: #cccccc;
60
+ cursor: not-allowed;
61
+ }
62
+ .controls {
63
+ text-align: center;
64
+ margin-bottom: 20px;
65
+ }
66
+ .status {
67
+ text-align: center;
68
+ color: #666;
69
+ margin: 10px 0;
70
+ font-style: italic;
71
+ }
72
+ .detection-info {
73
+ margin-top: 20px;
74
+ padding: 10px;
75
+ border: 1px solid #ddd;
76
+ border-radius: 4px;
77
+ max-height: 200px;
78
+ overflow-y: auto;
79
+ }
80
+ .fps-counter {
81
+ position: absolute;
82
+ top: 10px;
83
+ right: 10px;
84
+ background-color: rgba(0,0,0,0.5);
85
+ color: white;
86
+ padding: 5px;
87
+ border-radius: 3px;
88
+ font-size: 12px;
89
+ }
90
+ .alarm {
91
+ background-color: #ff0000;
92
+ color: white;
93
+ padding: 15px;
94
+ margin-top: 10px;
95
+ text-align: center;
96
+ font-weight: bold;
97
+ font-size: 18px;
98
+ border-radius: 4px;
99
+ display: none;
100
+ }
101
+ @keyframes alarm-flash {
102
+ 0%, 100% { opacity: 1; }
103
+ 50% { opacity: 0.5; }
104
+ }
105
+ .alarm-active {
106
+ display: block;
107
+ animation: alarm-flash 1s infinite;
108
+ }
109
+ </style>
110
+ </head>
111
+ <body>
112
+ <div class="container">
113
+ <h1>Squadrone v1.0.1</h1>
114
+
115
+ <div class="controls">
116
+ <button id="startButton">Start Screen Share</button>
117
+ <button id="stopButton" disabled>Stop Screen Share</button>
118
+ <button id="startDetectionButton" disabled>Start Detection</button>
119
+ <button id="pauseDetectionButton" disabled>Pause Detection</button>
120
+ </div>
121
+
122
+ <div class="status" id="status">Ready to share screen</div>
123
+
124
+ <!-- Alarm sound -->
125
+ <div class="alarm" id="humanAlarm">ALERT: Human detected for extended period!</div>
126
+
127
+
128
+ <div class="video-container">
129
+ <video id="screenVideo" autoplay playsinline muted></video>
130
+ <canvas id="detectionCanvas"></canvas>
131
+ <div id="fpsCounter" class="fps-counter">0 FPS</div>
132
+ </div>
133
+
134
+ <div class="detection-info" id="detectionResults">
135
+ <h3>Detection Results</h3>
136
+ <p>No detection performed yet.</p>
137
+ </div>
138
+ </div>
139
+
140
+ <script>
141
+ const startButton = document.getElementById('startButton');
142
+ const stopButton = document.getElementById('stopButton');
143
+ const startDetectionButton = document.getElementById('startDetectionButton');
144
+ const pauseDetectionButton = document.getElementById('pauseDetectionButton');
145
+ const screenVideo = document.getElementById('screenVideo');
146
+ const statusElement = document.getElementById('status');
147
+ const detectionCanvas = document.getElementById('detectionCanvas');
148
+ const detectionResults = document.getElementById('detectionResults');
149
+ const fpsCounter = document.getElementById('fpsCounter');
150
+ const humanAlarm = document.getElementById('humanAlarm');
151
+
152
+ let mediaStream = null;
153
+ let capturedFrame = null;
154
+ let detectionActive = false;
155
+ let detectionInProgress = false;
156
+ let animationFrameId = null;
157
+ let lastDetectionTime = 0;
158
+ let frameCount = 0;
159
+ let lastFpsUpdateTime = 0;
160
+ let alarmActive = false;
161
+
162
+ // Add this line for the alarm sound
163
+ const humanDetectionAudio = new Audio('/audio/enemy_spotted.mp3');
164
+ // Detection throttle settings
165
+ const MIN_DETECTION_INTERVAL = 100; // ms between detection requests
166
+
167
+ startButton.addEventListener('click', async () => {
168
+ try {
169
+ statusElement.textContent = 'Requesting screen access...';
170
+
171
+ // Request screen capture
172
+ mediaStream = await navigator.mediaDevices.getDisplayMedia({
173
+ video: {
174
+ cursor: "always"
175
+ },
176
+ audio: false
177
+ });
178
+
179
+ // Connect the media stream to the video element
180
+ screenVideo.srcObject = mediaStream;
181
+
182
+ // Wait for video to be loaded
183
+ screenVideo.onloadedmetadata = () => {
184
+ // Set canvas dimensions to match video
185
+ detectionCanvas.width = screenVideo.videoWidth;
186
+ detectionCanvas.height = screenVideo.videoHeight;
187
+ };
188
+
189
+ // Enable buttons
190
+ startButton.disabled = true;
191
+ stopButton.disabled = false;
192
+ startDetectionButton.disabled = false;
193
+
194
+ statusElement.textContent = 'Screen sharing active';
195
+
196
+ // Listen for the end of stream
197
+ mediaStream.getVideoTracks()[0].addEventListener('ended', () => {
198
+ stopScreenSharing();
199
+ });
200
+
201
+ } catch (error) {
202
+ console.error('Error accessing screen:', error);
203
+ statusElement.textContent = `Error: ${error.message || 'Could not access screen'}`;
204
+ }
205
+ });
206
+
207
+ stopButton.addEventListener('click', stopScreenSharing);
208
+
209
+ function stopScreenSharing() {
210
+ stopDetection();
211
+
212
+ if (mediaStream) {
213
+ mediaStream.getTracks().forEach(track => track.stop());
214
+ screenVideo.srcObject = null;
215
+ }
216
+
217
+ startButton.disabled = false;
218
+ stopButton.disabled = true;
219
+ startDetectionButton.disabled = true;
220
+ pauseDetectionButton.disabled = true;
221
+ statusElement.textContent = 'Screen sharing stopped';
222
+ }
223
+
224
+ startDetectionButton.addEventListener('click', startDetection);
225
+ pauseDetectionButton.addEventListener('click', pauseDetection);
226
+
227
+ function startDetection() {
228
+ if (!screenVideo.srcObject) {
229
+ statusElement.textContent = 'No video stream available';
230
+ return;
231
+ }
232
+
233
+ detectionActive = true;
234
+ startDetectionButton.disabled = true;
235
+ pauseDetectionButton.disabled = false;
236
+ statusElement.textContent = 'Real-time detection active';
237
+
238
+ // Start the detection loop
239
+ lastFpsUpdateTime = performance.now();
240
+ frameCount = 0;
241
+ detectLoop();
242
+ }
243
+
244
+ function pauseDetection() {
245
+ detectionActive = false;
246
+ startDetectionButton.disabled = false;
247
+ pauseDetectionButton.disabled = true;
248
+ statusElement.textContent = 'Detection paused';
249
+
250
+ if (animationFrameId) {
251
+ cancelAnimationFrame(animationFrameId);
252
+ animationFrameId = null;
253
+ }
254
+
255
+ // Reset alarm
256
+ if (alarmActive) {
257
+ resetAlarm();
258
+ }
259
+ }
260
+
261
+ function stopDetection() {
262
+ detectionActive = false;
263
+ if (animationFrameId) {
264
+ cancelAnimationFrame(animationFrameId);
265
+ animationFrameId = null;
266
+ }
267
+ }
268
+
269
+ function captureVideoFrame() {
270
+ if (!screenVideo.srcObject) {
271
+ return false;
272
+ }
273
+
274
+ const canvas = document.createElement('canvas');
275
+ canvas.width = detectionCanvas.width;
276
+ canvas.height = detectionCanvas.height;
277
+ const ctx = canvas.getContext('2d');
278
+
279
+ // Draw the current video frame on a temporary canvas
280
+ ctx.drawImage(screenVideo, 0, 0, canvas.width, canvas.height);
281
+
282
+ // Store the captured frame as data URL - use lower quality for better performance
283
+ capturedFrame = canvas.toDataURL('image/jpeg', 0.7);
284
+ return true;
285
+ }
286
+
287
+ function detectLoop() {
288
+ if (!detectionActive) return;
289
+
290
+ // Calculate FPS
291
+ frameCount++;
292
+ const now = performance.now();
293
+ const elapsed = now - lastFpsUpdateTime;
294
+
295
+ if (elapsed >= 1000) { // Update FPS once per second
296
+ const fps = Math.round((frameCount / elapsed) * 1000);
297
+ fpsCounter.textContent = `${fps} FPS`;
298
+ frameCount = 0;
299
+ lastFpsUpdateTime = now;
300
+ }
301
+
302
+ // Update the video display without clearing the bounding boxes
303
+ const ctx = detectionCanvas.getContext('2d');
304
+ ctx.drawImage(screenVideo, 0, 0, detectionCanvas.width, detectionCanvas.height);
305
+
306
+ // Check if we should send a new detection request
307
+ if (!detectionInProgress && now - lastDetectionTime >= MIN_DETECTION_INTERVAL) {
308
+ if (captureVideoFrame()) {
309
+ lastDetectionTime = now;
310
+ detectObjects();
311
+ }
312
+ }
313
+
314
+ // Continue the loop
315
+ animationFrameId = requestAnimationFrame(detectLoop);
316
+ }
317
+
318
+ async function detectObjects() {
319
+ if (!capturedFrame || detectionInProgress) {
320
+ return;
321
+ }
322
+
323
+ try {
324
+ detectionInProgress = true;
325
+
326
+ // Send the captured frame to your Flask backend
327
+ const response = await fetch('/detect', {
328
+ method: 'POST',
329
+ headers: {
330
+ 'Content-Type': 'application/json'
331
+ },
332
+ body: JSON.stringify({
333
+ image: capturedFrame
334
+ })
335
+ });
336
+
337
+ if (!response.ok) {
338
+ throw new Error(`Server returned ${response.status}`);
339
+ }
340
+
341
+ const result = await response.json();
342
+
343
+ if (result.success) {
344
+ // Display detection results
345
+ displayDetectionResults(result);
346
+
347
+ // Store the latest detections to be drawn in the detectLoop
348
+ latestDetections = result.detections;
349
+
350
+ // Check alarm status from server
351
+ handleAlarmStatus(result.alarm);
352
+ } else {
353
+ throw new Error(result.error || 'Detection failed');
354
+ }
355
+
356
+ } catch (error) {
357
+ console.error('Error in detection:', error);
358
+ statusElement.textContent = `Error: ${error.message}`;
359
+ } finally {
360
+ detectionInProgress = false;
361
+ }
362
+ }
363
+
364
+ // Add a global variable to store latest detections
365
+ let latestDetections = [];
366
+
367
+ function detectLoop() {
368
+ if (!detectionActive) return;
369
+
370
+ // Calculate FPS
371
+ frameCount++;
372
+ const now = performance.now();
373
+ const elapsed = now - lastFpsUpdateTime;
374
+
375
+ if (elapsed >= 1000) { // Update FPS once per second
376
+ const fps = Math.round((frameCount / elapsed) * 1000);
377
+ fpsCounter.textContent = `${fps} FPS`;
378
+ frameCount = 0;
379
+ lastFpsUpdateTime = now;
380
+ }
381
+
382
+ // Clear canvas and update the video display
383
+ const ctx = detectionCanvas.getContext('2d');
384
+ ctx.clearRect(0, 0, detectionCanvas.width, detectionCanvas.height);
385
+ ctx.drawImage(screenVideo, 0, 0, detectionCanvas.width, detectionCanvas.height);
386
+
387
+ // Draw detection boxes from the latest results
388
+ if (latestDetections && latestDetections.length) {
389
+ drawDetectionBoxes(latestDetections);
390
+ }
391
+
392
+ // Check if we should send a new detection request
393
+ if (!detectionInProgress && now - lastDetectionTime >= MIN_DETECTION_INTERVAL) {
394
+ if (captureVideoFrame()) {
395
+ lastDetectionTime = now;
396
+ detectObjects();
397
+ }
398
+ }
399
+
400
+ // Continue the loop
401
+ animationFrameId = requestAnimationFrame(detectLoop);
402
+ }
403
+
404
+ // Add function to handle alarm status
405
+ function handleAlarmStatus(alarmStatus) {
406
+ if (alarmStatus.active) {
407
+ if (!alarmActive) {
408
+ triggerAlarm();
409
+ }
410
+ } else {
411
+ if (alarmActive) {
412
+ resetAlarm();
413
+ }
414
+ }
415
+ }
416
+
417
+ function triggerAlarm() {
418
+ alarmActive = true;
419
+ humanAlarm.classList.add('alarm-active');
420
+
421
+ // Play alarm sound
422
+ humanDetectionAudio.play().catch(err => console.log('Audio play error:', err));
423
+
424
+ // Update status
425
+ // statusElement.textContent = 'ALERT: Human detected!';
426
+ }
427
+
428
+ function resetAlarm() {
429
+ alarmActive = false;
430
+ humanAlarm.classList.remove('alarm-active');
431
+ humanDetectionAudio.pause();
432
+ humanDetectionAudio.currentTime = 0;
433
+
434
+ // Reset on server side too
435
+ fetch('/reset_alarm', {
436
+ method: 'POST',
437
+ headers: {
438
+ 'Content-Type': 'application/json'
439
+ }
440
+ }).catch(err => console.log('Error resetting alarm on server:', err));
441
+ }
442
+
443
+ function displayDetectionResults(result) {
444
+ let html = '<h3>Detection Results</h3>';
445
+
446
+ if (result.detections && result.detections.length) {
447
+ html += '<ul>';
448
+ result.detections.forEach(detection => {
449
+ html += `<li>${detection.class} (${(detection.confidence * 100).toFixed(2)}%)</li>`;
450
+ });
451
+ html += '</ul>';
452
+ } else {
453
+ html += '<p>No objects detected.</p>';
454
+ }
455
+
456
+ detectionResults.innerHTML = html;
457
+ }
458
+
459
+ function drawDetectionBoxes(detections) {
460
+ if (!detections || !detections.length) return;
461
+
462
+ const canvas = detectionCanvas;
463
+ const ctx = canvas.getContext('2d');
464
+
465
+ // No need to clear or redraw the video frame here, that's done in detectLoop
466
+
467
+ // Draw detection boxes
468
+ detections.forEach(detection => {
469
+ const { box, class: className, confidence } = detection;
470
+ const [x1, y1, x2, y2] = box;
471
+
472
+ ctx.strokeStyle = 'lime';
473
+ ctx.lineWidth = 3;
474
+ ctx.strokeRect(x1, y1, x2-x1, y2-y1);
475
+
476
+ // Draw label
477
+ ctx.fillStyle = 'lime';
478
+ ctx.font = '16px Arial';
479
+ const label = `${className} ${(confidence * 100).toFixed(1)}%`;
480
+ const textWidth = ctx.measureText(label).width;
481
+
482
+ ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';
483
+ ctx.fillRect(x1, y1 - 25, textWidth + 10, 25);
484
+
485
+ ctx.fillStyle = 'white';
486
+ ctx.fillText(label, x1 + 5, y1 - 7);
487
+ });
488
+ }
489
+
490
+
491
+ <!-- function drawDetectionBoxes(detections) {-->
492
+ <!-- // if (!detections || !detections.length) return;-->
493
+
494
+ <!-- const canvas = detectionCanvas;-->
495
+ <!-- const ctx = canvas.getContext('2d');-->
496
+
497
+ <!-- // Clear previous drawings-->
498
+ <!-- ctx.clearRect(0, 0, canvas.width, canvas.height);-->
499
+
500
+ <!-- // Draw current video frame-->
501
+ <!-- ctx.drawImage(screenVideo, 0, 0, canvas.width, canvas.height);-->
502
+
503
+ <!-- // Draw detection boxes only for humans-->
504
+ <!-- if (detections && detections.length) {-->
505
+ <!-- detections.forEach(detection => {-->
506
+ <!-- const { box, class: className, confidence } = detection;-->
507
+
508
+ <!-- // Only draw boxes for human/person detections-->
509
+ <!-- if (className.toLowerCase() === 'person' || className.toLowerCase() === 'human') {-->
510
+ <!-- const [x1, y1, x2, y2] = box;-->
511
+
512
+ <!-- // Red color for humans-->
513
+ <!-- ctx.strokeStyle = 'red';-->
514
+ <!-- ctx.lineWidth = 3;-->
515
+ <!-- ctx.strokeRect(x1, y1, x2-x1, y2-y1);-->
516
+
517
+ <!-- // Draw label-->
518
+ <!-- ctx.font = '16px Arial';-->
519
+ <!-- const label = `${className} ${(confidence * 100).toFixed(1)}%`;-->
520
+ <!-- const textWidth = ctx.measureText(label).width;-->
521
+
522
+ <!-- ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';-->
523
+ <!-- ctx.fillRect(x1, y1 - 25, textWidth + 10, 25);-->
524
+
525
+ <!-- ctx.fillStyle = 'white';-->
526
+ <!-- ctx.fillText(label, x1 + 5, y1 - 7);-->
527
+ <!-- }-->
528
+ <!-- });-->
529
+ <!-- }-->
530
+ <!-- }-->
531
+ </script>
532
+ </body>
533
+ </html>
templates/screen_share_bck.html ADDED
@@ -0,0 +1,533 @@
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Squadrone</title>
7
+ <style>
8
+ body {
9
+ font-family: Arial, sans-serif;
10
+ margin: 0;
11
+ padding: 20px;
12
+ background-color: #f0f0f0;
13
+ }
14
+ .container {
15
+ max-width: 1000px;
16
+ margin: 0 auto;
17
+ background-color: white;
18
+ padding: 20px;
19
+ border-radius: 8px;
20
+ box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
21
+ }
22
+ h1 {
23
+ color: #333;
24
+ text-align: center;
25
+ }
26
+ .video-container {
27
+ margin: 20px 0;
28
+ text-align: center;
29
+ position: relative;
30
+ }
31
+ video {
32
+ max-width: 100%;
33
+ border: 1px solid #ddd;
34
+ border-radius: 4px;
35
+ }
36
+ canvas {
37
+ position: absolute;
38
+ left: 0;
39
+ top: 0;
40
+ max-width: 100%;
41
+ }
42
+ button {
43
+ background-color: #4CAF50;
44
+ border: none;
45
+ color: white;
46
+ padding: 10px 20px;
47
+ text-align: center;
48
+ text-decoration: none;
49
+ display: inline-block;
50
+ font-size: 16px;
51
+ margin: 10px 5px;
52
+ cursor: pointer;
53
+ border-radius: 4px;
54
+ }
55
+ button:hover {
56
+ background-color: #45a049;
57
+ }
58
+ button:disabled {
59
+ background-color: #cccccc;
60
+ cursor: not-allowed;
61
+ }
62
+ .controls {
63
+ text-align: center;
64
+ margin-bottom: 20px;
65
+ }
66
+ .status {
67
+ text-align: center;
68
+ color: #666;
69
+ margin: 10px 0;
70
+ font-style: italic;
71
+ }
72
+ .detection-info {
73
+ margin-top: 20px;
74
+ padding: 10px;
75
+ border: 1px solid #ddd;
76
+ border-radius: 4px;
77
+ max-height: 200px;
78
+ overflow-y: auto;
79
+ }
80
+ .fps-counter {
81
+ position: absolute;
82
+ top: 10px;
83
+ right: 10px;
84
+ background-color: rgba(0,0,0,0.5);
85
+ color: white;
86
+ padding: 5px;
87
+ border-radius: 3px;
88
+ font-size: 12px;
89
+ }
90
+ .alarm {
91
+ background-color: #ff0000;
92
+ color: white;
93
+ padding: 15px;
94
+ margin-top: 10px;
95
+ text-align: center;
96
+ font-weight: bold;
97
+ font-size: 18px;
98
+ border-radius: 4px;
99
+ display: none;
100
+ }
101
+ @keyframes alarm-flash {
102
+ 0%, 100% { opacity: 1; }
103
+ 50% { opacity: 0.5; }
104
+ }
105
+ .alarm-active {
106
+ display: block;
107
+ animation: alarm-flash 1s infinite;
108
+ }
109
+ </style>
110
+ </head>
111
+ <body>
112
+ <div class="container">
113
+ <h1>Squadrone v1.0.1</h1>
114
+
115
+ <div class="controls">
116
+ <button id="startButton">Start Screen Share</button>
117
+ <button id="stopButton" disabled>Stop Screen Share</button>
118
+ <button id="startDetectionButton" disabled>Start Detection</button>
119
+ <button id="pauseDetectionButton" disabled>Pause Detection</button>
120
+ </div>
121
+
122
+ <div class="status" id="status">Ready to share screen</div>
123
+
124
+ <!-- Alarm sound -->
125
+ <div class="alarm" id="humanAlarm">ALERT: Human detected for extended period!</div>
126
+
127
+
128
+ <div class="video-container">
129
+ <video id="screenVideo" autoplay playsinline muted></video>
130
+ <canvas id="detectionCanvas"></canvas>
131
+ <div id="fpsCounter" class="fps-counter">0 FPS</div>
132
+ </div>
133
+
134
+ <div class="detection-info" id="detectionResults">
135
+ <h3>Detection Results</h3>
136
+ <p>No detection performed yet.</p>
137
+ </div>
138
+ </div>
139
+
140
+ <script>
141
+ const startButton = document.getElementById('startButton');
142
+ const stopButton = document.getElementById('stopButton');
143
+ const startDetectionButton = document.getElementById('startDetectionButton');
144
+ const pauseDetectionButton = document.getElementById('pauseDetectionButton');
145
+ const screenVideo = document.getElementById('screenVideo');
146
+ const statusElement = document.getElementById('status');
147
+ const detectionCanvas = document.getElementById('detectionCanvas');
148
+ const detectionResults = document.getElementById('detectionResults');
149
+ const fpsCounter = document.getElementById('fpsCounter');
150
+ const humanAlarm = document.getElementById('humanAlarm');
151
+
152
+ let mediaStream = null;
153
+ let capturedFrame = null;
154
+ let detectionActive = false;
155
+ let detectionInProgress = false;
156
+ let animationFrameId = null;
157
+ let lastDetectionTime = 0;
158
+ let frameCount = 0;
159
+ let lastFpsUpdateTime = 0;
160
+ let alarmActive = false;
161
+
162
+ // Add this line for the alarm sound
163
+ const humanDetectionAudio = new Audio('/audio/enemy_spotted.mp3');
164
+ // Detection throttle settings
165
+ const MIN_DETECTION_INTERVAL = 100; // ms between detection requests
166
+
167
+ startButton.addEventListener('click', async () => {
168
+ try {
169
+ statusElement.textContent = 'Requesting screen access...';
170
+
171
+ // Request screen capture
172
+ mediaStream = await navigator.mediaDevices.getDisplayMedia({
173
+ video: {
174
+ cursor: "always"
175
+ },
176
+ audio: false
177
+ });
178
+
179
+ // Connect the media stream to the video element
180
+ screenVideo.srcObject = mediaStream;
181
+
182
+ // Wait for video to be loaded
183
+ screenVideo.onloadedmetadata = () => {
184
+ // Set canvas dimensions to match video
185
+ detectionCanvas.width = screenVideo.videoWidth;
186
+ detectionCanvas.height = screenVideo.videoHeight;
187
+ };
188
+
189
+ // Enable buttons
190
+ startButton.disabled = true;
191
+ stopButton.disabled = false;
192
+ startDetectionButton.disabled = false;
193
+
194
+ statusElement.textContent = 'Screen sharing active';
195
+
196
+ // Listen for the end of stream
197
+ mediaStream.getVideoTracks()[0].addEventListener('ended', () => {
198
+ stopScreenSharing();
199
+ });
200
+
201
+ } catch (error) {
202
+ console.error('Error accessing screen:', error);
203
+ statusElement.textContent = `Error: ${error.message || 'Could not access screen'}`;
204
+ }
205
+ });
206
+
207
+ stopButton.addEventListener('click', stopScreenSharing);
208
+
209
+ function stopScreenSharing() {
210
+ stopDetection();
211
+
212
+ if (mediaStream) {
213
+ mediaStream.getTracks().forEach(track => track.stop());
214
+ screenVideo.srcObject = null;
215
+ }
216
+
217
+ startButton.disabled = false;
218
+ stopButton.disabled = true;
219
+ startDetectionButton.disabled = true;
220
+ pauseDetectionButton.disabled = true;
221
+ statusElement.textContent = 'Screen sharing stopped';
222
+ }
223
+
224
+ startDetectionButton.addEventListener('click', startDetection);
225
+ pauseDetectionButton.addEventListener('click', pauseDetection);
226
+
227
+ function startDetection() {
228
+ if (!screenVideo.srcObject) {
229
+ statusElement.textContent = 'No video stream available';
230
+ return;
231
+ }
232
+
233
+ detectionActive = true;
234
+ startDetectionButton.disabled = true;
235
+ pauseDetectionButton.disabled = false;
236
+ statusElement.textContent = 'Real-time detection active';
237
+
238
+ // Start the detection loop
239
+ lastFpsUpdateTime = performance.now();
240
+ frameCount = 0;
241
+ detectLoop();
242
+ }
243
+
244
+ function pauseDetection() {
245
+ detectionActive = false;
246
+ startDetectionButton.disabled = false;
247
+ pauseDetectionButton.disabled = true;
248
+ statusElement.textContent = 'Detection paused';
249
+
250
+ if (animationFrameId) {
251
+ cancelAnimationFrame(animationFrameId);
252
+ animationFrameId = null;
253
+ }
254
+
255
+ // Reset alarm
256
+ if (alarmActive) {
257
+ resetAlarm();
258
+ }
259
+ }
260
+
261
+ function stopDetection() {
262
+ detectionActive = false;
263
+ if (animationFrameId) {
264
+ cancelAnimationFrame(animationFrameId);
265
+ animationFrameId = null;
266
+ }
267
+ }
268
+
269
+ function captureVideoFrame() {
270
+ if (!screenVideo.srcObject) {
271
+ return false;
272
+ }
273
+
274
+ const canvas = document.createElement('canvas');
275
+ canvas.width = detectionCanvas.width;
276
+ canvas.height = detectionCanvas.height;
277
+ const ctx = canvas.getContext('2d');
278
+
279
+ // Draw the current video frame on a temporary canvas
280
+ ctx.drawImage(screenVideo, 0, 0, canvas.width, canvas.height);
281
+
282
+ // Store the captured frame as data URL - use lower quality for better performance
283
+ capturedFrame = canvas.toDataURL('image/jpeg', 0.7);
284
+ return true;
285
+ }
286
+
287
+ function detectLoop() {
288
+ if (!detectionActive) return;
289
+
290
+ // Calculate FPS
291
+ frameCount++;
292
+ const now = performance.now();
293
+ const elapsed = now - lastFpsUpdateTime;
294
+
295
+ if (elapsed >= 1000) { // Update FPS once per second
296
+ const fps = Math.round((frameCount / elapsed) * 1000);
297
+ fpsCounter.textContent = `${fps} FPS`;
298
+ frameCount = 0;
299
+ lastFpsUpdateTime = now;
300
+ }
301
+
302
+ // Update the video display without clearing the bounding boxes
303
+ const ctx = detectionCanvas.getContext('2d');
304
+ ctx.drawImage(screenVideo, 0, 0, detectionCanvas.width, detectionCanvas.height);
305
+
306
+ // Check if we should send a new detection request
307
+ if (!detectionInProgress && now - lastDetectionTime >= MIN_DETECTION_INTERVAL) {
308
+ if (captureVideoFrame()) {
309
+ lastDetectionTime = now;
310
+ detectObjects();
311
+ }
312
+ }
313
+
314
+ // Continue the loop
315
+ animationFrameId = requestAnimationFrame(detectLoop);
316
+ }
317
+
318
+ async function detectObjects() {
319
+ if (!capturedFrame || detectionInProgress) {
320
+ return;
321
+ }
322
+
323
+ try {
324
+ detectionInProgress = true;
325
+
326
+ // Send the captured frame to your Flask backend
327
+ const response = await fetch('/detect', {
328
+ method: 'POST',
329
+ headers: {
330
+ 'Content-Type': 'application/json'
331
+ },
332
+ body: JSON.stringify({
333
+ image: capturedFrame
334
+ })
335
+ });
336
+
337
+ if (!response.ok) {
338
+ throw new Error(`Server returned ${response.status}`);
339
+ }
340
+
341
+ const result = await response.json();
342
+
343
+ if (result.success) {
344
+ // Display detection results
345
+ displayDetectionResults(result);
346
+
347
+ // Store the latest detections to be drawn in the detectLoop
348
+ latestDetections = result.detections;
349
+
350
+ // Check alarm status from server
351
+ handleAlarmStatus(result.alarm);
352
+ } else {
353
+ throw new Error(result.error || 'Detection failed');
354
+ }
355
+
356
+ } catch (error) {
357
+ console.error('Error in detection:', error);
358
+ statusElement.textContent = `Error: ${error.message}`;
359
+ } finally {
360
+ detectionInProgress = false;
361
+ }
362
+ }
363
+
364
+ // Add a global variable to store latest detections
365
+ let latestDetections = [];
366
+
367
+ function detectLoop() {
368
+ if (!detectionActive) return;
369
+
370
+ // Calculate FPS
371
+ frameCount++;
372
+ const now = performance.now();
373
+ const elapsed = now - lastFpsUpdateTime;
374
+
375
+ if (elapsed >= 1000) { // Update FPS once per second
376
+ const fps = Math.round((frameCount / elapsed) * 1000);
377
+ fpsCounter.textContent = `${fps} FPS`;
378
+ frameCount = 0;
379
+ lastFpsUpdateTime = now;
380
+ }
381
+
382
+ // Clear canvas and update the video display
383
+ const ctx = detectionCanvas.getContext('2d');
384
+ ctx.clearRect(0, 0, detectionCanvas.width, detectionCanvas.height);
385
+ ctx.drawImage(screenVideo, 0, 0, detectionCanvas.width, detectionCanvas.height);
386
+
387
+ // Draw detection boxes from the latest results
388
+ if (latestDetections && latestDetections.length) {
389
+ drawDetectionBoxes(latestDetections);
390
+ }
391
+
392
+ // Check if we should send a new detection request
393
+ if (!detectionInProgress && now - lastDetectionTime >= MIN_DETECTION_INTERVAL) {
394
+ if (captureVideoFrame()) {
395
+ lastDetectionTime = now;
396
+ detectObjects();
397
+ }
398
+ }
399
+
400
+ // Continue the loop
401
+ animationFrameId = requestAnimationFrame(detectLoop);
402
+ }
403
+
404
+ // Add function to handle alarm status
405
+ function handleAlarmStatus(alarmStatus) {
406
+ if (alarmStatus.active) {
407
+ if (!alarmActive) {
408
+ triggerAlarm();
409
+ }
410
+ } else {
411
+ if (alarmActive) {
412
+ resetAlarm();
413
+ }
414
+ }
415
+ }
416
+
417
+ function triggerAlarm() {
418
+ alarmActive = true;
419
+ humanAlarm.classList.add('alarm-active');
420
+
421
+ // Play alarm sound
422
+ humanDetectionAudio.play().catch(err => console.log('Audio play error:', err));
423
+
424
+ // Update status
425
+ // statusElement.textContent = 'ALERT: Human detected!';
426
+ }
427
+
428
+ function resetAlarm() {
429
+ alarmActive = false;
430
+ humanAlarm.classList.remove('alarm-active');
431
+ humanDetectionAudio.pause();
432
+ humanDetectionAudio.currentTime = 0;
433
+
434
+ // Reset on server side too
435
+ fetch('/reset_alarm', {
436
+ method: 'POST',
437
+ headers: {
438
+ 'Content-Type': 'application/json'
439
+ }
440
+ }).catch(err => console.log('Error resetting alarm on server:', err));
441
+ }
442
+
443
+ function displayDetectionResults(result) {
444
+ let html = '<h3>Detection Results</h3>';
445
+
446
+ if (result.detections && result.detections.length) {
447
+ html += '<ul>';
448
+ result.detections.forEach(detection => {
449
+ html += `<li>${detection.class} (${(detection.confidence * 100).toFixed(2)}%)</li>`;
450
+ });
451
+ html += '</ul>';
452
+ } else {
453
+ html += '<p>No objects detected.</p>';
454
+ }
455
+
456
+ detectionResults.innerHTML = html;
457
+ }
458
+
459
+ function drawDetectionBoxes(detections) {
460
+ if (!detections || !detections.length) return;
461
+
462
+ const canvas = detectionCanvas;
463
+ const ctx = canvas.getContext('2d');
464
+
465
+ // No need to clear or redraw the video frame here, that's done in detectLoop
466
+
467
+ // Draw detection boxes
468
+ detections.forEach(detection => {
469
+ const { box, class: className, confidence } = detection;
470
+ const [x1, y1, x2, y2] = box;
471
+
472
+ ctx.strokeStyle = 'lime';
473
+ ctx.lineWidth = 3;
474
+ ctx.strokeRect(x1, y1, x2-x1, y2-y1);
475
+
476
+ // Draw label
477
+ ctx.fillStyle = 'lime';
478
+ ctx.font = '16px Arial';
479
+ const label = `${className} ${(confidence * 100).toFixed(1)}%`;
480
+ const textWidth = ctx.measureText(label).width;
481
+
482
+ ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';
483
+ ctx.fillRect(x1, y1 - 25, textWidth + 10, 25);
484
+
485
+ ctx.fillStyle = 'white';
486
+ ctx.fillText(label, x1 + 5, y1 - 7);
487
+ });
488
+ }
489
+
490
+
491
+ <!-- function drawDetectionBoxes(detections) {-->
492
+ <!-- // if (!detections || !detections.length) return;-->
493
+
494
+ <!-- const canvas = detectionCanvas;-->
495
+ <!-- const ctx = canvas.getContext('2d');-->
496
+
497
+ <!-- // Clear previous drawings-->
498
+ <!-- ctx.clearRect(0, 0, canvas.width, canvas.height);-->
499
+
500
+ <!-- // Draw current video frame-->
501
+ <!-- ctx.drawImage(screenVideo, 0, 0, canvas.width, canvas.height);-->
502
+
503
+ <!-- // Draw detection boxes only for humans-->
504
+ <!-- if (detections && detections.length) {-->
505
+ <!-- detections.forEach(detection => {-->
506
+ <!-- const { box, class: className, confidence } = detection;-->
507
+
508
+ <!-- // Only draw boxes for human/person detections-->
509
+ <!-- if (className.toLowerCase() === 'person' || className.toLowerCase() === 'human') {-->
510
+ <!-- const [x1, y1, x2, y2] = box;-->
511
+
512
+ <!-- // Red color for humans-->
513
+ <!-- ctx.strokeStyle = 'red';-->
514
+ <!-- ctx.lineWidth = 3;-->
515
+ <!-- ctx.strokeRect(x1, y1, x2-x1, y2-y1);-->
516
+
517
+ <!-- // Draw label-->
518
+ <!-- ctx.font = '16px Arial';-->
519
+ <!-- const label = `${className} ${(confidence * 100).toFixed(1)}%`;-->
520
+ <!-- const textWidth = ctx.measureText(label).width;-->
521
+
522
+ <!-- ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';-->
523
+ <!-- ctx.fillRect(x1, y1 - 25, textWidth + 10, 25);-->
524
+
525
+ <!-- ctx.fillStyle = 'white';-->
526
+ <!-- ctx.fillText(label, x1 + 5, y1 - 7);-->
527
+ <!-- }-->
528
+ <!-- });-->
529
+ <!-- }-->
530
+ <!-- }-->
531
+ </script>
532
+ </body>
533
+ </html>