vancauwe commited on
Commit
fc967ca
Β·
2 Parent(s): fa82999 0301738

Merge branch 'chore/documentation' docs first draft

Browse files
This view is limited to 50 files because it contains too many changes. Β  See raw diff
Files changed (50) hide show
  1. README.md +1 -1
  2. app.py +0 -1
  3. dev/call_hf_batch.py +94 -0
  4. docs/dev_notes.md +40 -0
  5. docs/fix_tabrender.md +5 -0
  6. docs/input_handling.md +8 -0
  7. docs/main.md +10 -0
  8. docs/obs_map.md +7 -0
  9. docs/st_logs.md +7 -0
  10. docs/whale_gallery.md +4 -0
  11. docs/whale_viewer.md +4 -0
  12. images/references/640x427-atlantic-white-sided-dolphin.jpg +0 -3
  13. images/references/640x427-long-finned-pilot-whale.webp +0 -3
  14. images/references/640x427-southern-right-whale.jpg +0 -3
  15. images/references/Humpback.webp +0 -3
  16. images/references/Whale_Short-Finned_Pilot-markedDW.png +0 -3
  17. images/references/beluga.webp +0 -3
  18. images/references/blue-whale.webp +0 -3
  19. images/references/bottlenose_dolphin.webp +0 -3
  20. images/references/brydes.webp +0 -3
  21. images/references/common_dolphin.webp +0 -3
  22. images/references/cuviers_beaked_whale.webp +0 -3
  23. images/references/false-killer-whale.webp +0 -3
  24. images/references/fin-whale.webp +0 -3
  25. images/references/gray-whale.webp +0 -3
  26. images/references/killer_whale.webp +0 -3
  27. images/references/melon.webp +0 -3
  28. images/references/minke-whale.webp +0 -3
  29. images/references/pantropical-spotted-dolphin.webp +0 -3
  30. images/references/pygmy-killer-whale.webp +0 -3
  31. images/references/rough-toothed-dolphin.webp +0 -3
  32. images/references/sei.webp +0 -3
  33. images/references/spinner.webp +0 -3
  34. mkdocs.yaml +20 -3
  35. {call_models β†’ snippets}/click_map.py +0 -0
  36. {call_models β†’ snippets}/d_entry.py +0 -0
  37. snippets/extract_meta.py +1 -1
  38. {call_models β†’ snippets}/hotdogs.py +0 -0
  39. {call_models β†’ snippets}/imgs/cakes.jpg +0 -0
  40. {call_models β†’ snippets}/test_upload.py +0 -0
  41. {call_models β†’ src}/alps_map.py +0 -0
  42. {call_models β†’ src}/entry_and_hotdog.py +49 -12
  43. {call_models β†’ src}/fix_tabrender.py +37 -1
  44. {call_models β†’ src}/images/references/640x427-atlantic-white-sided-dolphin.jpg +0 -0
  45. {call_models β†’ src}/images/references/640x427-long-finned-pilot-whale.webp +0 -0
  46. {call_models β†’ src}/images/references/640x427-southern-right-whale.jpg +0 -0
  47. {call_models β†’ src}/images/references/Humpback.webp +0 -0
  48. {call_models β†’ src}/images/references/Whale_Short-Finned_Pilot-markedDW.png +0 -0
  49. {call_models β†’ src}/images/references/beluga.webp +0 -0
  50. {call_models β†’ src}/images/references/blue-whale.webp +0 -0
README.md CHANGED
@@ -6,7 +6,7 @@ colorTo: red
6
  sdk: streamlit
7
  sdk_version: 1.39.0
8
  python_version: "3.10"
9
- app_file: call_models/entry_and_hotdog.py
10
  pinned: false
11
  license: apache-2.0
12
  short_description: 'SDSC Hackathon - Project 10. '
 
6
  sdk: streamlit
7
  sdk_version: 1.39.0
8
  python_version: "3.10"
9
+ app_file: src/main.py
10
  pinned: false
11
  license: apache-2.0
12
  short_description: 'SDSC Hackathon - Project 10. '
app.py DELETED
@@ -1 +0,0 @@
1
- call_models/entry_and_hotdog.py
 
 
dev/call_hf_batch.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
import time
from pathlib import Path

import cv2
import pandas as pd
from transformers import AutoModelForImageClassification

'''
how to use this script:
1. get data from the kaggle competition, including images and the train.csv file
   edit the "base" variable, assuming the following layout

   ceteans/
   ├── images
   │   ├── 00021adfb725ed.jpg
   │   ├── 000562241d384d.jpg
   │   ├── ...
   └── train.csv

2. inspect the df_results dataframe to see how the model is performing
'''

# setup for the ML model on huggingface (our wrapper)
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
rev = 'main'

# load the model (trust_remote_code: the classifier wrapper lives in the model repo)
cetacean_classifier = AutoModelForImageClassification.from_pretrained(
    "Saving-Willy/cetacean-classifier",
    revision=rev,
    trust_remote_code=True)

# get ready to load images
base = Path('~/Documents/ceteans/').expanduser()
df = pd.read_csv(base / 'train.csv')

i_max = 100  # limit on the number of images to classify in this test (or None for no limit)

# for each file in base/images: 1/ load image, 2/ classify, 3/ compare against
# the relevant row in df. Also keep track of the time taken per step.
classifications = []

img_pth = base / 'images'
img_files = list(img_pth.glob('*.jpg'))

for i, img_file in enumerate(img_files):
    # look up the ground-truth species for this image in train.csv
    img_id = img_file.name  # includes .jpg
    target = df.loc[df['image'] == img_id, 'species'].item()

    start_time = time.time()
    image = cv2.imread(str(img_file))
    load_time = time.time() - start_time

    # cv2.imread returns None (no exception) on an unreadable file:
    # skip it rather than crash the whole batch.
    if image is None:
        print(f"skipping unreadable image: {img_file}")
        continue

    start_time = time.time()
    out = cetacean_classifier(image)  # get top 3 matches
    classify_time = time.time() - start_time

    whale_prediction1 = out['predictions'][0]

    # comparison: top-1 exact match, and presence anywhere in the top 3
    # (named to avoid shadowing the builtin `any`)
    top1_ok = whale_prediction1 == target
    in_top3 = target in out['predictions']
    row = [img_id, target, top1_ok, in_top3, load_time, classify_time] + list(out['predictions'])

    print(i, row)

    classifications.append(row)

    # stop once i_max images have been classified (i is 0-based, hence i + 1)
    if i_max is not None and i + 1 >= i_max:
        break


df_results = pd.DataFrame(
    classifications,
    columns=['img_id', 'target', 'ok', 'any', 'load_time', 'classify_time']
            + [f'pred_{i}' for i in range(3)])

# print out a few summary stats
# mean time to load and classify (formatted 3dp), +- std dev (formatted to 2dp)
print(f"Mean load time: {df_results['load_time'].mean():.3f} +- {df_results['load_time'].std():.2f} s")
print(f"Mean classify time: {df_results['classify_time'].mean():.3f} +- {df_results['classify_time'].std():.2f} s")

# accuracy: counts of top-1 correct and any-of-top-3 correct (integer counts)
print(f"Accuracy: correct with top prediction: {df_results['ok'].sum()} | any of top 3 correct: {df_results['any'].sum()} (of total {df_results.shape[0]})")

# diversity: is the model just predicting one class for everything it sees?
print("Which classes are predicted?")
print(df_results.pred_0.value_counts())
docs/dev_notes.md ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # How to run the UI
2
+
3
+ We set this up so it is hosted as a huggingface space. Each commit to `main` triggers a push and a rebuild on their servers.
4
+
5
+ For local testing, assuming you have all the required packages installed in a
6
+ conda env or virtualenv, and that env is activated:
7
+
8
+ ```
9
+ cd src
10
+ streamlit run main.py
11
+ ```
12
+ Then use a web browser to view the site indicated, by default: http://localhost:8501
13
+
14
+ # How to build and view docs locally
15
+
16
+ We have a CI action to present the docs on github.io.
17
+ To validate locally, you need the deps listed in `requirements.txt` installed.
18
+
19
+ Run
20
+ ```
21
+ mkdocs serve
22
+ ```
23
+ And navigate to the web server running locally, by default: http://127.0.0.1:8888/
24
+
25
+ This automatically watches for changes in the markdown files, but if you edit
26
+ something else like the docstrings in py files, triggering a rebuild in another terminal
27
+ refreshes the site, without having to quit and restart the server.
28
+ ```
29
+ mkdocs build -c
30
+ ```
31
+
32
+
33
+
34
+ # Set up a venv
35
+
36
+ (standard stuff)
37
+
38
+ # Set up a conda env
39
+
40
+ (Standard stuff)
docs/fix_tabrender.md ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ A js fix for certain UI elements, including maps, getting rendered into a
2
+ zero-sized frame by default. Here we resize it so it is visible once the tab is
3
+ clicked and no further interaction is required to see it.
4
+
5
+ ::: src.fix_tabrender
docs/input_handling.md ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ This module focuses on image and metadata entry:
2
+
3
+ - UI elements to upload an image and populate the metadata (or edit the
4
+ auto-discovered metadata)
5
+ - a container class for an observation
6
+
7
+
8
+ ::: src.input_handling
docs/main.md ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # Main entry point
2
+
3
+ This module sets up the streamlit UI frontend,
4
+ as well as logger and session state elements in the backend.
5
+
6
+ The session state is used to retain values from one interaction to the next, since the streamlit execution model is to re-run the entire script top-to-bottom upon each user interaction (e.g. click).
7
+ See streamlit [docs](https://docs.streamlit.io/develop/api-reference/caching-and-state/st.session_state).
8
+
9
+
10
+ ::: src.entry_and_hotdog
docs/obs_map.md ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ This module provides rendering of observations on an interactive map, with a variety of tilesets available.
2
+
3
+ Note: OSM, ESRI, and CartoDB map tiles are served without authentication/tokens,
4
+ and so render correctly on the huggingface deployment. The Stamen tiles render
5
+ on localhost but require a token to present on a 3rd-party site.
6
+
7
+ ::: src.obs_map
docs/st_logs.md ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ This module provides utilities to incorporate a standard python logger within streamlit.
2
+
3
+
4
+ # Streamlit log handler
5
+
6
+ ::: src.st_logs
7
+
docs/whale_gallery.md ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ This module provides a gallery of the whales and dolphins that the classifier
2
+ is trained on. It displays the images and links to further info on the species.
3
+
4
+ ::: src.whale_gallery
docs/whale_viewer.md ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ This module provides a streamlit rendering for the whales and dolphins that the classifier is aware of, and also holds the
2
+ metadata for them (images, class names that the classifier uses, and URLS for further information about each species).
3
+
4
+ ::: src.whale_viewer
images/references/640x427-atlantic-white-sided-dolphin.jpg DELETED

Git LFS Details

  • SHA256: ba6a9d014030f57a16f8c9a2a1fd757367ce384d6c3e00d23fa78d34ae29ea4b
  • Pointer size: 130 Bytes
  • Size of remote file: 20.9 kB
images/references/640x427-long-finned-pilot-whale.webp DELETED

Git LFS Details

  • SHA256: c4aa414e4412adc13101518a5d6d2b5829d37d2881e52813081c40dd664b2525
  • Pointer size: 131 Bytes
  • Size of remote file: 107 kB
images/references/640x427-southern-right-whale.jpg DELETED

Git LFS Details

  • SHA256: f49f60d729b2dd810a3ee69171b6beee8c6baaa8df26926c6d9df82bd620844e
  • Pointer size: 130 Bytes
  • Size of remote file: 20.9 kB
images/references/Humpback.webp DELETED

Git LFS Details

  • SHA256: 5660b04a32b5f154e9e1e7d74fb85a498f42b54e3503266738687ad7128731ee
  • Pointer size: 131 Bytes
  • Size of remote file: 169 kB
images/references/Whale_Short-Finned_Pilot-markedDW.png DELETED

Git LFS Details

  • SHA256: afb8e1b9c39761f1a4e4252fe47c6362c155fdec846c070af7770bc037f870a8
  • Pointer size: 131 Bytes
  • Size of remote file: 132 kB
images/references/beluga.webp DELETED

Git LFS Details

  • SHA256: a806dbc284f56e9821ea5d92b9b2f29757def579d03eb46abebaa568d3634748
  • Pointer size: 131 Bytes
  • Size of remote file: 106 kB
images/references/blue-whale.webp DELETED

Git LFS Details

  • SHA256: 7c3c8fad25250868f01a96421cf2aa10d9a06a297f52bf72d2489d084465c633
  • Pointer size: 130 Bytes
  • Size of remote file: 59.9 kB
images/references/bottlenose_dolphin.webp DELETED

Git LFS Details

  • SHA256: d01bdc2317ea829d9aca7e947dd0c17548288ef4111780a508b0f6c4e1640278
  • Pointer size: 131 Bytes
  • Size of remote file: 135 kB
images/references/brydes.webp DELETED

Git LFS Details

  • SHA256: e0fd3cc26bec1ac00ccf5b35232844f0e88584f8ff45db109a60d45c776273ea
  • Pointer size: 131 Bytes
  • Size of remote file: 126 kB
images/references/common_dolphin.webp DELETED

Git LFS Details

  • SHA256: 86972fe463ac13428cfc45ef6a0c62a5ea6000ee5acb8c68424db1f26a3faad0
  • Pointer size: 131 Bytes
  • Size of remote file: 121 kB
images/references/cuviers_beaked_whale.webp DELETED

Git LFS Details

  • SHA256: 9315fb8c1907a425dde8793268ef743c660b13b5cc53c77c70a7953df74698f6
  • Pointer size: 131 Bytes
  • Size of remote file: 129 kB
images/references/false-killer-whale.webp DELETED

Git LFS Details

  • SHA256: 65dfc2aefd7f4a16ed30ba0bc93b06324cb67efad4dd21fc5b82b67db27af443
  • Pointer size: 131 Bytes
  • Size of remote file: 114 kB
images/references/fin-whale.webp DELETED

Git LFS Details

  • SHA256: bbc78f05705020c12db3063eb63dc2a0ca9f217088ddf8ab434aebf2b4796e49
  • Pointer size: 130 Bytes
  • Size of remote file: 17.7 kB
images/references/gray-whale.webp DELETED

Git LFS Details

  • SHA256: 19e42e897fa9fec312d968b209818253c75f6b3b3130c44225dbc95dc724c048
  • Pointer size: 131 Bytes
  • Size of remote file: 137 kB
images/references/killer_whale.webp DELETED

Git LFS Details

  • SHA256: 9df433f88111f0fd967e937f8c03d98a97aefe15eb9bc319ed5a7580380ff88e
  • Pointer size: 130 Bytes
  • Size of remote file: 85 kB
images/references/melon.webp DELETED

Git LFS Details

  • SHA256: 9468f1a324feb02faf1709d733a9353aadf27a4a609c3e8d025125836fae3c42
  • Pointer size: 131 Bytes
  • Size of remote file: 106 kB
images/references/minke-whale.webp DELETED

Git LFS Details

  • SHA256: e429d1835e9cb370a8ba9791be16bfbcc5706dcc0e5f4e0c75c792b5e7a88095
  • Pointer size: 131 Bytes
  • Size of remote file: 120 kB
images/references/pantropical-spotted-dolphin.webp DELETED

Git LFS Details

  • SHA256: 2539bf6b2cd45a7d09527c9c6d50f1eb63e8c2296b6b467d5058433a2f405c7a
  • Pointer size: 131 Bytes
  • Size of remote file: 137 kB
images/references/pygmy-killer-whale.webp DELETED

Git LFS Details

  • SHA256: 84c7cd4b7aa1e943b0281061208062807297b538badb93ffa86bb7b59b650357
  • Pointer size: 130 Bytes
  • Size of remote file: 19.5 kB
images/references/rough-toothed-dolphin.webp DELETED

Git LFS Details

  • SHA256: e26ec510c284ec27c25e8ff23128244d4ef952c07ef8b816e4d79455c61e7098
  • Pointer size: 131 Bytes
  • Size of remote file: 160 kB
images/references/sei.webp DELETED

Git LFS Details

  • SHA256: 13099859ac1ac3fa45c58ecb3ea19a841d4b3b654592e631202d24271ae40d43
  • Pointer size: 131 Bytes
  • Size of remote file: 105 kB
images/references/spinner.webp DELETED

Git LFS Details

  • SHA256: dc4ef6d4401f7342ef69eeaeaf4e62c098805196b00b3e814545befdd01e1b17
  • Pointer size: 131 Bytes
  • Size of remote file: 114 kB
mkdocs.yaml CHANGED
@@ -16,10 +16,27 @@ plugins:
16
  - mkdocstrings:
17
  default_handler: python
18
  handlers:
19
- python:
20
- paths: [src]
21
 
22
 
23
  nav:
24
  - README: index.md
25
- - App: app.md
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  - mkdocstrings:
17
  default_handler: python
18
  handlers:
19
+ python:
20
+ paths: [.]
21
 
22
 
23
  nav:
24
  - README: index.md
25
+ #- Quickstart:
26
+ #- Installation: installation.md
27
+ #- Usage: usage.md
28
+ - API:
29
+ - Main app: main.md
30
+ - Modules:
31
+ - Data entry handling: input_handling.md
32
+ - Map of observations: obs_map.md
33
+ - Whale gallery: whale_gallery.md
34
+ - Whale viewer: whale_viewer.md
35
+ - Logging: st_logs.md
36
+ - Tab-rendering fix (js): fix_tabrender.md
37
+
38
+ - Development clutter:
39
+ - Demo app: app.md
40
+
41
+ - How to contribute:
42
+ - Dev Notes: dev_notes.md
{call_models β†’ snippets}/click_map.py RENAMED
File without changes
{call_models β†’ snippets}/d_entry.py RENAMED
File without changes
snippets/extract_meta.py CHANGED
@@ -38,7 +38,7 @@ def extract_gps(image_path):
38
 
39
  return (lat, lon)
40
  # Example usage
41
- image_path = '../call_models/imgs/cakes.jpg'
42
  datetime_info = extract_datetime(image_path)
43
  gps_info = extract_gps(image_path)
44
  print(f'Date and Time: {datetime_info}')
 
38
 
39
  return (lat, lon)
40
  # Example usage
41
+ image_path = 'imgs/cakes.jpg' # this file has good exif data, inc GPS, timestamps etc.
42
  datetime_info = extract_datetime(image_path)
43
  gps_info = extract_gps(image_path)
44
  print(f'Date and Time: {datetime_info}')
{call_models β†’ snippets}/hotdogs.py RENAMED
File without changes
{call_models β†’ snippets}/imgs/cakes.jpg RENAMED
File without changes
{call_models β†’ snippets}/test_upload.py RENAMED
File without changes
{call_models β†’ src}/alps_map.py RENAMED
File without changes
{call_models β†’ src}/entry_and_hotdog.py RENAMED
@@ -1,27 +1,27 @@
1
- import datetime
2
- import os
3
  import json
4
  import logging
 
5
  import tempfile
 
6
  import pandas as pd
7
  import streamlit as st
 
8
  import folium
9
  from streamlit_folium import st_folium
10
  from huggingface_hub import HfApi
11
- #from datasets import load_dataset
12
- #from fix_tabrender import js_show_zeroheight_iframe
13
 
14
- import whale_viewer as sw_wv
15
- import input_handling as sw_inp
16
  import alps_map as sw_am
17
- import whale_gallery as sw_wg
18
  import obs_map as sw_map
19
  import st_logs as sw_logs
 
 
20
 
21
 
22
 
23
- from transformers import pipeline
24
- from transformers import AutoModelForImageClassification
25
 
26
  # setup for the ML model on huggingface (our wrapper)
27
  os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
@@ -64,14 +64,29 @@ if "tab_log" not in st.session_state:
64
  st.session_state.tab_log = None
65
 
66
 
67
- def metadata2md():
 
 
 
 
 
 
68
  markdown_str = "\n"
69
  for key, value in st.session_state.full_data.items():
70
  markdown_str += f"- **{key}**: {value}\n"
71
  return markdown_str
72
 
73
 
74
- def push_observation(tab_log=None):
 
 
 
 
 
 
 
 
 
75
  # we get the data from session state: 1 is the dict 2 is the image.
76
  # first, lets do an info display (popup)
77
  metadata_str = json.dumps(st.session_state.full_data)
@@ -105,7 +120,26 @@ def push_observation(tab_log=None):
105
  st.info(msg)
106
 
107
 
108
- if __name__ == "__main__":
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
109
 
110
  g_logger.info("App started.")
111
  g_logger.warning(f"[D] Streamlit version: {st.__version__}. Python version: {os.sys.version}")
@@ -306,3 +340,6 @@ if __name__ == "__main__":
306
  tab_hotdogs.write(f"Session Data: {json.dumps(st.session_state.full_data)}")
307
 
308
 
 
 
 
 
1
+ #import datetime
 
2
  import json
3
  import logging
4
+ import os
5
  import tempfile
6
+
7
  import pandas as pd
8
  import streamlit as st
9
+ from streamlit.delta_generator import DeltaGenerator # for type hinting
10
  import folium
11
  from streamlit_folium import st_folium
12
  from huggingface_hub import HfApi
13
+ from transformers import pipeline
14
+ from transformers import AutoModelForImageClassification
15
 
 
 
16
  import alps_map as sw_am
17
+ import input_handling as sw_inp
18
  import obs_map as sw_map
19
  import st_logs as sw_logs
20
+ import whale_gallery as sw_wg
21
+ import whale_viewer as sw_wv
22
 
23
 
24
 
 
 
25
 
26
  # setup for the ML model on huggingface (our wrapper)
27
  os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
 
64
  st.session_state.tab_log = None
65
 
66
 
67
+ def metadata2md() -> str:
68
+ """Get metadata from cache and return as markdown-formatted key-value list
69
+
70
+ Returns:
71
+ str: Markdown-formatted key-value list of metadata
72
+
73
+ """
74
  markdown_str = "\n"
75
  for key, value in st.session_state.full_data.items():
76
  markdown_str += f"- **{key}**: {value}\n"
77
  return markdown_str
78
 
79
 
80
+ def push_observation(tab_log:DeltaGenerator=None):
81
+ """
82
+ Push the observation to the Hugging Face dataset
83
+
84
+ Args:
85
+ tab_log (streamlit.container): The container to log messages to. If not provided,
86
+ log messages are in any case written to the global logger (TODO: test - didn't
87
+ push any data since generating the logger)
88
+
89
+ """
90
  # we get the data from session state: 1 is the dict 2 is the image.
91
  # first, lets do an info display (popup)
92
  metadata_str = json.dumps(st.session_state.full_data)
 
120
  st.info(msg)
121
 
122
 
123
+
124
+ def main() -> None:
125
+ """
126
+ Main entry point to set up the streamlit UI and run the application.
127
+
128
+ The organisation is as follows:
129
+
130
+ 1. data input (a new observation) is handled in the sidebar
131
+ 2. the rest of the interface is organised in tabs:
132
+
133
+ - cetacean classifier
134
+ - hotdog classifier
135
+ - map to present the observations
136
+ - table of recent log entries
137
+ - gallery of whale images
138
+
139
+ The majority of the tabs are instantiated from modules. Currently the two
140
+ classifiers are still in-line here.
141
+
142
+ """
143
 
144
  g_logger.info("App started.")
145
  g_logger.warning(f"[D] Streamlit version: {st.__version__}. Python version: {os.sys.version}")
 
340
  tab_hotdogs.write(f"Session Data: {json.dumps(st.session_state.full_data)}")
341
 
342
 
343
+
344
+ if __name__ == "__main__":
345
+ main()
{call_models β†’ src}/fix_tabrender.py RENAMED
@@ -10,6 +10,21 @@ import streamlit as st
10
  import uuid, html
11
  # workaround for streamlit making tabs height 0 when not active, breaks map
12
  def inject_iframe_js_code(source: str) -> None:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  div_id = uuid.uuid4()
14
 
15
  st.markdown(
@@ -28,7 +43,28 @@ def inject_iframe_js_code(source: str) -> None:
28
  unsafe_allow_html=True,
29
  )
30
 
31
- def js_show_zeroheight_iframe(component_iframe_title: str, height: str = "auto"):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  source = f"""
33
  (function() {{
34
  var attempts = 0;
 
10
  import uuid, html
11
  # workaround for streamlit making tabs height 0 when not active, breaks map
12
  def inject_iframe_js_code(source: str) -> None:
13
+ """
14
+ Injects JavaScript code into a Streamlit app using an iframe.
15
+
16
+ This function creates a hidden div with a unique ID and injects the provided
17
+ JavaScript code into the parent document using an iframe. The iframe's source
18
+ is a JavaScript URL that creates a script element, sets its type to 'text/javascript',
19
+ and assigns the provided JavaScript code to its text content. The script element
20
+ is then appended to the hidden div in the parent document.
21
+
22
+ Args:
23
+ source (str): The JavaScript code to be injected.
24
+
25
+ Returns:
26
+ None
27
+ """
28
  div_id = uuid.uuid4()
29
 
30
  st.markdown(
 
43
  unsafe_allow_html=True,
44
  )
45
 
46
+ def js_show_zeroheight_iframe(component_iframe_title: str, height: str = "auto") -> None:
47
+ """
48
+ Injects JavaScript code to dynamically set iframe height (located by title)
49
+
50
+ This function generates and injects JavaScript code that searches for
51
+ iframes with the given title and sets their height to the specified value.
52
+ The script attempts to find the iframes up to a maximum number of attempts,
53
+ and also listens for user interactions to reattempt setting the height.
54
+
55
+ See https://github.com/streamlit/streamlit/issues/7376
56
+
57
+
58
+ Args:
59
+ component_iframe_title (str): The title attribute of the iframes to target.
60
+ height (str, optional): The height to set for the iframes. Defaults to "auto".
61
+
62
+ Notes:
63
+ - The JavaScript code will attempt to find the iframes every 250
64
+ milliseconds, up to a maximum of 20 attempts.
65
+ - If the iframes are found, their height will be set to the specified value.
66
+ - User interactions (e.g., click events) triggers a reattempt to set the height.
67
+ """
68
  source = f"""
69
  (function() {{
70
  var attempts = 0;
{call_models β†’ src}/images/references/640x427-atlantic-white-sided-dolphin.jpg RENAMED
File without changes
{call_models β†’ src}/images/references/640x427-long-finned-pilot-whale.webp RENAMED
File without changes
{call_models β†’ src}/images/references/640x427-southern-right-whale.jpg RENAMED
File without changes
{call_models β†’ src}/images/references/Humpback.webp RENAMED
File without changes
{call_models β†’ src}/images/references/Whale_Short-Finned_Pilot-markedDW.png RENAMED
File without changes
{call_models β†’ src}/images/references/beluga.webp RENAMED
File without changes
{call_models β†’ src}/images/references/blue-whale.webp RENAMED
File without changes