Michela committed · Commit e62e0c5 · 1 Parent(s): c2b3caf

Upload data and app
- app.py +142 -0
- data/annotations/DHd2025_referenceReports_merged-cleaned__annotations.xlsx +0 -0
- data/indices/DHd_index-cleaned.csv +0 -0
- data/retrieval_analysis/PFERDE_clean_retrieved_annotations_full.xlsx +0 -0
- data/retrieval_analysis/PFERDE_orig_retrieved_annotations_full.xlsx +0 -0
- data/retrieval_analysis/PFERDE_prep_retrieved_annotations_full.xlsx +0 -0
- data/retrieval_results/sonnini_cleaned/i_onit-sonnini-DHd2025-clean-q_Pferd, Pferde.csv +0 -0
- data/retrieval_results/sonnini_cleaned/i_onit-sonnini-DHd2025-clean-q_Pferd, Pferde.xlsx +0 -0
- data/retrieval_results/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.csv +0 -0
- data/retrieval_results/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.xlsx +0 -0
- data/retrieval_results/sonnini_original_OCR/i_onit-test-index-sonnini-q_Pferd-Pferde.csv +0 -0
- data/retrieval_results/sonnini_original_OCR/i_onit-test-index-sonnini-q_Pferd-Pferde.xlsx +0 -0
- src/analysis/analysis_of_results.ipynb +1515 -0
- src/analysis/query_index.ipynb +1685 -0
- src/indexing/index_data.py +97 -0
- src/preprocessing/clean_books.py +80 -0
- src/preprocessing/llm_keywords.py +120 -0
- src/preprocessing/llm_preprocessing.py +120 -0
- src/utils/annotations_preprocessing.py +92 -0
- src/utils/extract_data.py +128 -0
app.py
ADDED
@@ -0,0 +1,142 @@
# Import packages
import gradio as gr
import pandas as pd
from difflib import SequenceMatcher

# Import results
results_clean = pd.read_csv("data/retrieval_results/sonnini_cleaned/i_onit-sonnini-DHd2025-clean-q_Pferd, Pferde.csv").head(100)
results_prep = pd.read_csv("data/retrieval_results/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.csv").head(100)
results_orig = pd.read_csv("data/retrieval_results/sonnini_original_ocr/i_onit-test-index-sonnini-q_Pferd-Pferde.csv").head(100)

# Drop 'text_prep' column from results_clean
results_clean.drop(columns=['text_prep'], inplace=True)

# Modify the "document" column to remove "_page175.txt" and keep the "Z166069305_00175"
results_orig['document'] = results_orig['document'].str[:-12]

# Modify the "page" column to extract the numeric part and remove leading zeroes
results_orig['page'] = results_orig['page'].str.extract(r'(\d+)', expand=False).astype(int)

data_sources = {"Results Cleaned OCR": results_clean, "Results LLM Preprocessed OCR": results_prep, "Results Original OCR": results_orig}

# Pagination settings
R = 5  # Number of preview rows per page

# Define a function to highlight parts of the text
def highlight_text(text, highlights):
    # Ensure highlights is a list of strings
    if isinstance(highlights, str):
        highlights = [highlights]
    # Wrap each highlight in <mark> tags
    for highlight in highlights:
        # Replace highlight text with a highlighted version
        text = text.replace(highlight, f'<mark>{highlight}</mark>')
    return text


# Function to create preview rows
def preview_results(page, selected_data_source):
    data_source = data_sources[selected_data_source]
    start_idx = (page - 1) * R
    end_idx = min(start_idx + R, len(data_source))

    results = data_source.iloc[start_idx:end_idx]

    row_elements = []
    for idx, (_, row) in enumerate(results.iterrows(), start=start_idx + 1):
        highlighted_text = row['unpacked_highlights']
        row_html = f"""
        <div style='border:1px solid #ddd; padding:10px; margin:5px 0; font-size: 18px;'>
            <b>{idx}. '{row['document']}'</b> - Score: {row['_score']} - Rank: {row['rank']}
            <br><i>{highlighted_text}</i>
        </div>
        """
        row_elements.append(row_html)

    return "".join(row_elements)

# Function to show details of a selected row
def show_details(document_name, selected_data_source):
    data_source = data_sources[selected_data_source]
    row = data_source[data_source["document"] == document_name]

    if row.empty:
        return "<p style='color:red;'>Document not found. Please select a valid document.</p>"

    row = row.iloc[0]  # Extract first matching row
    return f"""
    <div style="display: flex; justify-content: space-between; align-items: start;">
        <div style="width: 65%; font-size: 18px;">
            <h3>📄 Preview: {row['barcode']}, Page {row['page']}</h3>
            <p><b>Retrieved text chunk: </b><i>{row["unpacked_highlights"]}</i></p>
            <p><b>OCR text (LLM-corrected): </b>{highlight_text(row.get('text_prep') or row.get('text_clean') or row.get('text'), row["unpacked_highlights"])}</p>
            <p><a href="https://digital.onb.ac.at/OnbViewer/viewer.faces?doc=ABO_%2B{row['barcode']}&order={row['page']}&view=SINGLE" target="_blank">🔍 Open ÖNB Viewer</a></p>
        </div>
        <div style="width: 30%; text-align: right;">
            <img src="{row['iiif_link']}" alt="IIIF Image Preview"
                 style="max-width: 100%; height: auto; border: 1px solid #ddd;">
        </div>
    </div>
    <div style="font-size: 18px;">
        <p><b>Source: </b>C. S. Sonnini's, ehemaligen Offiziers und Jngenieurs des französischen Seewesens <br>und Mitgliedes mehrerer gelehrten und litterarischen Gesellschaften, <br><i>Reisen in Ober= und Niederägypten</i>, Bd. 1. Leipzig/Gera: Wilh. Heinsius, 1800</p>
        <p><b>Citation link:</b> <a href="http://data.onb.ac.at/rep/1058B194" target="_blank">http://data.onb.ac.at/rep/1058B194</a></p>
    </div>
    """

# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("## 🔍 Preview Text Retrieval Results with Marqo Vector Database")

    data_source_dropdown = gr.Dropdown(choices=list(data_sources.keys()), label="Select Data Source", value="Results Cleaned OCR")
    page_slider = gr.Slider(1, 1, step=1, label="Page", interactive=True)
    preview_output = gr.HTML()

    gr.Markdown("## 📝 Inspect Document Details")

    doc_name_input = gr.Textbox(label="Copy and paste document name to search bar (e.g. Z166069305_430):", interactive=True)
    inspect_button = gr.Button("Inspect")
    inspect_output = gr.HTML()

    # Function to update preview when data source changes
    def update_data_source(selected_data_source):
        max_page = (len(data_sources[selected_data_source]) // R) + 1
        page_slider.maximum = max_page  # Update the max page count dynamically
        return preview_results(1, selected_data_source), 1  # Reset slider to 1

    # Function to update preview when page slider changes
    def update_preview(page, selected_data_source):
        return preview_results(page, selected_data_source)

    # Function to update document details
    def update_details(doc_name, selected_data_source):
        return show_details(doc_name, selected_data_source)

    # Handle data source change
    data_source_dropdown.change(
        update_data_source,
        inputs=[data_source_dropdown],
        outputs=[preview_output, page_slider]  # Update both preview and reset slider
    )
    # Handle page slider change
    page_slider.change(update_preview, inputs=[page_slider, data_source_dropdown], outputs=[preview_output])

    # Handle inspect button click
    inspect_button.click(update_details, inputs=[doc_name_input, data_source_dropdown], outputs=[inspect_output])

    # Initialize preview with default data source
    preview_output.value = update_data_source("Results Cleaned OCR")

    # Further information block at the end
    gr.Markdown("""
    ## 📚 Further Information
    <div style="font-size: 18px;">
    <p>This demo lets you explore our preliminary results for retrieving <i>nature</i> representations in imperfect OCR data extracted from 17th-19th century German texts.
    This research was done in the <a href="https://onit.oeaw.ac.at/">Ottoman Nature in Travelogues (ONiT)</a> project and funded by the Austrian Science Fund (FWF: P 35245).
    The text retrieval was done with hybrid vector/lexical search (BM25) using a <a href="https://docs.marqo.ai/">Marqo</a>
    vector index. The texts were indexed with one page per document unit, split into 2-sentence chunks that were embedded with the
    <a href="https://huggingface.co/flax-sentence-embeddings/all_datasets_v4_mpnet-base">flax-sentence-embeddings/all_datasets_v4_mpnet-base</a> model.</p>
    <p>For more information, contact <a href="mailto:[email protected]">michela(dot)vignoli(at)ait(dot)ac(dot)at</a>.</p>
    </div>
    """)

demo.launch()
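As a quick illustration of the preview rendering, here is a minimal, self-contained sketch of the highlighting step used above. The function body mirrors highlight_text from app.py; the sample strings are hypothetical and assume the Marqo highlight is a verbatim substring of the page text.

```python
# Minimal sketch of the <mark> highlighting used in the preview (sample text is hypothetical).
def highlight_text(text, highlights):
    if isinstance(highlights, str):
        highlights = [highlights]
    for highlight in highlights:
        text = text.replace(highlight, f"<mark>{highlight}</mark>")
    return text

print(highlight_text("Ein Trupp Beduinen zu Pferde brach hervor.", "Beduinen zu Pferde"))
# Ein Trupp <mark>Beduinen zu Pferde</mark> brach hervor.
```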
data/annotations/DHd2025_referenceReports_merged-cleaned__annotations.xlsx
ADDED
Binary file (723 kB).

data/indices/DHd_index-cleaned.csv
ADDED
The diff for this file is too large to render.

data/retrieval_analysis/PFERDE_clean_retrieved_annotations_full.xlsx
ADDED
Binary file (196 kB).

data/retrieval_analysis/PFERDE_orig_retrieved_annotations_full.xlsx
ADDED
Binary file (206 kB).

data/retrieval_analysis/PFERDE_prep_retrieved_annotations_full.xlsx
ADDED
Binary file (199 kB).

data/retrieval_results/sonnini_cleaned/i_onit-sonnini-DHd2025-clean-q_Pferd, Pferde.csv
ADDED
The diff for this file is too large to render.

data/retrieval_results/sonnini_cleaned/i_onit-sonnini-DHd2025-clean-q_Pferd, Pferde.xlsx
ADDED
Binary file (637 kB).

data/retrieval_results/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.csv
ADDED
The diff for this file is too large to render.

data/retrieval_results/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.xlsx
ADDED
Binary file (655 kB).

data/retrieval_results/sonnini_original_OCR/i_onit-test-index-sonnini-q_Pferd-Pferde.csv
ADDED
The diff for this file is too large to render.

data/retrieval_results/sonnini_original_OCR/i_onit-test-index-sonnini-q_Pferd-Pferde.xlsx
ADDED
Binary file (477 kB).
src/analysis/analysis_of_results.ipynb
ADDED
@@ -0,0 +1,1515 @@
# Analysis of Retrieval Results

## Load annotations and retrieval results
[code cell 1]
import pandas as pd
import os
import re

text_annotations_path = "data/annotations/"
filename = 'DHd2025_referenceReports_merged-cleaned__annotations.xlsx'

annotations = pd.read_excel(os.path.join(text_annotations_path, filename))

## Remove unwanted tags from list
unwanted_strings = ['pages', 'subheading', 'footnote', 'table of contents', 'footnote irrelevant', 'title', 'scan mistake', 'dictionary']

# Remove rows where 'TAGS' contains any unwanted string
annotations = annotations[~annotations['TAGS'].str.contains('|'.join(unwanted_strings), case=False, na=False)].reset_index(drop=True)
annotations = annotations[annotations['FILE'].str.contains('Z166069305', case=False, na=False)]

annotations

[output: annotations DataFrame, 571 rows × 8 columns (UUID, FILE, QUOTE_TRANSCRIPTION, ANCHOR, COMMENTS, TAGS, PAGE, TRANSCRIPTION_CLEANED); all displayed rows come from FILE Z166069305_merged.txt]
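A toy sketch of the tag filter in the cell above, with made-up rows, shows how the joined pattern drops annotations whose TAGS field contains any unwanted string; the data below is hypothetical.

```python
# Toy example of the '|'.join(...) tag filter; the DataFrame contents are hypothetical.
import pandas as pd

df = pd.DataFrame({"TAGS": ["'animals 25F'", "'footnote'", "'plants vegetation 25G'"]})
unwanted_strings = ["footnote", "title"]
kept = df[~df["TAGS"].str.contains("|".join(unwanted_strings), case=False, na=False)]
print(kept["TAGS"].tolist())  # ["'animals 25F'", "'plants vegetation 25G'"]
```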
[code cell 2]
## Filter the DataFrame
filtered_df = annotations[annotations['TAGS'].str.contains('horse', na=False)]
# Count the number of rows before filtering short sentences
before_filter_count = len(filtered_df)

# Filter sentences that are 5 words or longer in 'TRANSCRIPTION_CLEANED'
filtered_df = filtered_df[filtered_df['TRANSCRIPTION_CLEANED'].apply(lambda x: len(x.split()) >= 5)]
# Count the number of rows after filtering short sentences
after_filter_count = len(filtered_df)
excluded_count = before_filter_count - after_filter_count
print(f"Number of excluded sentences: {excluded_count}")

mentions_dict = filtered_df.set_index('UUID').to_dict(orient='index')

mentions = [{"id": key, "annotated_text": v['TRANSCRIPTION_CLEANED'], "comment": v['COMMENTS'], "page": v['PAGE'], "file": v['FILE']} for key, v in mentions_dict.items()]

print(f"Number of annotated mentions: {len(filtered_df)}")
mentions[0]

[output]
Number of excluded sentences: 0
Number of annotated mentions: 48

{'id': '0d5708db-1bf6-4de4-ad64-9759ebb4fa3c',
 'annotated_text': "('n. Die Waffen eines Mamefu den zu Pferde find zwei --- 00010_page10_cleaned.txt --- grose Flinten, die ihm seine Diener zur Seite nach: tragen und die er ein einziges mal losschiest. Da', 5402)",
 'comment': 'p. 9-10\n',
 'page': 10,
 'file': 'Z166069305_merged.txt'}
[code cell 3]
## Summary of annotations

# Total annotations:
# Remove duplicates based on "id"
unique_annot = {entry["id"]: entry for entry in mentions}.values()

# Remove duplicates based on "page"
unique_pages_annot = {entry["page"]: entry for entry in mentions}.values()

# Extract the "page" value from each dictionary of unique pages found
unique_pages_list = [item['page'] for item in unique_pages_annot]

print(f'Total annotations: {len(unique_annot)}')
print(f'On total pages: {len(unique_pages_annot)}')

[output]
Total annotations: 48
On total pages: 40
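The two dict comprehensions above deduplicate by dictionary key (the last entry per key wins); a small sketch with hypothetical entries makes the page-level count visible.

```python
# Hypothetical mentions; keying a dict by "page" keeps one entry per annotated page.
mentions = [{"id": "a", "page": 10}, {"id": "b", "page": 10}, {"id": "c", "page": 12}]
unique_pages_annot = {entry["page"]: entry for entry in mentions}.values()
print(len(unique_pages_annot))  # 2 unique pages
```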
[code cell 4]
## Load retrieval results with Marqo DHd2025

## Pferd, Pferde
retr_orig = pd.read_csv('data/retrieval_results/sonnini_original_OCR/i_onit-test-index-sonnini-q_Pferd-Pferde.csv')
retr_prep = pd.read_csv('data/retrieval_results/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.csv')
retr_clean = pd.read_csv('data/retrieval_results/sonnini_cleaned/i_onit-sonnini-DHd2025-clean-q_Pferd, Pferde.csv')
## Evaluation of results
[code cell 5]
## Analyse overlap of retrieved texts with annotated texts

"""
In this notebook, we attempted to compare the heterogeneous texts that were annotated during the project
with the retrieval results by comparing n-word phrases from the annotated sentences with the retrieved
sentence vectors. This approach is experimental. An alternative evaluation by calculating the Levenshtein
distance between the annotated text vectors and the retrieved text vectors will be done in future.
"""

## Create dict from retrieved texts
#retr = retr_orig.head(200)
#retr = retr_prep.head(200)
retr = retr_clean.head(200)

retrieved_dict = retr.set_index('_id').to_dict(orient='index')

retrieved = [
    {
        "id": key,
        "text_document": v['text_clean'],  # 'text', 'text_clean'
        "text_vector": v['unpacked_highlights'] if pd.notna(v['unpacked_highlights']) else v['text'],
        "page": v['page'],  # int((re.search(r'\d+', v['page'])).group()),
        "rerank": v['rerank'],
        "barcode": v['barcode'],
        "onb_link": v['onb_viewer_link']
    }
    for key, v in retrieved_dict.items()
]

print(len(retrieved))
retrieved[0]

[output]
200

{'id': '01b71d37-9ead-4eb2-8504-d16cbd00e866',
 'text_document': 'befand mich in einiger Entfernung davon, und der Ueber- rest unserer Begleitung folgte in ziemlicher Weite nach. Ein Trupp Beduinen zu Pferde. brach auf einmal hinter den Mauern hervor. Ich konnte sie an. fanglich unter den Staubwolken, die sie erregten, nicht unterscheiden: als fie fich aber ausgebreitet hatten, erfannte ich sowohl die Ort als die Anzahl von Leuten, mit denen wir zu thun haben sollten. Ich wandte sogleich inein Pferd um, und mit diefem vortreflichen Reuter, auf welchem man mich nicht hatte einbohlen konnen, hatte ich bald meine Gefahrten erreicht, die auch von ihren Came len herab diese. Reuterei gewahr worden waren. Ich fand sie zu Fus und in geschlossene Glieder gestellt. Ich sturzte mich von meinem Pferde herab und munterte fie zu einenu muthigen Widerstande auf. Wir waren im Ganzen rechs Personen, unter welchen wir nur auf drei | rechnen konnten. Zwei Eingebohrne fonnten nicht viel helfen, und der Zeichner war noch jung und ohne Erfah- rung, und konnte keine Flinte losschiesen. ut Die Standhaftigkeit einer Handvou Menschen, die fich mitten in einer Sandflache allein befanden, und die von keiner Seite gedekt waren, liberraschte eine Schwa: dron von ungefahr hundert Beduinen: man kann daraus schliesen, wie wenig furchtbar solche Feinde find, deren Muth fich blos darauf beschrankt, das fie fich in zahlreichen Horden versammeln, um eine leichte Plundes rung zu veruben, und auf eine feige Art den kleinen Krieg zu fuhren. Ob fie gleich in starkem Galop auf uns zuge. ritten kamen, so machten sie doch plozlich, etwann hundert Schritte von uns, halt. Sie riefen uns zu, kein Feuer zu geben, und ich erwiederte ihnen, fie sollten nicht vors rucken. Sie blieben einige Augenblicke in einer Art von Ungewisheit, was sie thun souten, wahrend welcher wir fie unter einander berathschlagen sahen. Sie theilten sich',
 'text_vector': 'befand mich in einiger Entfernung davon, und der Ueber- rest unserer Begleitung folgte in ziemlicher Weite nach. Ein Trupp Beduinen zu Pferde.',
 'page': 430,
 'rerank': 1,
 'barcode': 'Z166069305',
 'onb_link': 'https://digital.onb.ac.at/OnbViewer/viewer.faces?doc=ABO_%2BZ166069305'}
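The docstring above mentions a planned Levenshtein-based evaluation. A hedged sketch of one way such a character-level comparison could look, using difflib.SequenceMatcher (already imported in app.py) as a stand-in ratio rather than a true Levenshtein distance; both strings below are illustrative.

```python
# Sketch only: fuzzy comparison of an annotated span against a retrieved chunk.
# A ratio near 1.0 means a near-verbatim match despite OCR differences.
from difflib import SequenceMatcher

annotated = "Ein Trupp Beduinen zu Pferde brach auf einmal hervor"
retrieved = "Ein Trupp Beduinen zu Pferde. brach auf einmal hinter den Mauern hervor"
ratio = SequenceMatcher(None, annotated.lower(), retrieved.lower()).ratio()
print(round(ratio, 2))  # well above 0.5 for this pair
```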
[code cell 6]
## Function to generate n-word phrases from a sentence
def generate_n_word_phrases(sentence, n):
    words = sentence.lower().split()
    return [' '.join(words[i:i+n]) for i in range(len(words) - n + 1)]

## Function to check if any of the n-word phrases is in the text_vector
## and store the first matching phrase
def check_sentence_in_text_vector(sentence, text_vector, n):
    text_vector_lower = text_vector.lower()
    phrases = generate_n_word_phrases(sentence, n)
    for phrase in phrases:
        if phrase in text_vector_lower:
            return True, phrase  # Return True and the matching phrase
    return False, None  # No match found, return None
    #return any(phrase in text_vector_lower for phrase in phrases)

# Number of words in the phrase to check
n = 5  # Change this to the desired number of words

# List to store results
results = []

# Iterate through each sentence in the mentions list
for sentence in mentions:
    found = False
    # Check if any n-word phrase from this sentence is present in any of the text_vector fields
    for entry in retrieved:
        #print(entry['text_vector'])
        match_found, match = check_sentence_in_text_vector(sentence['annotated_text'], entry['text_vector'], n)
        if match_found == True:
            found = True
            break  # No need to check further if a match is found
    if found == True:
        results.append({
            "sentence_annotation": sentence['annotated_text'],
            "id_annotation": sentence['id'],
            "page_annotation": sentence['page'],
            "comment": sentence['comment'],
            "found_in_text_vector": found,
            "text_vector": entry['text_vector'],
            "matching_phrase": match,
            "id_text_vector": entry['id'],
            "barcode": entry['barcode'],
            "page_text": entry['page'],
            "onb_link": entry['onb_link'],
            "file_annotation": sentence['file']
        })
    else:
        results.append({
            "sentence_annotation": sentence['annotated_text'],
            "id_annotation": sentence['id'],
            "page_annotation": sentence['page'],
            "file_annotation": sentence['file'],
            "found_in_text_vector": found
        })

retrieved_df = pd.DataFrame(retrieved)
retrieved_df.rename(columns={'id': 'id_text_vector'}, inplace=True)
mat_df = pd.DataFrame(results)
matches_df = retrieved_df.merge(mat_df, on='id_text_vector', how='outer')

# Sort the DataFrame based on the 'rerank' column in ascending order
matches_df = matches_df.sort_values(by='rerank', ascending=True)
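A self-contained sketch of the n-gram overlap check defined above, run with n=3 on two short hypothetical strings so the matching phrase is visible.

```python
# Same logic as generate_n_word_phrases / check_sentence_in_text_vector above,
# demonstrated on hypothetical strings with n=3 instead of 5.
def generate_n_word_phrases(sentence, n):
    words = sentence.lower().split()
    return [" ".join(words[i:i + n]) for i in range(len(words) - n + 1)]

def check_sentence_in_text_vector(sentence, text_vector, n):
    text_vector_lower = text_vector.lower()
    for phrase in generate_n_word_phrases(sentence, n):
        if phrase in text_vector_lower:
            return True, phrase
    return False, None

print(check_sentence_in_text_vector(
    "Ein Trupp Beduinen zu Pferde brach hervor",
    "befand mich in einiger Entfernung; Ein Trupp Beduinen zu Pferde.",
    3))
# (True, 'ein trupp beduinen')
```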
[code cell 7 output]
True Positives: Found 27 relevant mentions on 27 pages of 48 relevant mentions on 40 pages
(67.50%).
Found average of 1.00 found mentions per page vs. GT of 1.20 annotations per page.

[output: matches DataFrame of the 27 true-positive rows; the sentence_annotation and text_vector_x columns are truncated in the diff, and all rows have file_annotation Z166069305_merged.txt]

rerank  page  page_annotation  id_annotation
1       430   434              ffbabe61-0a21-4e4f-b6e4-2ddc4724b40c
2       399   403              9a41db07-a5c8-4b3f-931a-43418f800768
3       175   176              2caacee3-c22d-4a76-a2d7-ba794856a103
4       9     10               0d5708db-1bf6-4de4-ad64-9759ebb4fa3c
5       220   222              4f7c0172-b25f-4e62-9cb5-a1c1213c6d7d
6       435   439              c86544f5-33f0-4d9a-b048-4995ae36c493
7       413   417              3a1cf968-2b82-48d9-a784-d3a7f5198890
8       383   387              b0450149-d074-4079-87c4-cdc252ce7c67
9       406   410              a16c02e9-16a6-4780-b65e-51510b8db6fb
10      394   397              b8a8f0ea-926e-4d89-b21d-e89e69a35c2f
11      424   428              2703eecd-2bb6-432b-beaf-c92933736058
12      405   409              3f08cff7-7220-4c5e-9d1f-1b8d4fc92028
13      12    12               28370a93-4521-48d0-a8da-50545822debc
14      455   460              d9613671-efc9-4a0e-9770-d2b8432d71de
15      448   452              5913d181-6da7-4169-9187-9eb885d33e33
16      431   435              1a1ef27f-0449-4664-8f6c-e929f089504a
18      148   149              5892d1ea-8016-4e3e-9176-f6f6df833125
19      152   153              e422911b-8c7b-4a1e-a4aa-df3d3f617c51
20      415   419              2602d32c-2c40-4add-893c-abfc115b3c38
21      261   263              b3a06b70-a367-4eef-a21e-788142390022
22      10    10               64b5828e-b14d-49dc-8d88-f97de52f9202
23      263   266              3bf200db-aeb9-4823-8923-6458b1f01d7e
25      59    59               cdd8f310-f15e-44d7-bd06-3e91351e3eae
26      454   459              59c9905e-6cfa-4544-8805-dfbd53e7ed29
27      422   426              63f2c3e0-69a8-4a22-a356-7d0dd570dee1
34      423   427              1f056aa1-5980-4f62-9bff-147a9b409fd9
63      264   267              e4074c9a-c71c-4076-a22e-dce3f7e263c9

[diff truncated here]
|
938 |
+
"24 Z166069305_merged.txt \n",
|
939 |
+
"25 Z166069305_merged.txt \n",
|
940 |
+
"26 Z166069305_merged.txt \n",
|
941 |
+
"33 Z166069305_merged.txt \n",
|
942 |
+
"62 Z166069305_merged.txt "
|
943 |
+
]
|
944 |
+
},
|
945 |
+
"execution_count": 9,
|
946 |
+
"metadata": {},
|
947 |
+
"output_type": "execute_result"
|
948 |
+
}
|
949 |
+
],
|
950 |
+
"source": [
|
951 |
+
"# Filter rows where 'found_in_text_vector' is True --> True Positives\n",
|
952 |
+
"found = matches_df[matches_df['found_in_text_vector'] == True]\n",
|
953 |
+
"found_fil = found['id_annotation'].drop_duplicates(keep='first')\n",
|
954 |
+
"found_pages = found['page'].drop_duplicates(keep='first')\n",
|
955 |
+
"\n",
|
956 |
+
"# Extract the \"page\" value from each dictionary of unique pages found\n",
|
957 |
+
"unique_foundPages_list = found_pages.to_list()\n",
|
958 |
+
"\n",
|
959 |
+
"# Filter rows where the absolute difference between 'page_annotation' and 'page_text' is less than or equal to 4\n",
|
960 |
+
"#found = found[abs(found['page_annotation'] - found['page_text']) <= 6]\n",
|
961 |
+
"\n",
|
962 |
+
"print(f\"True Positives: Found {len(found_fil)} relevant mentions on {len(unique_foundPages_list)} pages of {len(unique_annot)} relevant mentions on {len(unique_pages_list)} pages\")\n",
|
963 |
+
"print(f\"({len(unique_foundPages_list)/len(unique_pages_list)*100:.2f}%).\")\n",
|
964 |
+
"print(f\"Found average of {len(found_fil)/len(found_pages):.2f} found mentions per page vs. GT of {len(unique_annot)/len(unique_pages_annot):.2f} annotations per page.\")\n",
|
965 |
+
"\n",
|
966 |
+
"found[['sentence_annotation', 'text_vector_x', 'rerank', 'id_annotation', 'page', 'page_annotation', 'file_annotation']]"
|
967 |
+
]
|
968 |
+
},
|
969 |
+
{
|
970 |
+
"cell_type": "code",
|
971 |
+
"execution_count": 10,
|
972 |
+
"id": "1e9beb8e",
|
973 |
+
"metadata": {},
|
974 |
+
"outputs": [
|
975 |
+
{
|
976 |
+
"name": "stdout",
|
977 |
+
"output_type": "stream",
|
978 |
+
"text": [
|
979 |
+
"False Negatives: Missed 21 mentions on 20 pages, of which 0 pages were retrieved with another vector.\n",
|
980 |
+
"(50.00%).\n"
|
981 |
+
]
|
982 |
+
},
|
983 |
+
{
|
984 |
+
"data": {
|
985 |
+
"text/html": [
|
986 |
+
"<div>\n",
|
987 |
+
"<style scoped>\n",
|
988 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
989 |
+
" vertical-align: middle;\n",
|
990 |
+
" }\n",
|
991 |
+
"\n",
|
992 |
+
" .dataframe tbody tr th {\n",
|
993 |
+
" vertical-align: top;\n",
|
994 |
+
" }\n",
|
995 |
+
"\n",
|
996 |
+
" .dataframe thead th {\n",
|
997 |
+
" text-align: right;\n",
|
998 |
+
" }\n",
|
999 |
+
"</style>\n",
|
1000 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
1001 |
+
" <thead>\n",
|
1002 |
+
" <tr style=\"text-align: right;\">\n",
|
1003 |
+
" <th></th>\n",
|
1004 |
+
" <th>sentence_annotation</th>\n",
|
1005 |
+
" <th>id_annotation</th>\n",
|
1006 |
+
" <th>page_annotation</th>\n",
|
1007 |
+
" <th>file_annotation</th>\n",
|
1008 |
+
" </tr>\n",
|
1009 |
+
" </thead>\n",
|
1010 |
+
" <tbody>\n",
|
1011 |
+
" <tr>\n",
|
1012 |
+
" <th>200</th>\n",
|
1013 |
+
" <td>('r. Wir giengen in geschloffenen Reihen und m...</td>\n",
|
1014 |
+
" <td>a406cecb-48dc-4b27-9e2e-e831301a59bc</td>\n",
|
1015 |
+
" <td>149.0</td>\n",
|
1016 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1017 |
+
" </tr>\n",
|
1018 |
+
" <tr>\n",
|
1019 |
+
" <th>201</th>\n",
|
1020 |
+
" <td>('s. Da die Wagen nicht gewohnlich sind, so be...</td>\n",
|
1021 |
+
" <td>53e38871-8353-4ea1-acc5-e60964d4b02d</td>\n",
|
1022 |
+
" <td>151.0</td>\n",
|
1023 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1024 |
+
" </tr>\n",
|
1025 |
+
" <tr>\n",
|
1026 |
+
" <th>202</th>\n",
|
1027 |
+
" <td>('t. Man tritt mit den Fusen allerhand Muschel...</td>\n",
|
1028 |
+
" <td>32cc068f-6a60-4040-aa4a-0575f48b5c52</td>\n",
|
1029 |
+
" <td>155.0</td>\n",
|
1030 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1031 |
+
" </tr>\n",
|
1032 |
+
" <tr>\n",
|
1033 |
+
" <th>203</th>\n",
|
1034 |
+
" <td>('r. Sie kennen kein ander Spaziergehen, --- 0...</td>\n",
|
1035 |
+
" <td>638cd5aa-72b7-46a0-bbf1-82ec0f884747</td>\n",
|
1036 |
+
" <td>186.0</td>\n",
|
1037 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1038 |
+
" </tr>\n",
|
1039 |
+
" <tr>\n",
|
1040 |
+
" <th>204</th>\n",
|
1041 |
+
" <td>('s. In dem Departement, wo ich wohne und in d...</td>\n",
|
1042 |
+
" <td>8438c431-385f-40f0-b167-51527c2c8e7e</td>\n",
|
1043 |
+
" <td>221.0</td>\n",
|
1044 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1045 |
+
" </tr>\n",
|
1046 |
+
" <tr>\n",
|
1047 |
+
" <th>205</th>\n",
|
1048 |
+
" <td>('t. Ich wuste, das er besucht zu werden uerdi...</td>\n",
|
1049 |
+
" <td>41bd83f1-06ec-4186-af2e-63dea256dfc8</td>\n",
|
1050 |
+
" <td>267.0</td>\n",
|
1051 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1052 |
+
" </tr>\n",
|
1053 |
+
" <tr>\n",
|
1054 |
+
" <th>206</th>\n",
|
1055 |
+
" <td>('n. Sie hatten in ihrem Hause ein Paar rothe ...</td>\n",
|
1056 |
+
" <td>e5ebef45-e36c-4742-9976-702dfad43fc5</td>\n",
|
1057 |
+
" <td>270.0</td>\n",
|
1058 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1059 |
+
" </tr>\n",
|
1060 |
+
" <tr>\n",
|
1061 |
+
" <th>207</th>\n",
|
1062 |
+
" <td>('e. Nach diesem Kaufe nahmen wir uon dem gute...</td>\n",
|
1063 |
+
" <td>a02a872b-8602-4ff4-93bd-f539ff93ada6</td>\n",
|
1064 |
+
" <td>276.0</td>\n",
|
1065 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1066 |
+
" </tr>\n",
|
1067 |
+
" <tr>\n",
|
1068 |
+
" <th>208</th>\n",
|
1069 |
+
" <td>('n. Der Viceconsul, rein Drogman und ein fran...</td>\n",
|
1070 |
+
" <td>a5b12bfb-3a78-46ac-8d62-73d5bf4425c9</td>\n",
|
1071 |
+
" <td>277.0</td>\n",
|
1072 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1073 |
+
" </tr>\n",
|
1074 |
+
" <tr>\n",
|
1075 |
+
" <th>209</th>\n",
|
1076 |
+
" <td>('n. Unsere Esel Fielen nieder, sanken in den ...</td>\n",
|
1077 |
+
" <td>ceed7937-1824-4222-ba6c-686b2b4d7738</td>\n",
|
1078 |
+
" <td>278.0</td>\n",
|
1079 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1080 |
+
" </tr>\n",
|
1081 |
+
" <tr>\n",
|
1082 |
+
" <th>210</th>\n",
|
1083 |
+
" <td>('tu Wir hatten Maulefel, womit man gewohnlich...</td>\n",
|
1084 |
+
" <td>94d76c3d-1115-4a4e-91b5-501f6e88de65</td>\n",
|
1085 |
+
" <td>386.0</td>\n",
|
1086 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1087 |
+
" </tr>\n",
|
1088 |
+
" <tr>\n",
|
1089 |
+
" <th>211</th>\n",
|
1090 |
+
" <td>('. Ich trug kein Bedenken, mich ihnen anzuuer...</td>\n",
|
1091 |
+
" <td>acc36476-9315-4751-9646-bba3a2231e07</td>\n",
|
1092 |
+
" <td>386.0</td>\n",
|
1093 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1094 |
+
" </tr>\n",
|
1095 |
+
" <tr>\n",
|
1096 |
+
" <th>212</th>\n",
|
1097 |
+
" <td>('e. Damit man nun nicht an meiner Furchtlosig...</td>\n",
|
1098 |
+
" <td>b9449bfd-c3bb-4b97-9c45-305b10b7aeca</td>\n",
|
1099 |
+
" <td>389.0</td>\n",
|
1100 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1101 |
+
" </tr>\n",
|
1102 |
+
" <tr>\n",
|
1103 |
+
" <th>213</th>\n",
|
1104 |
+
" <td>('). Bei meiner Rukkunft nach Abukir fand ich ...</td>\n",
|
1105 |
+
" <td>995c9029-3950-44cb-a7c7-e6f91a96ecf7</td>\n",
|
1106 |
+
" <td>391.0</td>\n",
|
1107 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1108 |
+
" </tr>\n",
|
1109 |
+
" <tr>\n",
|
1110 |
+
" <th>214</th>\n",
|
1111 |
+
" <td>('-- Plinius berichtet nach dem Xenophon, die ...</td>\n",
|
1112 |
+
" <td>fe020016-1d51-4a1f-bbc1-d4a65973bf67</td>\n",
|
1113 |
+
" <td>402.0</td>\n",
|
1114 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1115 |
+
" </tr>\n",
|
1116 |
+
" <tr>\n",
|
1117 |
+
" <th>215</th>\n",
|
1118 |
+
" <td>('g. Ehe wir noch an dieseru Ore anlangten, sa...</td>\n",
|
1119 |
+
" <td>4795c52d-1f6e-4de6-b87f-a5ae17735dad</td>\n",
|
1120 |
+
" <td>419.0</td>\n",
|
1121 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1122 |
+
" </tr>\n",
|
1123 |
+
" <tr>\n",
|
1124 |
+
" <th>216</th>\n",
|
1125 |
+
" <td>('ort. Wir kamen auf einen\\' bedekten Sand, de...</td>\n",
|
1126 |
+
" <td>92f4ee0e-d411-4c55-bda6-02b5f8aefce6</td>\n",
|
1127 |
+
" <td>433.0</td>\n",
|
1128 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1129 |
+
" </tr>\n",
|
1130 |
+
" <tr>\n",
|
1131 |
+
" <th>217</th>\n",
|
1132 |
+
" <td>('n. Ob fie gleich in starkem Galop auf uns zu...</td>\n",
|
1133 |
+
" <td>0c529bb3-6ce7-4a2f-8741-1aaa4f2205d4</td>\n",
|
1134 |
+
" <td>434.0</td>\n",
|
1135 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1136 |
+
" </tr>\n",
|
1137 |
+
" <tr>\n",
|
1138 |
+
" <th>218</th>\n",
|
1139 |
+
" <td>('li Wenn man den Arabern glaubt, die der Mein...</td>\n",
|
1140 |
+
" <td>9a71ef03-d718-402b-b7f9-14844f486a22</td>\n",
|
1141 |
+
" <td>452.0</td>\n",
|
1142 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1143 |
+
" </tr>\n",
|
1144 |
+
" <tr>\n",
|
1145 |
+
" <th>219</th>\n",
|
1146 |
+
" <td>('t, Ich hatte einen uon den Landleuten, die i...</td>\n",
|
1147 |
+
" <td>e6e36f7b-3376-4624-a48b-ff2d88ba6167</td>\n",
|
1148 |
+
" <td>460.0</td>\n",
|
1149 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1150 |
+
" </tr>\n",
|
1151 |
+
" <tr>\n",
|
1152 |
+
" <th>220</th>\n",
|
1153 |
+
" <td>('91 Ich uerlies endlich diesen. heltischer Au...</td>\n",
|
1154 |
+
" <td>39769078-50ac-4faf-b32a-e74fd7cc46b0</td>\n",
|
1155 |
+
" <td>462.0</td>\n",
|
1156 |
+
" <td>Z166069305_merged.txt</td>\n",
|
1157 |
+
" </tr>\n",
|
1158 |
+
" </tbody>\n",
|
1159 |
+
"</table>\n",
|
1160 |
+
"</div>"
|
1161 |
+
],
|
1162 |
+
"text/plain": [
|
1163 |
+
" sentence_annotation \\\n",
|
1164 |
+
"200 ('r. Wir giengen in geschloffenen Reihen und m... \n",
|
1165 |
+
"201 ('s. Da die Wagen nicht gewohnlich sind, so be... \n",
|
1166 |
+
"202 ('t. Man tritt mit den Fusen allerhand Muschel... \n",
|
1167 |
+
"203 ('r. Sie kennen kein ander Spaziergehen, --- 0... \n",
|
1168 |
+
"204 ('s. In dem Departement, wo ich wohne und in d... \n",
|
1169 |
+
"205 ('t. Ich wuste, das er besucht zu werden uerdi... \n",
|
1170 |
+
"206 ('n. Sie hatten in ihrem Hause ein Paar rothe ... \n",
|
1171 |
+
"207 ('e. Nach diesem Kaufe nahmen wir uon dem gute... \n",
|
1172 |
+
"208 ('n. Der Viceconsul, rein Drogman und ein fran... \n",
|
1173 |
+
"209 ('n. Unsere Esel Fielen nieder, sanken in den ... \n",
|
1174 |
+
"210 ('tu Wir hatten Maulefel, womit man gewohnlich... \n",
|
1175 |
+
"211 ('. Ich trug kein Bedenken, mich ihnen anzuuer... \n",
|
1176 |
+
"212 ('e. Damit man nun nicht an meiner Furchtlosig... \n",
|
1177 |
+
"213 ('). Bei meiner Rukkunft nach Abukir fand ich ... \n",
|
1178 |
+
"214 ('-- Plinius berichtet nach dem Xenophon, die ... \n",
|
1179 |
+
"215 ('g. Ehe wir noch an dieseru Ore anlangten, sa... \n",
|
1180 |
+
"216 ('ort. Wir kamen auf einen\\' bedekten Sand, de... \n",
|
1181 |
+
"217 ('n. Ob fie gleich in starkem Galop auf uns zu... \n",
|
1182 |
+
"218 ('li Wenn man den Arabern glaubt, die der Mein... \n",
|
1183 |
+
"219 ('t, Ich hatte einen uon den Landleuten, die i... \n",
|
1184 |
+
"220 ('91 Ich uerlies endlich diesen. heltischer Au... \n",
|
1185 |
+
"\n",
|
1186 |
+
" id_annotation page_annotation \\\n",
|
1187 |
+
"200 a406cecb-48dc-4b27-9e2e-e831301a59bc 149.0 \n",
|
1188 |
+
"201 53e38871-8353-4ea1-acc5-e60964d4b02d 151.0 \n",
|
1189 |
+
"202 32cc068f-6a60-4040-aa4a-0575f48b5c52 155.0 \n",
|
1190 |
+
"203 638cd5aa-72b7-46a0-bbf1-82ec0f884747 186.0 \n",
|
1191 |
+
"204 8438c431-385f-40f0-b167-51527c2c8e7e 221.0 \n",
|
1192 |
+
"205 41bd83f1-06ec-4186-af2e-63dea256dfc8 267.0 \n",
|
1193 |
+
"206 e5ebef45-e36c-4742-9976-702dfad43fc5 270.0 \n",
|
1194 |
+
"207 a02a872b-8602-4ff4-93bd-f539ff93ada6 276.0 \n",
|
1195 |
+
"208 a5b12bfb-3a78-46ac-8d62-73d5bf4425c9 277.0 \n",
|
1196 |
+
"209 ceed7937-1824-4222-ba6c-686b2b4d7738 278.0 \n",
|
1197 |
+
"210 94d76c3d-1115-4a4e-91b5-501f6e88de65 386.0 \n",
|
1198 |
+
"211 acc36476-9315-4751-9646-bba3a2231e07 386.0 \n",
|
1199 |
+
"212 b9449bfd-c3bb-4b97-9c45-305b10b7aeca 389.0 \n",
|
1200 |
+
"213 995c9029-3950-44cb-a7c7-e6f91a96ecf7 391.0 \n",
|
1201 |
+
"214 fe020016-1d51-4a1f-bbc1-d4a65973bf67 402.0 \n",
|
1202 |
+
"215 4795c52d-1f6e-4de6-b87f-a5ae17735dad 419.0 \n",
|
1203 |
+
"216 92f4ee0e-d411-4c55-bda6-02b5f8aefce6 433.0 \n",
|
1204 |
+
"217 0c529bb3-6ce7-4a2f-8741-1aaa4f2205d4 434.0 \n",
|
1205 |
+
"218 9a71ef03-d718-402b-b7f9-14844f486a22 452.0 \n",
|
1206 |
+
"219 e6e36f7b-3376-4624-a48b-ff2d88ba6167 460.0 \n",
|
1207 |
+
"220 39769078-50ac-4faf-b32a-e74fd7cc46b0 462.0 \n",
|
1208 |
+
"\n",
|
1209 |
+
" file_annotation \n",
|
1210 |
+
"200 Z166069305_merged.txt \n",
|
1211 |
+
"201 Z166069305_merged.txt \n",
|
1212 |
+
"202 Z166069305_merged.txt \n",
|
1213 |
+
"203 Z166069305_merged.txt \n",
|
1214 |
+
"204 Z166069305_merged.txt \n",
|
1215 |
+
"205 Z166069305_merged.txt \n",
|
1216 |
+
"206 Z166069305_merged.txt \n",
|
1217 |
+
"207 Z166069305_merged.txt \n",
|
1218 |
+
"208 Z166069305_merged.txt \n",
|
1219 |
+
"209 Z166069305_merged.txt \n",
|
1220 |
+
"210 Z166069305_merged.txt \n",
|
1221 |
+
"211 Z166069305_merged.txt \n",
|
1222 |
+
"212 Z166069305_merged.txt \n",
|
1223 |
+
"213 Z166069305_merged.txt \n",
|
1224 |
+
"214 Z166069305_merged.txt \n",
|
1225 |
+
"215 Z166069305_merged.txt \n",
|
1226 |
+
"216 Z166069305_merged.txt \n",
|
1227 |
+
"217 Z166069305_merged.txt \n",
|
1228 |
+
"218 Z166069305_merged.txt \n",
|
1229 |
+
"219 Z166069305_merged.txt \n",
|
1230 |
+
"220 Z166069305_merged.txt "
|
1231 |
+
]
|
1232 |
+
},
|
1233 |
+
"execution_count": 10,
|
1234 |
+
"metadata": {},
|
1235 |
+
"output_type": "execute_result"
|
1236 |
+
}
|
1237 |
+
],
|
1238 |
+
"source": [
|
1239 |
+
"# Filter rows where 'found_in_text_vector' is False --> False Negatives\n",
|
1240 |
+
"missed = matches_df[matches_df['found_in_text_vector'] == False]\n",
|
1241 |
+
"missed_fil = missed['id_annotation'].drop_duplicates(keep='first')\n",
|
1242 |
+
"missed_pages = missed['page_annotation'].drop_duplicates(keep='first')\n",
|
1243 |
+
"\n",
|
1244 |
+
"# Filter out pages that are in unique pages found\n",
|
1245 |
+
"missed_pages_unique = missed_pages[~missed_pages.isin(unique_foundPages_list)]\n",
|
1246 |
+
"retrieved_pages_otherVec = missed_pages[missed_pages.isin(unique_foundPages_list)]\n",
|
1247 |
+
"\n",
|
1248 |
+
"print(f\"False Negatives: Missed {len(missed_fil)} mentions on {len(missed_pages_unique)} pages, of which {len(retrieved_pages_otherVec)} pages were retrieved with another vector.\")\n",
|
1249 |
+
"print(f\"({(len(missed_pages_unique))/len(unique_pages_list)*100:.2f}%).\")\n",
|
1250 |
+
"\n",
|
1251 |
+
"missed[['sentence_annotation', 'id_annotation', 'page_annotation', 'file_annotation']]"
|
1252 |
+
]
|
1253 |
+
},
|
1254 |
+
{
|
1255 |
+
"cell_type": "code",
|
1256 |
+
"execution_count": 11,
|
1257 |
+
"id": "e7753ea4",
|
1258 |
+
"metadata": {},
|
1259 |
+
"outputs": [
|
1260 |
+
{
|
1261 |
+
"name": "stdout",
|
1262 |
+
"output_type": "stream",
|
1263 |
+
"text": [
|
1264 |
+
"Retrieved 173 vectors on 173 pages of which 11 pages do contain annotations (correct pages, but other vector retrieved).\n",
|
1265 |
+
"and of which 162 pages do not contain annotations (= False Positives)\n",
|
1266 |
+
"(405.00)\n",
|
1267 |
+
"and of which 0 pages were retrieved with other vectors (= True Negatives).\n"
|
1268 |
+
]
|
1269 |
+
},
|
1270 |
+
{
|
1271 |
+
"data": {
|
1272 |
+
"text/html": [
|
1273 |
+
"<div>\n",
|
1274 |
+
"<style scoped>\n",
|
1275 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
1276 |
+
" vertical-align: middle;\n",
|
1277 |
+
" }\n",
|
1278 |
+
"\n",
|
1279 |
+
" .dataframe tbody tr th {\n",
|
1280 |
+
" vertical-align: top;\n",
|
1281 |
+
" }\n",
|
1282 |
+
"\n",
|
1283 |
+
" .dataframe thead th {\n",
|
1284 |
+
" text-align: right;\n",
|
1285 |
+
" }\n",
|
1286 |
+
"</style>\n",
|
1287 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
1288 |
+
" <thead>\n",
|
1289 |
+
" <tr style=\"text-align: right;\">\n",
|
1290 |
+
" <th></th>\n",
|
1291 |
+
" <th>text_vector_x</th>\n",
|
1292 |
+
" <th>rerank</th>\n",
|
1293 |
+
" <th>id_text_vector</th>\n",
|
1294 |
+
" <th>page</th>\n",
|
1295 |
+
" <th>page_annotation</th>\n",
|
1296 |
+
" <th>file_annotation</th>\n",
|
1297 |
+
" </tr>\n",
|
1298 |
+
" </thead>\n",
|
1299 |
+
" <tbody>\n",
|
1300 |
+
" <tr>\n",
|
1301 |
+
" <th>16</th>\n",
|
1302 |
+
" <td>Unter einer Regierung, die nur die Absicht bat...</td>\n",
|
1303 |
+
" <td>17.0</td>\n",
|
1304 |
+
" <td>9ee8949a-e0cc-4334-bc36-b71becd2d5c7</td>\n",
|
1305 |
+
" <td>400.0</td>\n",
|
1306 |
+
" <td>NaN</td>\n",
|
1307 |
+
" <td>NaN</td>\n",
|
1308 |
+
" </tr>\n",
|
1309 |
+
" <tr>\n",
|
1310 |
+
" <th>23</th>\n",
|
1311 |
+
" <td>feiner Neise 1. th.).</td>\n",
|
1312 |
+
" <td>24.0</td>\n",
|
1313 |
+
" <td>39e2e422-b7c3-40c1-90ee-12718c603261</td>\n",
|
1314 |
+
" <td>393.0</td>\n",
|
1315 |
+
" <td>NaN</td>\n",
|
1316 |
+
" <td>NaN</td>\n",
|
1317 |
+
" </tr>\n",
|
1318 |
+
" <tr>\n",
|
1319 |
+
" <th>27</th>\n",
|
1320 |
+
" <td>**) Ursprung der Gefese 2. Bd.</td>\n",
|
1321 |
+
" <td>28.0</td>\n",
|
1322 |
+
" <td>4ff5ac89-e2f3-468f-bad6-712ecce32e54</td>\n",
|
1323 |
+
" <td>170.0</td>\n",
|
1324 |
+
" <td>NaN</td>\n",
|
1325 |
+
" <td>NaN</td>\n",
|
1326 |
+
" </tr>\n",
|
1327 |
+
" <tr>\n",
|
1328 |
+
" <th>28</th>\n",
|
1329 |
+
" <td>Von frih morgens an, bis auf den Abend, hat ma...</td>\n",
|
1330 |
+
" <td>29.0</td>\n",
|
1331 |
+
" <td>d4357d62-9022-4b84-803b-e634b791a923</td>\n",
|
1332 |
+
" <td>182.0</td>\n",
|
1333 |
+
" <td>NaN</td>\n",
|
1334 |
+
" <td>NaN</td>\n",
|
1335 |
+
" </tr>\n",
|
1336 |
+
" <tr>\n",
|
1337 |
+
" <th>29</th>\n",
|
1338 |
+
" <td>Das Wasser, das * Strix passerina, Lin, Chevec...</td>\n",
|
1339 |
+
" <td>30.0</td>\n",
|
1340 |
+
" <td>7aaae2e6-1e90-46a2-9f05-fe28c7b2842b</td>\n",
|
1341 |
+
" <td>236.0</td>\n",
|
1342 |
+
" <td>NaN</td>\n",
|
1343 |
+
" <td>NaN</td>\n",
|
1344 |
+
" </tr>\n",
|
1345 |
+
" <tr>\n",
|
1346 |
+
" <th>...</th>\n",
|
1347 |
+
" <td>...</td>\n",
|
1348 |
+
" <td>...</td>\n",
|
1349 |
+
" <td>...</td>\n",
|
1350 |
+
" <td>...</td>\n",
|
1351 |
+
" <td>...</td>\n",
|
1352 |
+
" <td>...</td>\n",
|
1353 |
+
" </tr>\n",
|
1354 |
+
" <tr>\n",
|
1355 |
+
" <th>195</th>\n",
|
1356 |
+
" <td>fchichte uberhaupt, und jene des. Mungo insbes...</td>\n",
|
1357 |
+
" <td>196.0</td>\n",
|
1358 |
+
" <td>f8995776-d337-413e-bc6b-d54a1fb5ff86</td>\n",
|
1359 |
+
" <td>223.0</td>\n",
|
1360 |
+
" <td>NaN</td>\n",
|
1361 |
+
" <td>NaN</td>\n",
|
1362 |
+
" </tr>\n",
|
1363 |
+
" <tr>\n",
|
1364 |
+
" <th>196</th>\n",
|
1365 |
+
" <td>Ob sie gleich gesehen hatten, was seit Morgens...</td>\n",
|
1366 |
+
" <td>197.0</td>\n",
|
1367 |
+
" <td>3315a574-24e7-4a73-b660-bd151a27956b</td>\n",
|
1368 |
+
" <td>437.0</td>\n",
|
1369 |
+
" <td>NaN</td>\n",
|
1370 |
+
" <td>NaN</td>\n",
|
1371 |
+
" </tr>\n",
|
1372 |
+
" <tr>\n",
|
1373 |
+
" <th>197</th>\n",
|
1374 |
+
" <td>Die fehr oftern Uncinigkeiten zwischen den Beh...</td>\n",
|
1375 |
+
" <td>198.0</td>\n",
|
1376 |
+
" <td>bd45fa07-bd6a-46f3-9ba9-516d48d55fb5</td>\n",
|
1377 |
+
" <td>161.0</td>\n",
|
1378 |
+
" <td>NaN</td>\n",
|
1379 |
+
" <td>NaN</td>\n",
|
1380 |
+
" </tr>\n",
|
1381 |
+
" <tr>\n",
|
1382 |
+
" <th>198</th>\n",
|
1383 |
+
" <td>Es ist bemerkenswerth, das man diesen unruhige...</td>\n",
|
1384 |
+
" <td>199.0</td>\n",
|
1385 |
+
" <td>736f2f9e-85b2-470d-a94e-f34f64db4189</td>\n",
|
1386 |
+
" <td>89.0</td>\n",
|
1387 |
+
" <td>NaN</td>\n",
|
1388 |
+
" <td>NaN</td>\n",
|
1389 |
+
" </tr>\n",
|
1390 |
+
" <tr>\n",
|
1391 |
+
" <th>199</th>\n",
|
1392 |
+
" <td>M. 24 und 26. Wit find Flein auf Erden und Flu...</td>\n",
|
1393 |
+
" <td>200.0</td>\n",
|
1394 |
+
" <td>ebb43751-cbfe-40d5-9056-95e68b9102a6</td>\n",
|
1395 |
+
" <td>126.0</td>\n",
|
1396 |
+
" <td>NaN</td>\n",
|
1397 |
+
" <td>NaN</td>\n",
|
1398 |
+
" </tr>\n",
|
1399 |
+
" </tbody>\n",
|
1400 |
+
"</table>\n",
|
1401 |
+
"<p>173 rows × 6 columns</p>\n",
|
1402 |
+
"</div>"
|
1403 |
+
],
|
1404 |
+
"text/plain": [
|
1405 |
+
" text_vector_x rerank \\\n",
|
1406 |
+
"16 Unter einer Regierung, die nur die Absicht bat... 17.0 \n",
|
1407 |
+
"23 feiner Neise 1. th.). 24.0 \n",
|
1408 |
+
"27 **) Ursprung der Gefese 2. Bd. 28.0 \n",
|
1409 |
+
"28 Von frih morgens an, bis auf den Abend, hat ma... 29.0 \n",
|
1410 |
+
"29 Das Wasser, das * Strix passerina, Lin, Chevec... 30.0 \n",
|
1411 |
+
".. ... ... \n",
|
1412 |
+
"195 fchichte uberhaupt, und jene des. Mungo insbes... 196.0 \n",
|
1413 |
+
"196 Ob sie gleich gesehen hatten, was seit Morgens... 197.0 \n",
|
1414 |
+
"197 Die fehr oftern Uncinigkeiten zwischen den Beh... 198.0 \n",
|
1415 |
+
"198 Es ist bemerkenswerth, das man diesen unruhige... 199.0 \n",
|
1416 |
+
"199 M. 24 und 26. Wit find Flein auf Erden und Flu... 200.0 \n",
|
1417 |
+
"\n",
|
1418 |
+
" id_text_vector page page_annotation \\\n",
|
1419 |
+
"16 9ee8949a-e0cc-4334-bc36-b71becd2d5c7 400.0 NaN \n",
|
1420 |
+
"23 39e2e422-b7c3-40c1-90ee-12718c603261 393.0 NaN \n",
|
1421 |
+
"27 4ff5ac89-e2f3-468f-bad6-712ecce32e54 170.0 NaN \n",
|
1422 |
+
"28 d4357d62-9022-4b84-803b-e634b791a923 182.0 NaN \n",
|
1423 |
+
"29 7aaae2e6-1e90-46a2-9f05-fe28c7b2842b 236.0 NaN \n",
|
1424 |
+
".. ... ... ... \n",
|
1425 |
+
"195 f8995776-d337-413e-bc6b-d54a1fb5ff86 223.0 NaN \n",
|
1426 |
+
"196 3315a574-24e7-4a73-b660-bd151a27956b 437.0 NaN \n",
|
1427 |
+
"197 bd45fa07-bd6a-46f3-9ba9-516d48d55fb5 161.0 NaN \n",
|
1428 |
+
"198 736f2f9e-85b2-470d-a94e-f34f64db4189 89.0 NaN \n",
|
1429 |
+
"199 ebb43751-cbfe-40d5-9056-95e68b9102a6 126.0 NaN \n",
|
1430 |
+
"\n",
|
1431 |
+
" file_annotation \n",
|
1432 |
+
"16 NaN \n",
|
1433 |
+
"23 NaN \n",
|
1434 |
+
"27 NaN \n",
|
1435 |
+
"28 NaN \n",
|
1436 |
+
"29 NaN \n",
|
1437 |
+
".. ... \n",
|
1438 |
+
"195 NaN \n",
|
1439 |
+
"196 NaN \n",
|
1440 |
+
"197 NaN \n",
|
1441 |
+
"198 NaN \n",
|
1442 |
+
"199 NaN \n",
|
1443 |
+
"\n",
|
1444 |
+
"[173 rows x 6 columns]"
|
1445 |
+
]
|
1446 |
+
},
|
1447 |
+
"execution_count": 11,
|
1448 |
+
"metadata": {},
|
1449 |
+
"output_type": "execute_result"
|
1450 |
+
}
|
1451 |
+
],
|
1452 |
+
"source": [
|
1453 |
+
"# Filter rows where 'found_in_text_vector' is NaN --> False Positives\n",
|
1454 |
+
"FP = matches_df[matches_df['found_in_text_vector'].isna()]\n",
|
1455 |
+
"fP_pages = FP['page'].drop_duplicates(keep='first')\n",
|
1456 |
+
"\n",
|
1457 |
+
"# Extract the \"page\" value from each dictionary of unique pages found\n",
|
1458 |
+
"unique_FPPages_list = fP_pages.to_list()\n",
|
1459 |
+
"\n",
|
1460 |
+
"# Filter out pages that are in unique pages found\n",
|
1461 |
+
"ret_pages_Annot = fP_pages[fP_pages.isin(unique_pages_list)]\n",
|
1462 |
+
"ret_pages_noAnnot = fP_pages[~fP_pages.isin(unique_pages_list)]\n",
|
1463 |
+
"retrieved_otherVec = fP_pages[fP_pages.isin(unique_foundPages_list)]\n",
|
1464 |
+
"\n",
|
1465 |
+
"print(f\"Retrieved {len(FP)} vectors on {len(unique_FPPages_list)} pages of which {len(ret_pages_Annot)} pages do contain annotations (correct pages, but other vector retrieved).\")\n",
|
1466 |
+
"print(f\"and of which {len(ret_pages_noAnnot)} pages do not contain annotations (= False Positives)\")\n",
|
1467 |
+
"print(f\"({len(ret_pages_noAnnot)/len(unique_pages_list)*100:.2f})\")\n",
|
1468 |
+
"print(f\"and of which {len(retrieved_otherVec)} pages were retrieved with other vectors (= True Negatives).\")\n",
|
1469 |
+
"FP[['text_vector_x', 'rerank', 'id_text_vector', 'page', 'page_annotation', 'file_annotation']]"
|
1470 |
+
]
|
1471 |
+
},
|
1472 |
+
{
|
1473 |
+
"cell_type": "code",
|
1474 |
+
"execution_count": null,
|
1475 |
+
"id": "2bf572ee",
|
1476 |
+
"metadata": {},
|
1477 |
+
"outputs": [],
|
1478 |
+
"source": []
|
1479 |
+
}
|
1480 |
+
],
|
1481 |
+
"metadata": {
|
1482 |
+
"kernelspec": {
|
1483 |
+
"display_name": "Python 3 (ipykernel)",
|
1484 |
+
"language": "python",
|
1485 |
+
"name": "python3"
|
1486 |
+
},
|
1487 |
+
"language_info": {
|
1488 |
+
"codemirror_mode": {
|
1489 |
+
"name": "ipython",
|
1490 |
+
"version": 3
|
1491 |
+
},
|
1492 |
+
"file_extension": ".py",
|
1493 |
+
"mimetype": "text/x-python",
|
1494 |
+
"name": "python",
|
1495 |
+
"nbconvert_exporter": "python",
|
1496 |
+
"pygments_lexer": "ipython3",
|
1497 |
+
"version": "3.9.12"
|
1498 |
+
},
|
1499 |
+
"toc": {
|
1500 |
+
"base_numbering": 1,
|
1501 |
+
"nav_menu": {},
|
1502 |
+
"number_sections": true,
|
1503 |
+
"sideBar": true,
|
1504 |
+
"skip_h1_title": false,
|
1505 |
+
"title_cell": "Table of Contents",
|
1506 |
+
"title_sidebar": "Contents",
|
1507 |
+
"toc_cell": false,
|
1508 |
+
"toc_position": {},
|
1509 |
+
"toc_section_display": true,
|
1510 |
+
"toc_window_display": false
|
1511 |
+
}
|
1512 |
+
},
|
1513 |
+
"nbformat": 4,
|
1514 |
+
"nbformat_minor": 5
|
1515 |
+
}
|
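The three cells above report page-level true positives, false negatives and false positives separately. A minimal sketch of how those counts could be folded into precision, recall and F1 is given below; the function name and the example counts are illustrative placeholders, not values produced by the notebook.

# Sketch: page-level precision / recall / F1 from the confusion counts printed above.
def page_level_metrics(tp_pages, fp_pages, fn_pages):
    precision = tp_pages / (tp_pages + fp_pages) if (tp_pages + fp_pages) else 0.0
    recall = tp_pages / (tp_pages + fn_pages) if (tp_pages + fn_pages) else 0.0
    f1 = 2 * precision * recall / (precision + recall) if (precision + recall) else 0.0
    return {"precision": precision, "recall": recall, "f1": f1}

# Hypothetical example counts, chosen only to show the call:
print(page_level_metrics(tp_pages=20, fp_pages=162, fn_pages=20))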
src/analysis/query_index.ipynb
ADDED
@@ -0,0 +1,1685 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"id": "73ef8baa",
|
6 |
+
"metadata": {},
|
7 |
+
"source": [
|
8 |
+
"# Query Marqo Index"
|
9 |
+
]
|
10 |
+
},
|
11 |
+
{
|
12 |
+
"cell_type": "code",
|
13 |
+
"execution_count": 1,
|
14 |
+
"id": "dee4e8d3",
|
15 |
+
"metadata": {
|
16 |
+
"code_folding": []
|
17 |
+
},
|
18 |
+
"outputs": [],
|
19 |
+
"source": [
|
20 |
+
"## Import packages\n",
|
21 |
+
"import marqo as mq\n",
|
22 |
+
"import pandas as pd\n",
|
23 |
+
"import ipywidgets as widgets\n",
|
24 |
+
"from IPython.display import display, HTML\n",
|
25 |
+
"from IPython.core.display import Javascript\n",
|
26 |
+
"from pprint import pprint"
|
27 |
+
]
|
28 |
+
},
|
29 |
+
{
|
30 |
+
"cell_type": "code",
|
31 |
+
"execution_count": 2,
|
32 |
+
"id": "e80297f9",
|
33 |
+
"metadata": {
|
34 |
+
"code_folding": []
|
35 |
+
},
|
36 |
+
"outputs": [
|
37 |
+
{
|
38 |
+
"name": "stdout",
|
39 |
+
"output_type": "stream",
|
40 |
+
"text": [
|
41 |
+
"Connected to onit-sonnini-DHd2025-prep.\n"
|
42 |
+
]
|
43 |
+
}
|
44 |
+
],
|
45 |
+
"source": [
|
46 |
+
"## Connect to Marqo\n",
|
47 |
+
"\n",
|
48 |
+
"MARQO_URL = \"http://your.ip:port\"\n",
|
49 |
+
"marqoClient = mq.Client(url=MARQO_URL)\n",
|
50 |
+
"#pprint(marqoClient.get_indexes())\n",
|
51 |
+
"\n",
|
52 |
+
"## DHd 2025 ##\n",
|
53 |
+
"indexName = \"onit-sonnini-DHd2025-prep\" ## index with LLM-corrected texts as tensor field\n",
|
54 |
+
"#indexName = \"onit-sonnini-DHd2025-clean\" ## index with cleaned texts as tensor field\n",
|
55 |
+
"print(f'Connected to {indexName}.')\n",
|
56 |
+
"\n",
|
57 |
+
"# Load corpus data\n",
|
58 |
+
"bc_corpus = pd.read_csv(\"data/ONiT_barcodes_ALL_metadata_ONB_status_2024-05-23.csv\")\n",
|
59 |
+
"bc_corpus = bc_corpus.drop_duplicates(subset='barcode', keep='last')"
|
60 |
+
]
|
61 |
+
},
|
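The cell above only connects to an already existing index; the embeddings themselves are assumed to have been created beforehand with the page texts as tensor field. A rough sketch of what that indexing step could look like with the Marqo Python client follows; the embedding model, batch size and the choice of text_prep as the tensor field are assumptions for illustration, not the project's actual indexing configuration.

# Sketch of a possible indexing step (assumed: model name, tensor field, batch size).
import marqo as mq
import pandas as pd

client = mq.Client(url="http://your.ip:port")
client.create_index("onit-sonnini-DHd2025-prep", model="hf/e5-base-v2")  # model is an assumed choice

pages = pd.read_csv("C:/onit_rag/data/DHd_index-cleaned.csv")
docs = pages[["barcode", "page", "iiif_link", "text_orig", "text_clean", "text_prep"]].to_dict("records")

client.index("onit-sonnini-DHd2025-prep").add_documents(
    docs,
    tensor_fields=["text_prep"],  # embed the LLM-corrected page text
    client_batch_size=64,
)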
62 |
+
{
|
63 |
+
"cell_type": "code",
|
64 |
+
"execution_count": 3,
|
65 |
+
"id": "353081b8",
|
66 |
+
"metadata": {},
|
67 |
+
"outputs": [
|
68 |
+
{
|
69 |
+
"data": {
|
70 |
+
"text/plain": [
|
71 |
+
"{'message': 'Welcome to Marqo', 'version': '2.5.1'}"
|
72 |
+
]
|
73 |
+
},
|
74 |
+
"execution_count": 3,
|
75 |
+
"metadata": {},
|
76 |
+
"output_type": "execute_result"
|
77 |
+
}
|
78 |
+
],
|
79 |
+
"source": [
|
80 |
+
"marqoClient.index(indexName).get_marqo()"
|
81 |
+
]
|
82 |
+
},
|
83 |
+
{
|
84 |
+
"cell_type": "code",
|
85 |
+
"execution_count": 4,
|
86 |
+
"id": "c9ceb6de",
|
87 |
+
"metadata": {
|
88 |
+
"code_folding": []
|
89 |
+
},
|
90 |
+
"outputs": [
|
91 |
+
{
|
92 |
+
"data": {
|
93 |
+
"text/html": [
|
94 |
+
"<div>\n",
|
95 |
+
"<style scoped>\n",
|
96 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
97 |
+
" vertical-align: middle;\n",
|
98 |
+
" }\n",
|
99 |
+
"\n",
|
100 |
+
" .dataframe tbody tr th {\n",
|
101 |
+
" vertical-align: top;\n",
|
102 |
+
" }\n",
|
103 |
+
"\n",
|
104 |
+
" .dataframe thead th {\n",
|
105 |
+
" text-align: right;\n",
|
106 |
+
" }\n",
|
107 |
+
"</style>\n",
|
108 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
109 |
+
" <thead>\n",
|
110 |
+
" <tr style=\"text-align: right;\">\n",
|
111 |
+
" <th></th>\n",
|
112 |
+
" <th>barcode</th>\n",
|
113 |
+
" <th>page</th>\n",
|
114 |
+
" <th>iiif_link</th>\n",
|
115 |
+
" <th>text_clean</th>\n",
|
116 |
+
" <th>text_orig</th>\n",
|
117 |
+
" <th>text_prep</th>\n",
|
118 |
+
" </tr>\n",
|
119 |
+
" </thead>\n",
|
120 |
+
" <tbody>\n",
|
121 |
+
" <tr>\n",
|
122 |
+
" <th>0</th>\n",
|
123 |
+
" <td>Z166069305</td>\n",
|
124 |
+
" <td>5</td>\n",
|
125 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
126 |
+
" <td>C. S.' Sonnin is,\\nehemaligen Dffiziers uub In...</td>\n",
|
127 |
+
" <td>!\\n\\nC. S.' Sonnin i’s,\\n\\n;\\nehemaligen Dffiz...</td>\n",
|
128 |
+
" <td>C. S.' Sonnini's,\\nehemaligen Offiziers und In...</td>\n",
|
129 |
+
" </tr>\n",
|
130 |
+
" <tr>\n",
|
131 |
+
" <th>1</th>\n",
|
132 |
+
" <td>Z166069305</td>\n",
|
133 |
+
" <td>6</td>\n",
|
134 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
135 |
+
" <td>2125 murid\\ngobiothers\\nconale\\nKOENISE\\nKAISE...</td>\n",
|
136 |
+
" <td>2125 murid\\n\\ngobiothers\\n\\nconale\\n\\nܪܝ\\n\\n، ...</td>\n",
|
137 |
+
" <td>2255 Murdoch \\nGouverneurs\\nComte\\nKOENIGSE\\nK...</td>\n",
|
138 |
+
" </tr>\n",
|
139 |
+
" <tr>\n",
|
140 |
+
" <th>2</th>\n",
|
141 |
+
" <td>Z166069305</td>\n",
|
142 |
+
" <td>7</td>\n",
|
143 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
144 |
+
" <td>V o r re o e\\nDe 6 u i berpe Bet,$.\\nundteichl...</td>\n",
|
145 |
+
" <td>V o r re o e\\nDė 6 u i berpe Bét,$.\\n\\nundteic...</td>\n",
|
146 |
+
" <td>Vorrede.\\n\\nDeutschland hat in betreff seiner ...</td>\n",
|
147 |
+
" </tr>\n",
|
148 |
+
" <tr>\n",
|
149 |
+
" <th>3</th>\n",
|
150 |
+
" <td>Z166069305</td>\n",
|
151 |
+
" <td>8</td>\n",
|
152 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
153 |
+
" <td>Welt auf fich gezogen haben. Viele feiner Denk...</td>\n",
|
154 |
+
" <td>IV\\n\\n.\\n\\nWelt auf fich gezogen haben. Viele ...</td>\n",
|
155 |
+
" <td>Welt auf sich gezogen haben. Viele seiner Denk...</td>\n",
|
156 |
+
" </tr>\n",
|
157 |
+
" <tr>\n",
|
158 |
+
" <th>4</th>\n",
|
159 |
+
" <td>Z166069305</td>\n",
|
160 |
+
" <td>9</td>\n",
|
161 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
162 |
+
" <td>und auslandische Sklaven genossen die Vortheil...</td>\n",
|
163 |
+
" <td>)\\n\\n-\\nI\\n\\nI\\n\\n1\\n\\n11\\n\\nr.\\n\\nund ausländ...</td>\n",
|
164 |
+
" <td>Und ausländische Sklaven genossen die Vortheil...</td>\n",
|
165 |
+
" </tr>\n",
|
166 |
+
" <tr>\n",
|
167 |
+
" <th>...</th>\n",
|
168 |
+
" <td>...</td>\n",
|
169 |
+
" <td>...</td>\n",
|
170 |
+
" <td>...</td>\n",
|
171 |
+
" <td>...</td>\n",
|
172 |
+
" <td>...</td>\n",
|
173 |
+
" <td>...</td>\n",
|
174 |
+
" </tr>\n",
|
175 |
+
" <tr>\n",
|
176 |
+
" <th>486</th>\n",
|
177 |
+
" <td>Z166069305</td>\n",
|
178 |
+
" <td>494</td>\n",
|
179 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
180 |
+
" <td><empty page></td>\n",
|
181 |
+
" <td>+\\n\\nè³½\\n\\n1\\n\\n\\n</td>\n",
|
182 |
+
" <td><empty page></td>\n",
|
183 |
+
" </tr>\n",
|
184 |
+
" <tr>\n",
|
185 |
+
" <th>487</th>\n",
|
186 |
+
" <td>Z166069305</td>\n",
|
187 |
+
" <td>495</td>\n",
|
188 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
189 |
+
" <td>Riedel fo\\nBedminenzelt</td>\n",
|
190 |
+
" <td>.7.\\n\\n.3.6\\n\\n>\\n\\n1 0\\n\\nRiedel fo\\n\\n(\\n\\n(...</td>\n",
|
191 |
+
" <td>Riedel von Eisenbach</td>\n",
|
192 |
+
" </tr>\n",
|
193 |
+
" <tr>\n",
|
194 |
+
" <th>488</th>\n",
|
195 |
+
" <td>Z166069305</td>\n",
|
196 |
+
" <td>499</td>\n",
|
197 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
198 |
+
" <td>Osterreichische Nationalbibliothek\\n+ Z166069305</td>\n",
|
199 |
+
" <td>Österreichische Nationalbibliothek\\n\\n+ Z1660...</td>\n",
|
200 |
+
" <td>Please provide the faulty OCR texts generated ...</td>\n",
|
201 |
+
" </tr>\n",
|
202 |
+
" <tr>\n",
|
203 |
+
" <th>489</th>\n",
|
204 |
+
" <td>Z166069305</td>\n",
|
205 |
+
" <td>503</td>\n",
|
206 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
207 |
+
" <td><empty page></td>\n",
|
208 |
+
" <td>{\\n \"status code\" : 404,\\n \"message\" : \"The ...</td>\n",
|
209 |
+
" <td><empty page></td>\n",
|
210 |
+
" </tr>\n",
|
211 |
+
" <tr>\n",
|
212 |
+
" <th>490</th>\n",
|
213 |
+
" <td>Z166069305</td>\n",
|
214 |
+
" <td>504</td>\n",
|
215 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
216 |
+
" <td><empty page></td>\n",
|
217 |
+
" <td>{\\n \"status code\" : 404,\\n \"message\" : \"The ...</td>\n",
|
218 |
+
" <td><empty page></td>\n",
|
219 |
+
" </tr>\n",
|
220 |
+
" </tbody>\n",
|
221 |
+
"</table>\n",
|
222 |
+
"<p>491 rows × 6 columns</p>\n",
|
223 |
+
"</div>"
|
224 |
+
],
|
225 |
+
"text/plain": [
|
226 |
+
" barcode page iiif_link \\\n",
|
227 |
+
"0 Z166069305 5 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
228 |
+
"1 Z166069305 6 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
229 |
+
"2 Z166069305 7 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
230 |
+
"3 Z166069305 8 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
231 |
+
"4 Z166069305 9 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
232 |
+
".. ... ... ... \n",
|
233 |
+
"486 Z166069305 494 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
234 |
+
"487 Z166069305 495 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
235 |
+
"488 Z166069305 499 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
236 |
+
"489 Z166069305 503 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
237 |
+
"490 Z166069305 504 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
238 |
+
"\n",
|
239 |
+
" text_clean \\\n",
|
240 |
+
"0 C. S.' Sonnin is,\\nehemaligen Dffiziers uub In... \n",
|
241 |
+
"1 2125 murid\\ngobiothers\\nconale\\nKOENISE\\nKAISE... \n",
|
242 |
+
"2 V o r re o e\\nDe 6 u i berpe Bet,$.\\nundteichl... \n",
|
243 |
+
"3 Welt auf fich gezogen haben. Viele feiner Denk... \n",
|
244 |
+
"4 und auslandische Sklaven genossen die Vortheil... \n",
|
245 |
+
".. ... \n",
|
246 |
+
"486 <empty page> \n",
|
247 |
+
"487 Riedel fo\\nBedminenzelt \n",
|
248 |
+
"488 Osterreichische Nationalbibliothek\\n+ Z166069305 \n",
|
249 |
+
"489 <empty page> \n",
|
250 |
+
"490 <empty page> \n",
|
251 |
+
"\n",
|
252 |
+
" text_orig \\\n",
|
253 |
+
"0 !\\n\\nC. S.' Sonnin i’s,\\n\\n;\\nehemaligen Dffiz... \n",
|
254 |
+
"1 2125 murid\\n\\ngobiothers\\n\\nconale\\n\\nܪܝ\\n\\n، ... \n",
|
255 |
+
"2 V o r re o e\\nDė 6 u i berpe Bét,$.\\n\\nundteic... \n",
|
256 |
+
"3 IV\\n\\n.\\n\\nWelt auf fich gezogen haben. Viele ... \n",
|
257 |
+
"4 )\\n\\n-\\nI\\n\\nI\\n\\n1\\n\\n11\\n\\nr.\\n\\nund ausländ... \n",
|
258 |
+
".. ... \n",
|
259 |
+
"486 +\\n\\nè³½\\n\\n1\\n\\n\\n \n",
|
260 |
+
"487 .7.\\n\\n.3.6\\n\\n>\\n\\n1 0\\n\\nRiedel fo\\n\\n(\\n\\n(... \n",
|
261 |
+
"488 Österreichische Nationalbibliothek\\n\\n+ Z1660... \n",
|
262 |
+
"489 {\\n \"status code\" : 404,\\n \"message\" : \"The ... \n",
|
263 |
+
"490 {\\n \"status code\" : 404,\\n \"message\" : \"The ... \n",
|
264 |
+
"\n",
|
265 |
+
" text_prep \n",
|
266 |
+
"0 C. S.' Sonnini's,\\nehemaligen Offiziers und In... \n",
|
267 |
+
"1 2255 Murdoch \\nGouverneurs\\nComte\\nKOENIGSE\\nK... \n",
|
268 |
+
"2 Vorrede.\\n\\nDeutschland hat in betreff seiner ... \n",
|
269 |
+
"3 Welt auf sich gezogen haben. Viele seiner Denk... \n",
|
270 |
+
"4 Und ausländische Sklaven genossen die Vortheil... \n",
|
271 |
+
".. ... \n",
|
272 |
+
"486 <empty page> \n",
|
273 |
+
"487 Riedel von Eisenbach \n",
|
274 |
+
"488 Please provide the faulty OCR texts generated ... \n",
|
275 |
+
"489 <empty page> \n",
|
276 |
+
"490 <empty page> \n",
|
277 |
+
"\n",
|
278 |
+
"[491 rows x 6 columns]"
|
279 |
+
]
|
280 |
+
},
|
281 |
+
"execution_count": 4,
|
282 |
+
"metadata": {},
|
283 |
+
"output_type": "execute_result"
|
284 |
+
}
|
285 |
+
],
|
286 |
+
"source": [
|
287 |
+
"# Preview data\n",
|
288 |
+
"import pandas as pd\n",
|
289 |
+
"\n",
|
290 |
+
"index_DHd = pd.read_csv(\"C:/onit_rag/data/DHd_index-cleaned.csv\")\n",
|
291 |
+
"index_DHd"
|
292 |
+
]
|
293 |
+
},
|
294 |
+
{
|
295 |
+
"cell_type": "code",
|
296 |
+
"execution_count": 14,
|
297 |
+
"id": "203aa5ea",
|
298 |
+
"metadata": {},
|
299 |
+
"outputs": [],
|
300 |
+
"source": [
|
301 |
+
"## Query parameter\n",
|
302 |
+
"\n",
|
303 |
+
"query = \"Pferd, Pferde\"\n",
|
304 |
+
"limit = 1000 # max limit = 1000\n",
|
305 |
+
"threshold = 0.7 # set threshold\n",
|
306 |
+
"filter_string = \"barcode:(Z166069305)\" # Sonnini Bd. 1"
|
307 |
+
]
|
308 |
+
},
|
309 |
+
{
|
310 |
+
"cell_type": "code",
|
311 |
+
"execution_count": 15,
|
312 |
+
"id": "693ec044",
|
313 |
+
"metadata": {
|
314 |
+
"code_folding": []
|
315 |
+
},
|
316 |
+
"outputs": [
|
317 |
+
{
|
318 |
+
"name": "stdout",
|
319 |
+
"output_type": "stream",
|
320 |
+
"text": [
|
321 |
+
"Output tensor search: 491\n",
|
322 |
+
"Output lexical search: 28\n",
|
323 |
+
"Output tensor search after filtering vectors < 5 tokens: 447\n",
|
324 |
+
"Output tensor search with threshold: 447\n"
|
325 |
+
]
|
326 |
+
}
|
327 |
+
],
|
328 |
+
"source": [
|
329 |
+
"## Query the index\n",
|
330 |
+
"\n",
|
331 |
+
"#results = marqoClient.index(indexName).search(q=query, limit=limit) # basic search\n",
|
332 |
+
"results_tensor = marqoClient.index(indexName).search(q=query, limit=limit, filter_string=filter_string) # tensor search\n",
|
333 |
+
"print('Output tensor search: ', len(results_tensor['hits']))\n",
|
334 |
+
"results_lexical = marqoClient.index(indexName).search(q=query, limit=limit, filter_string=filter_string, search_method=\"LEXICAL\") # keyword search (BM25)\n",
|
335 |
+
"print('Output lexical search: ', len(results_lexical['hits']))\n",
|
336 |
+
"\n",
|
337 |
+
"## Load into dataframes\n",
|
338 |
+
"\n",
|
339 |
+
"# results tensor search\n",
|
340 |
+
"output1 = pd.DataFrame(results_tensor[\"hits\"])\n",
|
341 |
+
"# Filter rows where _highlights are 5 tokens or more\n",
|
342 |
+
"output1 = output1[output1[\"_highlights\"].apply(lambda x: len(str(x).split()) >= 5)]\n",
|
343 |
+
"print('Output tensor search after filtering vectors < 5 tokens: ', len(output1))\n",
|
344 |
+
"### Filter rows where _score is >= threshold\n",
|
345 |
+
"output1 = output1[output1[\"_score\"] >= threshold]\n",
|
346 |
+
"print('Output tensor search with threshold: ', len(output1))\n",
|
347 |
+
"# Add corpus metadata\n",
|
348 |
+
"# Merge the two DataFrames on the 'barcode' column to add 'corpus'\n",
|
349 |
+
"output1 = output1.merge(bc_corpus, on='barcode', how='inner')\n",
|
350 |
+
"# Add index +1 as a new column called 'rank'\n",
|
351 |
+
"output1['rank'] = output1.index + 1\n",
|
352 |
+
"# Add document identifier\n",
|
353 |
+
"output1['document'] = output1.apply(lambda row: f\"{row['barcode']}_{row['page']}\", axis=1)\n",
|
354 |
+
"\n",
|
355 |
+
"# results keyword search\n",
|
356 |
+
"if len(results_lexical['hits']) == 0:\n",
|
357 |
+
" output2 = output1\n",
|
358 |
+
" print(\"No lexical search results! Using only tensor results.\")\n",
|
359 |
+
" \n",
|
360 |
+
"else:\n",
|
361 |
+
" output2 = pd.DataFrame(results_lexical[\"hits\"])\n",
|
362 |
+
" # Add corpus metadata\n",
|
363 |
+
" # Merge the two DataFrames on the 'barcode' column to add 'corpus'\n",
|
364 |
+
" output2 = output2.merge(bc_corpus, on='barcode', how='inner')\n",
|
365 |
+
" # Add index +1 as a new column called 'rank'\n",
|
366 |
+
" output2['rank'] = output2.index + 1#apply(lambda row: output2.index + 1, axis=1)\n",
|
367 |
+
" # Add document identifier\n",
|
368 |
+
" output2['document'] = output2.apply(lambda row: f\"{row['barcode']}_{row['page']}\", axis=1)"
|
369 |
+
]
|
370 |
+
},
|
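The merged result table shown further below carries an rrf_score and a rerank column that are not computed in the cell above, so the tensor and lexical rankings are presumably fused in a later step. A minimal sketch of a standard reciprocal rank fusion over the two ranked sets is given below, assuming the conventional constant k = 60 (with k = 60, a document ranked 1st and 2nd in the two lists gets 1/61 + 1/62 ≈ 0.0325, which matches the top score in the table); this illustrates the technique only and is not the notebook's actual fusion code.

# Sketch: reciprocal rank fusion of the tensor and lexical result sets.
import pandas as pd

def rrf_fuse(tensor_df, lexical_df, k=60):
    # Both inputs are expected to carry 'document' and 'rank' columns, as built above.
    scores = {}
    for df in (tensor_df, lexical_df):
        for _, row in df.iterrows():
            scores[row["document"]] = scores.get(row["document"], 0.0) + 1.0 / (k + row["rank"])
    fused = (
        pd.DataFrame(scores.items(), columns=["document", "rrf_score"])
        .sort_values("rrf_score", ascending=False)
        .reset_index(drop=True)
    )
    fused["rerank"] = fused.index + 1  # final rank after fusion
    return fused

# e.g. results_rrf = rrf_fuse(output1, output2)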
371 |
+
{
|
372 |
+
"cell_type": "code",
|
373 |
+
"execution_count": 11,
|
374 |
+
"id": "545badce",
|
375 |
+
"metadata": {
|
376 |
+
"code_folding": [
|
377 |
+
0
|
378 |
+
]
|
379 |
+
},
|
380 |
+
"outputs": [
|
381 |
+
{
|
382 |
+
"data": {
|
383 |
+
"text/html": [
|
384 |
+
"<div>\n",
|
385 |
+
"<style scoped>\n",
|
386 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
387 |
+
" vertical-align: middle;\n",
|
388 |
+
" }\n",
|
389 |
+
"\n",
|
390 |
+
" .dataframe tbody tr th {\n",
|
391 |
+
" vertical-align: top;\n",
|
392 |
+
" }\n",
|
393 |
+
"\n",
|
394 |
+
" .dataframe thead th {\n",
|
395 |
+
" text-align: right;\n",
|
396 |
+
" }\n",
|
397 |
+
"</style>\n",
|
398 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
399 |
+
" <thead>\n",
|
400 |
+
" <tr style=\"text-align: right;\">\n",
|
401 |
+
" <th></th>\n",
|
402 |
+
" <th>document</th>\n",
|
403 |
+
" <th>rrf_score</th>\n",
|
404 |
+
" <th>barcode</th>\n",
|
405 |
+
" <th>page</th>\n",
|
406 |
+
" <th>iiif_link</th>\n",
|
407 |
+
" <th>text_orig</th>\n",
|
408 |
+
" <th>text_clean</th>\n",
|
409 |
+
" <th>text_prep</th>\n",
|
410 |
+
" <th>_id</th>\n",
|
411 |
+
" <th>_highlights</th>\n",
|
412 |
+
" <th>_score</th>\n",
|
413 |
+
" <th>rank</th>\n",
|
414 |
+
" <th>corpus</th>\n",
|
415 |
+
" <th>rerank</th>\n",
|
416 |
+
" </tr>\n",
|
417 |
+
" </thead>\n",
|
418 |
+
" <tbody>\n",
|
419 |
+
" <tr>\n",
|
420 |
+
" <th>0</th>\n",
|
421 |
+
" <td>Z166069305_430</td>\n",
|
422 |
+
" <td>0.032522</td>\n",
|
423 |
+
" <td>Z166069305</td>\n",
|
424 |
+
" <td>430</td>\n",
|
425 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
426 |
+
" <td>402 befand mich in einiger Entfernung davon, ...</td>\n",
|
427 |
+
" <td>befand mich in einiger Entfernung davon, und d...</td>\n",
|
428 |
+
" <td>befand mich in einiger Entfernung davon, und d...</td>\n",
|
429 |
+
" <td>7e2b21a1-4cdb-4b6a-b1c0-75c30bfe13bb</td>\n",
|
430 |
+
" <td>[{'text_prep': 'befand mich in einiger Entfern...</td>\n",
|
431 |
+
" <td>0.892140</td>\n",
|
432 |
+
" <td>1/2</td>\n",
|
433 |
+
" <td>D19</td>\n",
|
434 |
+
" <td>1</td>\n",
|
435 |
+
" </tr>\n",
|
436 |
+
" <tr>\n",
|
437 |
+
" <th>1</th>\n",
|
438 |
+
" <td>Z166069305_10</td>\n",
|
439 |
+
" <td>0.030118</td>\n",
|
440 |
+
" <td>Z166069305</td>\n",
|
441 |
+
" <td>10</td>\n",
|
442 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
443 |
+
" <td>VI große Flinten, die ihm ſeine Diener zur Se...</td>\n",
|
444 |
+
" <td>grose Flinten, die ihm seine Diener zur Seite ...</td>\n",
|
445 |
+
" <td>große Flinten, die ihm seine Diener zur Seite ...</td>\n",
|
446 |
+
" <td>ab0905d4-6ca7-4e94-8fb2-3d2081632d6d</td>\n",
|
447 |
+
" <td>[{'text_prep': 'Sein letztes Hulfritt trifft z...</td>\n",
|
448 |
+
" <td>0.872264</td>\n",
|
449 |
+
" <td>4/9</td>\n",
|
450 |
+
" <td>D19</td>\n",
|
451 |
+
" <td>2</td>\n",
|
452 |
+
" </tr>\n",
|
453 |
+
" <tr>\n",
|
454 |
+
" <th>2</th>\n",
|
455 |
+
" <td>Z166069305_399</td>\n",
|
456 |
+
" <td>0.029324</td>\n",
|
457 |
+
" <td>Z166069305</td>\n",
|
458 |
+
" <td>399</td>\n",
|
459 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
460 |
+
" <td>1 1 1 , 571 Plinius berichtet nach dem Xen...</td>\n",
|
461 |
+
" <td>Plinius berichtet nach dem Xenophon, die Camel...</td>\n",
|
462 |
+
" <td>Plinius berichtet nach dem Xenophon, die Camel...</td>\n",
|
463 |
+
" <td>feb1b47b-effe-4e9d-be0f-7a749fed5ec0</td>\n",
|
464 |
+
" <td>[{'text_prep': 'Ich muss hier auch bemerken, d...</td>\n",
|
465 |
+
" <td>0.865306</td>\n",
|
466 |
+
" <td>13/4</td>\n",
|
467 |
+
" <td>D19</td>\n",
|
468 |
+
" <td>3</td>\n",
|
469 |
+
" </tr>\n",
|
470 |
+
" <tr>\n",
|
471 |
+
" <th>3</th>\n",
|
472 |
+
" <td>Z166069305_415</td>\n",
|
473 |
+
" <td>0.028860</td>\n",
|
474 |
+
" <td>Z166069305</td>\n",
|
475 |
+
" <td>415</td>\n",
|
476 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
477 |
+
" <td>587 nen. Ich ritt auf fie los, aber Hufrein l...</td>\n",
|
478 |
+
" <td>nen. Ich ritt auf fie los, aber Hufrein lies m...</td>\n",
|
479 |
+
" <td>Ich ritt auf sie los, aber Hufrein lies mich r...</td>\n",
|
480 |
+
" <td>02bb750e-0993-4341-ab74-934d28fc523e</td>\n",
|
481 |
+
" <td>[{'text_prep': 'Ehe wir noch an dieser Ore anl...</td>\n",
|
482 |
+
" <td>0.864078</td>\n",
|
483 |
+
" <td>17/3</td>\n",
|
484 |
+
" <td>D19</td>\n",
|
485 |
+
" <td>4</td>\n",
|
486 |
+
" </tr>\n",
|
487 |
+
" <tr>\n",
|
488 |
+
" <th>4</th>\n",
|
489 |
+
" <td>Z166069305_220</td>\n",
|
490 |
+
" <td>0.028485</td>\n",
|
491 |
+
" <td>Z166069305</td>\n",
|
492 |
+
" <td>220</td>\n",
|
493 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
494 |
+
" <td>' 92 lungen und Bewegungen. Mit einer Phyſiog...</td>\n",
|
495 |
+
" <td>lungen und Bewegungen. Mit einer Physiognomie,...</td>\n",
|
496 |
+
" <td>Lungen und Bewegungen. Mit einer Physiognomie,...</td>\n",
|
497 |
+
" <td>7949fe4e-5534-4522-b151-35b57a733650</td>\n",
|
498 |
+
" <td>[{'text_prep': 'Die Haustiere finden nirgends ...</td>\n",
|
499 |
+
" <td>0.869331</td>\n",
|
500 |
+
" <td>6/15</td>\n",
|
501 |
+
" <td>D19</td>\n",
|
502 |
+
" <td>5</td>\n",
|
503 |
+
" </tr>\n",
|
504 |
+
" <tr>\n",
|
505 |
+
" <th>...</th>\n",
|
506 |
+
" <td>...</td>\n",
|
507 |
+
" <td>...</td>\n",
|
508 |
+
" <td>...</td>\n",
|
509 |
+
" <td>...</td>\n",
|
510 |
+
" <td>...</td>\n",
|
511 |
+
" <td>...</td>\n",
|
512 |
+
" <td>...</td>\n",
|
513 |
+
" <td>...</td>\n",
|
514 |
+
" <td>...</td>\n",
|
515 |
+
" <td>...</td>\n",
|
516 |
+
" <td>...</td>\n",
|
517 |
+
" <td>...</td>\n",
|
518 |
+
" <td>...</td>\n",
|
519 |
+
" <td>...</td>\n",
|
520 |
+
" </tr>\n",
|
521 |
+
" <tr>\n",
|
522 |
+
" <th>478</th>\n",
|
523 |
+
" <td>Z166069305_493</td>\n",
|
524 |
+
" <td>0.001901</td>\n",
|
525 |
+
" <td>Z166069305</td>\n",
|
526 |
+
" <td>493</td>\n",
|
527 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
528 |
+
" <td>Pl. 5. Fig.2. Fig.3. Fig. 1 B Riedel fee...</td>\n",
|
529 |
+
" <td>Pl. 5. Fig.2. Fig.3. Fig. 1 Riedel fee. Fig. 1...</td>\n",
|
530 |
+
" <td>Pl. 5. Fig. 2. Fig. 3. Fig. 1 Riedel fee. Fig....</td>\n",
|
531 |
+
" <td>131e6f12-db13-422f-96e4-0924fe19026b</td>\n",
|
532 |
+
" <td>[{'text_prep': 'Fig. 1 Riedel fee.'}]</td>\n",
|
533 |
+
" <td>0.819874</td>\n",
|
534 |
+
" <td>466</td>\n",
|
535 |
+
" <td>D19</td>\n",
|
536 |
+
" <td>479</td>\n",
|
537 |
+
" </tr>\n",
|
538 |
+
" <tr>\n",
|
539 |
+
" <th>479</th>\n",
|
540 |
+
" <td>Z166069305_484</td>\n",
|
541 |
+
" <td>0.001898</td>\n",
|
542 |
+
" <td>Z166069305</td>\n",
|
543 |
+
" <td>484</td>\n",
|
544 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
545 |
+
" <td>1 - 1 Seite 109 Zeile I v. u. Abanſoon lies...</td>\n",
|
546 |
+
" <td>Seite 109 Zeile I v. u. Abansoon lies as anson...</td>\n",
|
547 |
+
" <td>Seite 109 Zeile I v. u. Abends sonnig, ansonst...</td>\n",
|
548 |
+
" <td>da3ced63-91a8-4710-81b6-40da8a1cf407</td>\n",
|
549 |
+
" <td>[{'text_prep': 'Seite 109 Zeile I v. u. Abends...</td>\n",
|
550 |
+
" <td>0.818248</td>\n",
|
551 |
+
" <td>467</td>\n",
|
552 |
+
" <td>D19</td>\n",
|
553 |
+
" <td>480</td>\n",
|
554 |
+
" </tr>\n",
|
555 |
+
" <tr>\n",
|
556 |
+
" <th>480</th>\n",
|
557 |
+
" <td>Z166069305_489</td>\n",
|
558 |
+
" <td>0.001894</td>\n",
|
559 |
+
" <td>Z166069305</td>\n",
|
560 |
+
" <td>489</td>\n",
|
561 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
562 |
+
" <td>Th. PL 3 ஏப்ரதம் THUNDE 1 4 Ironis ту C...</td>\n",
|
563 |
+
" <td>Th. PL 3 THUNDE Ironis Cine kanclerte tatue be...</td>\n",
|
564 |
+
" <td>Theodor Pl. 3 Thundersturm Ironische cine kan...</td>\n",
|
565 |
+
" <td>228e98a9-2d29-4f34-a0c2-adcdd2eeff09</td>\n",
|
566 |
+
" <td>[{'text_prep': 'Theodor Pl. 3 Thundersturm Ir...</td>\n",
|
567 |
+
" <td>0.818178</td>\n",
|
568 |
+
" <td>468</td>\n",
|
569 |
+
" <td>D19</td>\n",
|
570 |
+
" <td>481</td>\n",
|
571 |
+
" </tr>\n",
|
572 |
+
" <tr>\n",
|
573 |
+
" <th>481</th>\n",
|
574 |
+
" <td>Z166069305_482</td>\n",
|
575 |
+
" <td>0.001890</td>\n",
|
576 |
+
" <td>Z166069305</td>\n",
|
577 |
+
" <td>482</td>\n",
|
578 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
579 |
+
" <td>ie \" !!! 3 iC; 1: 0 Wis . bici\" ..) .\"., 6...</td>\n",
|
580 |
+
" <td>iC; 1: 0 Wis . bici\" ..) .\"., 6,7, ART 9 * noi...</td>\n",
|
581 |
+
" <td>Ich kam also um die neunte Stunde zu Wismar.</td>\n",
|
582 |
+
" <td>9e9b9254-c675-4427-a20e-88b8d723aec3</td>\n",
|
583 |
+
" <td>[{'text_prep': 'Ich kam also um die neunte Stu...</td>\n",
|
584 |
+
" <td>0.812620</td>\n",
|
585 |
+
" <td>469</td>\n",
|
586 |
+
" <td>D19</td>\n",
|
587 |
+
" <td>482</td>\n",
|
588 |
+
" </tr>\n",
|
589 |
+
" <tr>\n",
|
590 |
+
" <th>482</th>\n",
|
591 |
+
" <td>Z166069305_499</td>\n",
|
592 |
+
" <td>0.001887</td>\n",
|
593 |
+
" <td>Z166069305</td>\n",
|
594 |
+
" <td>499</td>\n",
|
595 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
596 |
+
" <td>Österreichische Nationalbibliothek + Z166069305</td>\n",
|
597 |
+
" <td>Osterreichische Nationalbibliothek + Z166069305</td>\n",
|
598 |
+
" <td>Please provide the faulty OCR texts generated ...</td>\n",
|
599 |
+
" <td>93723fd2-f359-4251-8621-513c4f6e1128</td>\n",
|
600 |
+
" <td>[{'text_prep': '(Please paste the text)'}]</td>\n",
|
601 |
+
" <td>0.805837</td>\n",
|
602 |
+
" <td>470</td>\n",
|
603 |
+
" <td>D19</td>\n",
|
604 |
+
" <td>483</td>\n",
|
605 |
+
" </tr>\n",
|
606 |
+
" </tbody>\n",
|
607 |
+
"</table>\n",
|
608 |
+
"<p>483 rows × 14 columns</p>\n",
|
609 |
+
"</div>"
|
610 |
+
],
|
611 |
+
"text/plain": [
|
612 |
+
" document rrf_score barcode page \\\n",
|
613 |
+
"0 Z166069305_430 0.032522 Z166069305 430 \n",
|
614 |
+
"1 Z166069305_10 0.030118 Z166069305 10 \n",
|
615 |
+
"2 Z166069305_399 0.029324 Z166069305 399 \n",
|
616 |
+
"3 Z166069305_415 0.028860 Z166069305 415 \n",
|
617 |
+
"4 Z166069305_220 0.028485 Z166069305 220 \n",
|
618 |
+
".. ... ... ... ... \n",
|
619 |
+
"478 Z166069305_493 0.001901 Z166069305 493 \n",
|
620 |
+
"479 Z166069305_484 0.001898 Z166069305 484 \n",
|
621 |
+
"480 Z166069305_489 0.001894 Z166069305 489 \n",
|
622 |
+
"481 Z166069305_482 0.001890 Z166069305 482 \n",
|
623 |
+
"482 Z166069305_499 0.001887 Z166069305 499 \n",
|
624 |
+
"\n",
|
625 |
+
" iiif_link \\\n",
|
626 |
+
"0 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
627 |
+
"1 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
628 |
+
"2 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
629 |
+
"3 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
630 |
+
"4 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
631 |
+
".. ... \n",
|
632 |
+
"478 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
633 |
+
"479 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
634 |
+
"480 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
635 |
+
"481 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
636 |
+
"482 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
637 |
+
"\n",
|
638 |
+
" text_orig \\\n",
|
639 |
+
"0 402 befand mich in einiger Entfernung davon, ... \n",
|
640 |
+
"1 VI große Flinten, die ihm ſeine Diener zur Se... \n",
|
641 |
+
"2 1 1 1 , 571 Plinius berichtet nach dem Xen... \n",
|
642 |
+
"3 587 nen. Ich ritt auf fie los, aber Hufrein l... \n",
|
643 |
+
"4 ' 92 lungen und Bewegungen. Mit einer Phyſiog... \n",
|
644 |
+
".. ... \n",
|
645 |
+
"478 Pl. 5. Fig.2. Fig.3. Fig. 1 B Riedel fee... \n",
|
646 |
+
"479 1 - 1 Seite 109 Zeile I v. u. Abanſoon lies... \n",
|
647 |
+
"480 Th. PL 3 ஏப்ரதம் THUNDE 1 4 Ironis ту C... \n",
|
648 |
+
"481 ie \" !!! 3 iC; 1: 0 Wis . bici\" ..) .\"., 6... \n",
|
649 |
+
"482 Österreichische Nationalbibliothek + Z166069305 \n",
|
650 |
+
"\n",
|
651 |
+
" text_clean \\\n",
|
652 |
+
"0 befand mich in einiger Entfernung davon, und d... \n",
|
653 |
+
"1 grose Flinten, die ihm seine Diener zur Seite ... \n",
|
654 |
+
"2 Plinius berichtet nach dem Xenophon, die Camel... \n",
|
655 |
+
"3 nen. Ich ritt auf fie los, aber Hufrein lies m... \n",
|
656 |
+
"4 lungen und Bewegungen. Mit einer Physiognomie,... \n",
|
657 |
+
".. ... \n",
|
658 |
+
"478 Pl. 5. Fig.2. Fig.3. Fig. 1 Riedel fee. Fig. 1... \n",
|
659 |
+
"479 Seite 109 Zeile I v. u. Abansoon lies as anson... \n",
|
660 |
+
"480 Th. PL 3 THUNDE Ironis Cine kanclerte tatue be... \n",
|
661 |
+
"481 iC; 1: 0 Wis . bici\" ..) .\"., 6,7, ART 9 * noi... \n",
|
662 |
+
"482 Osterreichische Nationalbibliothek + Z166069305 \n",
|
663 |
+
"\n",
|
664 |
+
" text_prep \\\n",
|
665 |
+
"0 befand mich in einiger Entfernung davon, und d... \n",
|
666 |
+
"1 große Flinten, die ihm seine Diener zur Seite ... \n",
|
667 |
+
"2 Plinius berichtet nach dem Xenophon, die Camel... \n",
|
668 |
+
"3 Ich ritt auf sie los, aber Hufrein lies mich r... \n",
|
669 |
+
"4 Lungen und Bewegungen. Mit einer Physiognomie,... \n",
|
670 |
+
".. ... \n",
|
671 |
+
"478 Pl. 5. Fig. 2. Fig. 3. Fig. 1 Riedel fee. Fig.... \n",
|
672 |
+
"479 Seite 109 Zeile I v. u. Abends sonnig, ansonst... \n",
|
673 |
+
"480 Theodor Pl. 3 Thundersturm Ironische cine kan... \n",
|
674 |
+
"481 Ich kam also um die neunte Stunde zu Wismar. \n",
|
675 |
+
"482 Please provide the faulty OCR texts generated ... \n",
|
676 |
+
"\n",
|
677 |
+
" _id \\\n",
|
678 |
+
"0 7e2b21a1-4cdb-4b6a-b1c0-75c30bfe13bb \n",
|
679 |
+
"1 ab0905d4-6ca7-4e94-8fb2-3d2081632d6d \n",
|
680 |
+
"2 feb1b47b-effe-4e9d-be0f-7a749fed5ec0 \n",
|
681 |
+
"3 02bb750e-0993-4341-ab74-934d28fc523e \n",
|
682 |
+
"4 7949fe4e-5534-4522-b151-35b57a733650 \n",
|
683 |
+
".. ... \n",
|
684 |
+
"478 131e6f12-db13-422f-96e4-0924fe19026b \n",
|
685 |
+
"479 da3ced63-91a8-4710-81b6-40da8a1cf407 \n",
|
686 |
+
"480 228e98a9-2d29-4f34-a0c2-adcdd2eeff09 \n",
|
687 |
+
"481 9e9b9254-c675-4427-a20e-88b8d723aec3 \n",
|
688 |
+
"482 93723fd2-f359-4251-8621-513c4f6e1128 \n",
|
689 |
+
"\n",
|
690 |
+
" _highlights _score rank corpus \\\n",
|
691 |
+
"0 [{'text_prep': 'befand mich in einiger Entfern... 0.892140 1/2 D19 \n",
|
692 |
+
"1 [{'text_prep': 'Sein letztes Hulfritt trifft z... 0.872264 4/9 D19 \n",
|
693 |
+
"2 [{'text_prep': 'Ich muss hier auch bemerken, d... 0.865306 13/4 D19 \n",
|
694 |
+
"3 [{'text_prep': 'Ehe wir noch an dieser Ore anl... 0.864078 17/3 D19 \n",
|
695 |
+
"4 [{'text_prep': 'Die Haustiere finden nirgends ... 0.869331 6/15 D19 \n",
|
696 |
+
".. ... ... ... ... \n",
|
697 |
+
"478 [{'text_prep': 'Fig. 1 Riedel fee.'}] 0.819874 466 D19 \n",
|
698 |
+
"479 [{'text_prep': 'Seite 109 Zeile I v. u. Abends... 0.818248 467 D19 \n",
|
699 |
+
"480 [{'text_prep': 'Theodor Pl. 3 Thundersturm Ir... 0.818178 468 D19 \n",
|
700 |
+
"481 [{'text_prep': 'Ich kam also um die neunte Stu... 0.812620 469 D19 \n",
|
701 |
+
"482 [{'text_prep': '(Please paste the text)'}] 0.805837 470 D19 \n",
|
702 |
+
"\n",
|
703 |
+
" rerank \n",
|
704 |
+
"0 1 \n",
|
705 |
+
"1 2 \n",
|
706 |
+
"2 3 \n",
|
707 |
+
"3 4 \n",
|
708 |
+
"4 5 \n",
|
709 |
+
".. ... \n",
|
710 |
+
"478 479 \n",
|
711 |
+
"479 480 \n",
|
712 |
+
"480 481 \n",
|
713 |
+
"481 482 \n",
|
714 |
+
"482 483 \n",
|
715 |
+
"\n",
|
716 |
+
"[483 rows x 14 columns]"
|
717 |
+
]
|
718 |
+
},
|
719 |
+
"execution_count": 11,
|
720 |
+
"metadata": {},
|
721 |
+
"output_type": "execute_result"
|
722 |
+
}
|
723 |
+
],
|
724 |
+
"source": [
|
725 |
+
"## Reciprocal Rank Fusion (RRF)\n",
|
726 |
+
"\n",
|
727 |
+
"# Set the RRF parameter\n",
|
728 |
+
"k = 60\n",
|
729 |
+
"\n",
|
730 |
+
"# Concatenate the DataFrames to combine all rankings into one DataFrame\n",
|
731 |
+
"combined_df = pd.concat([output1, output2], ignore_index=True)\n",
|
732 |
+
"\n",
|
733 |
+
"# Initialize the RRF score column to 0\n",
|
734 |
+
"combined_df['rrf_score'] = 0\n",
|
735 |
+
"\n",
|
736 |
+
"# Function to update RRF scores\n",
|
737 |
+
"def update_rrf_scores(df, k):\n",
|
738 |
+
" # Calculate RRF score using the formula and add to the rrf_score column\n",
|
739 |
+
" df['rrf_score'] += 1 / (k + df['rank'])\n",
|
740 |
+
"\n",
|
741 |
+
"# Update RRF scores for the combined DataFrame\n",
|
742 |
+
"update_rrf_scores(combined_df, k)\n",
|
743 |
+
"\n",
|
744 |
+
"# Custom aggregation function to combine values with a '/'\n",
|
745 |
+
"def combine_values(values):\n",
|
746 |
+
" # Convert to string and join unique values with a '/' separator\n",
|
747 |
+
" return '/'.join(values.astype(str).unique())\n",
|
748 |
+
"\n",
|
749 |
+
"# Group by 'item' and sum the RRF scores for each item\n",
|
750 |
+
"final_scores_df = final_scores_df = combined_df.groupby('document', as_index=False).agg({\n",
|
751 |
+
" 'rrf_score': 'sum',\n",
|
752 |
+
" 'barcode': 'first',\n",
|
753 |
+
" 'page': 'first',\n",
|
754 |
+
" 'iiif_link': 'first',\n",
|
755 |
+
" 'text_orig': 'first',\n",
|
756 |
+
" 'text_clean': 'first',\n",
|
757 |
+
" 'text_prep': 'first',\n",
|
758 |
+
" '_id': 'first',\n",
|
759 |
+
" '_highlights': 'first',\n",
|
760 |
+
" '_score': 'first',\n",
|
761 |
+
" 'rank': combine_values,\n",
|
762 |
+
" 'corpus': 'first'\n",
|
763 |
+
"})\n",
|
764 |
+
"\n",
|
765 |
+
"# Sort by total RRF score in descending order and reset index\n",
|
766 |
+
"final_scores_df = final_scores_df.sort_values(by='rrf_score', ascending=False).reset_index(drop=True)\n",
|
767 |
+
"\n",
|
768 |
+
"# Add rerank based on the sorted order\n",
|
769 |
+
"final_scores_df['rerank'] = final_scores_df.index + 1\n",
|
770 |
+
"\n",
|
771 |
+
"# Display the final DataFrame\n",
|
772 |
+
"#print(f'Total results: {len(final_scores_df)}')\n",
|
773 |
+
"#final_scores_df\n",
|
774 |
+
"\n",
|
775 |
+
"final_scores_df"
|
776 |
+
]
|
777 |
+
},
|
778 |
+
{
|
779 |
+
"cell_type": "code",
|
780 |
+
"execution_count": null,
|
781 |
+
"id": "47aad8d3",
|
782 |
+
"metadata": {
|
783 |
+
"code_folding": [
|
784 |
+
0
|
785 |
+
]
|
786 |
+
},
|
787 |
+
"outputs": [],
|
788 |
+
"source": [
|
789 |
+
"## Multi-Term Queries\n",
|
790 |
+
"\n",
|
791 |
+
"query = 'Nur Pflanzen, Vegetation und Flora'\n",
|
792 |
+
"limit = 1000 # max limit = 1000\n",
|
793 |
+
"threshold = 0.80 # set threshold\n",
|
794 |
+
"filter_string = \"barcode:(Z166069305) OR barcode:(Z166069408)\" # Sonnini\n",
|
795 |
+
"\n",
|
796 |
+
"results = marqoClient.index(indexName).search(\n",
|
797 |
+
" {\n",
|
798 |
+
" query: 2.0,\n",
|
799 |
+
" \"Tiere oder jegliche Fauna\": -0.8,\n",
|
800 |
+
" \"wichtige Ernährung für die Bevölkerung\": +0.8,\n",
|
801 |
+
" #\"Meteorologische Beobachtungen\": -0.8,\n",
|
802 |
+
" },\n",
|
803 |
+
" limit=limit,\n",
|
804 |
+
" #filter_string=filter_string\n",
|
805 |
+
")\n",
|
806 |
+
"\n",
|
807 |
+
"output = pd.DataFrame(results[\"hits\"])\n",
|
808 |
+
"output = output[output['_score'] > threshold] # filter results above threshold\n",
|
809 |
+
"\n",
|
810 |
+
"print(f'Total results: {len(output)}')\n",
|
811 |
+
"#output"
|
812 |
+
]
|
813 |
+
},
|
814 |
+
{
|
815 |
+
"cell_type": "markdown",
|
816 |
+
"id": "7cd84368",
|
817 |
+
"metadata": {},
|
818 |
+
"source": [
|
819 |
+
"# Generate Preview"
|
820 |
+
]
|
821 |
+
},
|
822 |
+
{
|
823 |
+
"cell_type": "code",
|
824 |
+
"execution_count": 16,
|
825 |
+
"id": "a045bdfd",
|
826 |
+
"metadata": {
|
827 |
+
"code_folding": [
|
828 |
+
0
|
829 |
+
],
|
830 |
+
"scrolled": false
|
831 |
+
},
|
832 |
+
"outputs": [
|
833 |
+
{
|
834 |
+
"data": {
|
835 |
+
"application/vnd.jupyter.widget-view+json": {
|
836 |
+
"model_id": "52fc4bb25414483dad663802648a3559",
|
837 |
+
"version_major": 2,
|
838 |
+
"version_minor": 0
|
839 |
+
},
|
840 |
+
"text/plain": [
|
841 |
+
"VBox(children=(Box(children=(HBox(children=(HTML(value='<b>1</b>'), HTML(value='<i>Z166069305_430: </i><mark>b…"
|
842 |
+
]
|
843 |
+
},
|
844 |
+
"metadata": {},
|
845 |
+
"output_type": "display_data"
|
846 |
+
},
|
847 |
+
{
|
848 |
+
"data": {
|
849 |
+
"application/vnd.jupyter.widget-view+json": {
|
850 |
+
"model_id": "334de0628ea440049482be20848ffcf7",
|
851 |
+
"version_major": 2,
|
852 |
+
"version_minor": 0
|
853 |
+
},
|
854 |
+
"text/plain": [
|
855 |
+
"Output(layout=Layout(border='1px solid black', height='auto', padding='10px', width='100%'))"
|
856 |
+
]
|
857 |
+
},
|
858 |
+
"metadata": {},
|
859 |
+
"output_type": "display_data"
|
860 |
+
}
|
861 |
+
],
|
862 |
+
"source": [
|
863 |
+
"## Preview R matches\n",
|
864 |
+
"\n",
|
865 |
+
"R=20 # set number of results to be previewed\n",
|
866 |
+
"\n",
|
867 |
+
"# Function to unpack and concatenate all texts from the lists of dictionaries\n",
|
868 |
+
"def unpack_texts(series):\n",
|
869 |
+
" return series.apply(lambda x: ' '.join([d.get('text_clean', d.get('text_prep', '')) for d in x if isinstance(d, dict)]))\n",
|
870 |
+
"\n",
|
871 |
+
"# Apply the function to the '_highlights' column and create a new column 'unpacked_highlights'\n",
|
872 |
+
"final_scores_df['unpacked_highlights'] = unpack_texts(final_scores_df['_highlights'])\n",
|
873 |
+
"#print(output['_highlights'])\n",
|
874 |
+
"\n",
|
875 |
+
"text_col = 'text_prep' if 'text_prep' in final_scores_df.columns else 'text_clean'\n",
|
876 |
+
"\n",
|
877 |
+
"# Check if 'unpacked_highlights' is empty and replace it if necessary\n",
|
878 |
+
"final_scores_df['unpacked_highlights'] = final_scores_df.apply(\n",
|
879 |
+
" lambda row: row[text_col] if not row['unpacked_highlights'] else row['unpacked_highlights'],\n",
|
880 |
+
" axis=1\n",
|
881 |
+
")\n",
|
882 |
+
"\n",
|
883 |
+
"# Define a function to highlight parts of the text\n",
|
884 |
+
"def highlight_text(text, highlights):\n",
|
885 |
+
" # Ensure highlights is a list of strings\n",
|
886 |
+
" if isinstance(highlights, str):\n",
|
887 |
+
" highlights = [highlights]\n",
|
888 |
+
" # Wrap each highlight in <mark> tags\n",
|
889 |
+
" for highlight in highlights:\n",
|
890 |
+
" # Replace highlight text with a highlighted version\n",
|
891 |
+
" text = text.replace(highlight, f'<mark>{highlight}</mark>')\n",
|
892 |
+
" return text\n",
|
893 |
+
"\n",
|
894 |
+
"# Define a function to display full details of a row\n",
|
895 |
+
"def show_details(row):\n",
|
896 |
+
" details = widgets.Output()\n",
|
897 |
+
" with details:\n",
|
898 |
+
" # Use inline styles to ensure no scrolling\n",
|
899 |
+
" content = f\"\"\"\n",
|
900 |
+
" <div style=\"border: 1px solid black; padding: 10px; max-width: 100%; box-sizing: border-box;\">\n",
|
901 |
+
" <h3>Details for {row['barcode']}, {row['page']}</h3>\n",
|
902 |
+
" <p><b>Retrieved text chunk: </b><i>{row[\"unpacked_highlights\"]}</i></p>\n",
|
903 |
+
" <p>{highlight_text(row[text_col], row[\"unpacked_highlights\"])}</p>\n",
|
904 |
+
" <img src=\"{row[\"iiif_link\"]}\" alt=\"IIIF Image Preview\" style=\"max-width: 100%; height: auto;\">\n",
|
905 |
+
" <a href=\"https://digital.onb.ac.at/OnbViewer/viewer.faces?doc=ABO_%2B{row[\"barcode\"]}\" target=\"_blank\">Open ÖNB Viewer</a>\n",
|
906 |
+
" </div>\n",
|
907 |
+
" \"\"\"\n",
|
908 |
+
" display(HTML(content))\n",
|
909 |
+
" return details\n",
|
910 |
+
"\n",
|
911 |
+
"# Define a function to create a row with more information and a clickable button\n",
|
912 |
+
"def create_row(row, idx):\n",
|
913 |
+
" button = widgets.Button(description=\"Inspect\", layout=widgets.Layout(width=\"auto\"))\n",
|
914 |
+
"\n",
|
915 |
+
" # When the button is clicked, show more details\n",
|
916 |
+
" def on_button_clicked(b):\n",
|
917 |
+
" details_box.clear_output()\n",
|
918 |
+
" with details_box:\n",
|
919 |
+
" display(show_details(row))\n",
|
920 |
+
" \n",
|
921 |
+
" button.on_click(on_button_clicked)\n",
|
922 |
+
" \n",
|
923 |
+
" # Highlight the text in the 'unpacked_highlights' column\n",
|
924 |
+
" unpacked = highlight_text(row['unpacked_highlights'], row['unpacked_highlights'])\n",
|
925 |
+
" \n",
|
926 |
+
" # Create a display row with button, highlights, and iiif links\n",
|
927 |
+
" row_display = widgets.HBox([\n",
|
928 |
+
" widgets.HTML(value=f\"<b>{idx + 1}</b>\"), # Display the index + 1\n",
|
929 |
+
" widgets.HTML(\n",
|
930 |
+
" value=f\"<i>{row['document']}: </i>{unpacked}\",\n",
|
931 |
+
" layout=widgets.Layout(width='500px')\n",
|
932 |
+
" ), # Highlights as text\n",
|
933 |
+
" widgets.HTML(\n",
|
934 |
+
" value=f\"Score: {row['_score']:.3f}\",\n",
|
935 |
+
" layout=widgets.Layout(width='100px')\n",
|
936 |
+
" ), # Score as text\n",
|
937 |
+
" widgets.HTML(\n",
|
938 |
+
" value=f\"Prev. ranks: {row['rank']}\",\n",
|
939 |
+
" layout=widgets.Layout(width='100px')\n",
|
940 |
+
" ), # Ranks as text\n",
|
941 |
+
" widgets.HTML(\n",
|
942 |
+
" value=f\"Corpus: {row['corpus']}\",\n",
|
943 |
+
" layout=widgets.Layout(width='100px')\n",
|
944 |
+
" ), # Corpus as text\n",
|
945 |
+
" button # Button to view more details\n",
|
946 |
+
" ])\n",
|
947 |
+
" \n",
|
948 |
+
" # Wrap the row display in a Box with a border\n",
|
949 |
+
" boxed_row_display = widgets.Box(\n",
|
950 |
+
" [row_display],\n",
|
951 |
+
" layout=widgets.Layout(\n",
|
952 |
+
" border='1px solid black', # Set the border style\n",
|
953 |
+
" padding='10px', # Add padding inside the box\n",
|
954 |
+
" margin='5px 0px', # Add margin above and below the box\n",
|
955 |
+
" width='100%' # Ensure the box spans the full width\n",
|
956 |
+
" )\n",
|
957 |
+
" )\n",
|
958 |
+
" \n",
|
959 |
+
" return boxed_row_display\n",
|
960 |
+
"\n",
|
961 |
+
"# Create an interactive list of display rows (one for each DataFrame row)\n",
|
962 |
+
"rows = [create_row(row, idx) for idx, row in final_scores_df.head(R).iterrows()]\n",
|
963 |
+
"\n",
|
964 |
+
"# Display area for detailed view\n",
|
965 |
+
"details_box = widgets.Output()\n",
|
966 |
+
"details_box.layout = widgets.Layout(\n",
|
967 |
+
" border='1px solid black',\n",
|
968 |
+
" padding='10px',\n",
|
969 |
+
" width='100%', # Ensure full width\n",
|
970 |
+
" height='auto', # Adjust height automatically\n",
|
971 |
+
")\n",
|
972 |
+
"\n",
|
973 |
+
"# Display the rows and details box\n",
|
974 |
+
"rows_box = widgets.VBox(rows)\n",
|
975 |
+
"display(rows_box, details_box)"
|
976 |
+
]
|
977 |
+
},
|
978 |
+
{
|
979 |
+
"cell_type": "markdown",
|
980 |
+
"id": "88e0078b",
|
981 |
+
"metadata": {},
|
982 |
+
"source": [
|
983 |
+
"# Similarity Metrics with Sentence Transformer"
|
984 |
+
]
|
985 |
+
},
|
986 |
+
{
|
987 |
+
"cell_type": "code",
|
988 |
+
"execution_count": 84,
|
989 |
+
"id": "c783b576",
|
990 |
+
"metadata": {},
|
991 |
+
"outputs": [],
|
992 |
+
"source": [
|
993 |
+
"from sentence_transformers import SentenceTransformer, util"
|
994 |
+
]
|
995 |
+
},
|
996 |
+
{
|
997 |
+
"cell_type": "code",
|
998 |
+
"execution_count": 85,
|
999 |
+
"id": "b5db773f",
|
1000 |
+
"metadata": {},
|
1001 |
+
"outputs": [
|
1002 |
+
{
|
1003 |
+
"name": "stderr",
|
1004 |
+
"output_type": "stream",
|
1005 |
+
"text": [
|
1006 |
+
"C:\\Users\\vignolim\\Anaconda3\\lib\\site-packages\\transformers\\tokenization_utils_base.py:1601: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted in transformers v4.45, and will be then set to `False` by default. For more details check this issue: https://github.com/huggingface/transformers/issues/31884\n",
|
1007 |
+
" warnings.warn(\n"
|
1008 |
+
]
|
1009 |
+
}
|
1010 |
+
],
|
1011 |
+
"source": [
|
1012 |
+
"# Load a pre-trained Sentence Transformer model\n",
|
1013 |
+
"model = SentenceTransformer('paraphrase-MiniLM-L6-v2')\n",
|
1014 |
+
"\n",
|
1015 |
+
"# List of sentences\n",
|
1016 |
+
"list_of_sentences = final_scores_df['unpacked_highlights'].tolist()\n",
|
1017 |
+
"\n",
|
1018 |
+
"# Step 1: Encode the sentences and the query into embeddings\n",
|
1019 |
+
"sentence_embeddings = model.encode(list_of_sentences, convert_to_tensor=True)\n",
|
1020 |
+
"query_embedding = model.encode(query, convert_to_tensor=True)\n",
|
1021 |
+
"\n",
|
1022 |
+
"# Step 2: Compute the cosine similarities between the query and the list of sentences\n",
|
1023 |
+
"cosine_scores = util.pytorch_cos_sim(query_embedding, sentence_embeddings)\n",
|
1024 |
+
"\n",
|
1025 |
+
"# Step 3: Create a pandas DataFrame with sentences and their corresponding cosine similarity scores\n",
|
1026 |
+
"df_cosine_sim = pd.DataFrame({\n",
|
1027 |
+
" 'Sentence': list_of_sentences,\n",
|
1028 |
+
" 'Cosine Similarity': cosine_scores.squeeze().tolist()\n",
|
1029 |
+
"})"
|
1030 |
+
]
|
1031 |
+
},
|
1032 |
+
{
|
1033 |
+
"cell_type": "code",
|
1034 |
+
"execution_count": 86,
|
1035 |
+
"id": "33ef4dfa",
|
1036 |
+
"metadata": {},
|
1037 |
+
"outputs": [
|
1038 |
+
{
|
1039 |
+
"data": {
|
1040 |
+
"text/html": [
|
1041 |
+
"<div>\n",
|
1042 |
+
"<style scoped>\n",
|
1043 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
1044 |
+
" vertical-align: middle;\n",
|
1045 |
+
" }\n",
|
1046 |
+
"\n",
|
1047 |
+
" .dataframe tbody tr th {\n",
|
1048 |
+
" vertical-align: top;\n",
|
1049 |
+
" }\n",
|
1050 |
+
"\n",
|
1051 |
+
" .dataframe thead th {\n",
|
1052 |
+
" text-align: right;\n",
|
1053 |
+
" }\n",
|
1054 |
+
"</style>\n",
|
1055 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
1056 |
+
" <thead>\n",
|
1057 |
+
" <tr style=\"text-align: right;\">\n",
|
1058 |
+
" <th></th>\n",
|
1059 |
+
" <th>Sentence</th>\n",
|
1060 |
+
" <th>Cosine Similarity</th>\n",
|
1061 |
+
" </tr>\n",
|
1062 |
+
" </thead>\n",
|
1063 |
+
" <tbody>\n",
|
1064 |
+
" <tr>\n",
|
1065 |
+
" <th>0</th>\n",
|
1066 |
+
" <td>S. sono. Gmelin hat diesen Vogel in der 13.</td>\n",
|
1067 |
+
" <td>0.587100</td>\n",
|
1068 |
+
" </tr>\n",
|
1069 |
+
" <tr>\n",
|
1070 |
+
" <th>1</th>\n",
|
1071 |
+
" <td>Ich todtete eine gelbe Bachstelze ***) und ein...</td>\n",
|
1072 |
+
" <td>0.293896</td>\n",
|
1073 |
+
" </tr>\n",
|
1074 |
+
" <tr>\n",
|
1075 |
+
" <th>2</th>\n",
|
1076 |
+
" <td>Auf diesem Damme stehen von Zeit zu Zeit klein...</td>\n",
|
1077 |
+
" <td>0.347629</td>\n",
|
1078 |
+
" </tr>\n",
|
1079 |
+
" <tr>\n",
|
1080 |
+
" <th>3</th>\n",
|
1081 |
+
" <td>Indessen schienen mir diese Vögel sich mehr de...</td>\n",
|
1082 |
+
" <td>0.471382</td>\n",
|
1083 |
+
" </tr>\n",
|
1084 |
+
" <tr>\n",
|
1085 |
+
" <th>4</th>\n",
|
1086 |
+
" <td>Die Wanderung dieser Vögel nach Ägypten hat je...</td>\n",
|
1087 |
+
" <td>0.476594</td>\n",
|
1088 |
+
" </tr>\n",
|
1089 |
+
" <tr>\n",
|
1090 |
+
" <th>5</th>\n",
|
1091 |
+
" <td>Ich traf auch eine Wachtel an, die ich sogleic...</td>\n",
|
1092 |
+
" <td>0.200288</td>\n",
|
1093 |
+
" </tr>\n",
|
1094 |
+
" <tr>\n",
|
1095 |
+
" <th>6</th>\n",
|
1096 |
+
" <td>Diese Vögel sind sehr fett, ihr Fleisch ist za...</td>\n",
|
1097 |
+
" <td>0.407871</td>\n",
|
1098 |
+
" </tr>\n",
|
1099 |
+
" <tr>\n",
|
1100 |
+
" <th>7</th>\n",
|
1101 |
+
" <td>Reichhaltig war er. Kein Vogel ist in Ägypten ...</td>\n",
|
1102 |
+
" <td>0.515706</td>\n",
|
1103 |
+
" </tr>\n",
|
1104 |
+
" <tr>\n",
|
1105 |
+
" <th>8</th>\n",
|
1106 |
+
" <td>Linn. Den ersten Morgens bei einem sehr schöne...</td>\n",
|
1107 |
+
" <td>0.310032</td>\n",
|
1108 |
+
" </tr>\n",
|
1109 |
+
" <tr>\n",
|
1110 |
+
" <th>9</th>\n",
|
1111 |
+
" <td>Bearbeitung der Gegenden um den Canal Salza. F...</td>\n",
|
1112 |
+
" <td>0.271253</td>\n",
|
1113 |
+
" </tr>\n",
|
1114 |
+
" <tr>\n",
|
1115 |
+
" <th>10</th>\n",
|
1116 |
+
" <td>Diese Vögel hielten sich nicht in der Nähe von</td>\n",
|
1117 |
+
" <td>0.515144</td>\n",
|
1118 |
+
" </tr>\n",
|
1119 |
+
" <tr>\n",
|
1120 |
+
" <th>11</th>\n",
|
1121 |
+
" <td>nennt. Diese letztgenannten Vögel sprangen paa...</td>\n",
|
1122 |
+
" <td>0.336441</td>\n",
|
1123 |
+
" </tr>\n",
|
1124 |
+
" <tr>\n",
|
1125 |
+
" <th>12</th>\n",
|
1126 |
+
" <td>Binther- Nilaat- Schildfröste- te- Vogel- Raub...</td>\n",
|
1127 |
+
" <td>0.163589</td>\n",
|
1128 |
+
" </tr>\n",
|
1129 |
+
" <tr>\n",
|
1130 |
+
" <th>13</th>\n",
|
1131 |
+
" <td>Durch diese habe ich auch den Unterschied des ...</td>\n",
|
1132 |
+
" <td>0.268166</td>\n",
|
1133 |
+
" </tr>\n",
|
1134 |
+
" <tr>\n",
|
1135 |
+
" <th>14</th>\n",
|
1136 |
+
" <td>Sie sagten uns, diese Vögel kommen oft nach Ab...</td>\n",
|
1137 |
+
" <td>0.320704</td>\n",
|
1138 |
+
" </tr>\n",
|
1139 |
+
" <tr>\n",
|
1140 |
+
" <th>15</th>\n",
|
1141 |
+
" <td>Es hielt daher sehr schwer, sich einen von die...</td>\n",
|
1142 |
+
" <td>0.352532</td>\n",
|
1143 |
+
" </tr>\n",
|
1144 |
+
" <tr>\n",
|
1145 |
+
" <th>16</th>\n",
|
1146 |
+
" <td>Senesbaum Vogel Beschreibung einer Art von Fal...</td>\n",
|
1147 |
+
" <td>0.317160</td>\n",
|
1148 |
+
" </tr>\n",
|
1149 |
+
" <tr>\n",
|
1150 |
+
" <th>17</th>\n",
|
1151 |
+
" <td>Diese Vögel gehören nicht zu einerlei Art. Die...</td>\n",
|
1152 |
+
" <td>0.433839</td>\n",
|
1153 |
+
" </tr>\n",
|
1154 |
+
" <tr>\n",
|
1155 |
+
" <th>18</th>\n",
|
1156 |
+
" <td>Ich erkannte Beccafien, Feldlerchen und Sperli...</td>\n",
|
1157 |
+
" <td>0.303594</td>\n",
|
1158 |
+
" </tr>\n",
|
1159 |
+
" <tr>\n",
|
1160 |
+
" <th>19</th>\n",
|
1161 |
+
" <td>Kapitel. Natron -- Bleichen der Leinwand und d...</td>\n",
|
1162 |
+
" <td>0.265617</td>\n",
|
1163 |
+
" </tr>\n",
|
1164 |
+
" </tbody>\n",
|
1165 |
+
"</table>\n",
|
1166 |
+
"</div>"
|
1167 |
+
],
|
1168 |
+
"text/plain": [
|
1169 |
+
" Sentence Cosine Similarity\n",
|
1170 |
+
"0 S. sono. Gmelin hat diesen Vogel in der 13. 0.587100\n",
|
1171 |
+
"1 Ich todtete eine gelbe Bachstelze ***) und ein... 0.293896\n",
|
1172 |
+
"2 Auf diesem Damme stehen von Zeit zu Zeit klein... 0.347629\n",
|
1173 |
+
"3 Indessen schienen mir diese Vögel sich mehr de... 0.471382\n",
|
1174 |
+
"4 Die Wanderung dieser Vögel nach Ägypten hat je... 0.476594\n",
|
1175 |
+
"5 Ich traf auch eine Wachtel an, die ich sogleic... 0.200288\n",
|
1176 |
+
"6 Diese Vögel sind sehr fett, ihr Fleisch ist za... 0.407871\n",
|
1177 |
+
"7 Reichhaltig war er. Kein Vogel ist in Ägypten ... 0.515706\n",
|
1178 |
+
"8 Linn. Den ersten Morgens bei einem sehr schöne... 0.310032\n",
|
1179 |
+
"9 Bearbeitung der Gegenden um den Canal Salza. F... 0.271253\n",
|
1180 |
+
"10 Diese Vögel hielten sich nicht in der Nähe von 0.515144\n",
|
1181 |
+
"11 nennt. Diese letztgenannten Vögel sprangen paa... 0.336441\n",
|
1182 |
+
"12 Binther- Nilaat- Schildfröste- te- Vogel- Raub... 0.163589\n",
|
1183 |
+
"13 Durch diese habe ich auch den Unterschied des ... 0.268166\n",
|
1184 |
+
"14 Sie sagten uns, diese Vögel kommen oft nach Ab... 0.320704\n",
|
1185 |
+
"15 Es hielt daher sehr schwer, sich einen von die... 0.352532\n",
|
1186 |
+
"16 Senesbaum Vogel Beschreibung einer Art von Fal... 0.317160\n",
|
1187 |
+
"17 Diese Vögel gehören nicht zu einerlei Art. Die... 0.433839\n",
|
1188 |
+
"18 Ich erkannte Beccafien, Feldlerchen und Sperli... 0.303594\n",
|
1189 |
+
"19 Kapitel. Natron -- Bleichen der Leinwand und d... 0.265617"
|
1190 |
+
]
|
1191 |
+
},
|
1192 |
+
"execution_count": 86,
|
1193 |
+
"metadata": {},
|
1194 |
+
"output_type": "execute_result"
|
1195 |
+
}
|
1196 |
+
],
|
1197 |
+
"source": [
|
1198 |
+
"df_cosine_sim.head(R)"
|
1199 |
+
]
|
1200 |
+
},
|
1201 |
+
{
|
1202 |
+
"cell_type": "code",
|
1203 |
+
"execution_count": 87,
|
1204 |
+
"id": "c26885b7",
|
1205 |
+
"metadata": {},
|
1206 |
+
"outputs": [],
|
1207 |
+
"source": [
|
1208 |
+
"# Add SentenceTransformer Cosine Similarity score to output dataframe\n",
|
1209 |
+
"final_scores_df['ST_cosine_similarity'] = df_cosine_sim['Cosine Similarity']"
|
1210 |
+
]
|
1211 |
+
},
|
1212 |
+
{
|
1213 |
+
"cell_type": "code",
|
1214 |
+
"execution_count": 88,
|
1215 |
+
"id": "17d5abeb",
|
1216 |
+
"metadata": {},
|
1217 |
+
"outputs": [],
|
1218 |
+
"source": [
|
1219 |
+
"# Add direct link to ÖNB Viewer to output dataframe\n",
|
1220 |
+
"base_url = \"https://digital.onb.ac.at/OnbViewer/viewer.faces?doc=ABO_%2B\"\n",
|
1221 |
+
"\n",
|
1222 |
+
"# Create the 'onb_viewer_link' column by appending each 'barcode' to the base URL\n",
|
1223 |
+
"final_scores_df['onb_viewer_link'] = base_url + final_scores_df['barcode'].astype(str)"
|
1224 |
+
]
|
1225 |
+
},
|
1226 |
+
{
|
1227 |
+
"cell_type": "code",
|
1228 |
+
"execution_count": 89,
|
1229 |
+
"id": "b6a2d7e0",
|
1230 |
+
"metadata": {},
|
1231 |
+
"outputs": [
|
1232 |
+
{
|
1233 |
+
"data": {
|
1234 |
+
"text/html": [
|
1235 |
+
"<div>\n",
|
1236 |
+
"<style scoped>\n",
|
1237 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
1238 |
+
" vertical-align: middle;\n",
|
1239 |
+
" }\n",
|
1240 |
+
"\n",
|
1241 |
+
" .dataframe tbody tr th {\n",
|
1242 |
+
" vertical-align: top;\n",
|
1243 |
+
" }\n",
|
1244 |
+
"\n",
|
1245 |
+
" .dataframe thead th {\n",
|
1246 |
+
" text-align: right;\n",
|
1247 |
+
" }\n",
|
1248 |
+
"</style>\n",
|
1249 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
1250 |
+
" <thead>\n",
|
1251 |
+
" <tr style=\"text-align: right;\">\n",
|
1252 |
+
" <th></th>\n",
|
1253 |
+
" <th>document</th>\n",
|
1254 |
+
" <th>rrf_score</th>\n",
|
1255 |
+
" <th>barcode</th>\n",
|
1256 |
+
" <th>page</th>\n",
|
1257 |
+
" <th>iiif_link</th>\n",
|
1258 |
+
" <th>text_orig</th>\n",
|
1259 |
+
" <th>text_clean</th>\n",
|
1260 |
+
" <th>text_prep</th>\n",
|
1261 |
+
" <th>_id</th>\n",
|
1262 |
+
" <th>_highlights</th>\n",
|
1263 |
+
" <th>_score</th>\n",
|
1264 |
+
" <th>rank</th>\n",
|
1265 |
+
" <th>corpus</th>\n",
|
1266 |
+
" <th>rerank</th>\n",
|
1267 |
+
" <th>unpacked_highlights</th>\n",
|
1268 |
+
" <th>ST_cosine_similarity</th>\n",
|
1269 |
+
" <th>onb_viewer_link</th>\n",
|
1270 |
+
" </tr>\n",
|
1271 |
+
" </thead>\n",
|
1272 |
+
" <tbody>\n",
|
1273 |
+
" <tr>\n",
|
1274 |
+
" <th>0</th>\n",
|
1275 |
+
" <td>Z166069305_252</td>\n",
|
1276 |
+
" <td>0.032258</td>\n",
|
1277 |
+
" <td>Z166069305</td>\n",
|
1278 |
+
" <td>252</td>\n",
|
1279 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1280 |
+
" <td>224 7 Dieſer Falke war ein Weibchen. Der Ein...</td>\n",
|
1281 |
+
" <td>Dieser Falke war ein Weibchen. Der Eingeweide ...</td>\n",
|
1282 |
+
" <td>Dieser Falke war ein Weibchen. Der Eingeweidek...</td>\n",
|
1283 |
+
" <td>8a5893c6-fde6-42f3-94e3-16db95629fe5</td>\n",
|
1284 |
+
" <td>[{'text_prep': 'S. sono. Gmelin hat diesen Vog...</td>\n",
|
1285 |
+
" <td>0.856740</td>\n",
|
1286 |
+
" <td>2</td>\n",
|
1287 |
+
" <td>D19</td>\n",
|
1288 |
+
" <td>1</td>\n",
|
1289 |
+
" <td>S. sono. Gmelin hat diesen Vogel in der 13.</td>\n",
|
1290 |
+
" <td>0.587100</td>\n",
|
1291 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1292 |
+
" </tr>\n",
|
1293 |
+
" <tr>\n",
|
1294 |
+
" <th>1</th>\n",
|
1295 |
+
" <td>Z166069305_43</td>\n",
|
1296 |
+
" <td>0.031025</td>\n",
|
1297 |
+
" <td>Z166069305</td>\n",
|
1298 |
+
" <td>43</td>\n",
|
1299 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1300 |
+
" <td>16 5 1 entgegen, indem er mit großer Heftig...</td>\n",
|
1301 |
+
" <td>entgegen, indem er mit groser Heftigkeit nach ...</td>\n",
|
1302 |
+
" <td>entgegen, indem er mit großer Heftigkeit nach ...</td>\n",
|
1303 |
+
" <td>4205ec0a-9cf3-448d-afba-3023e4c92052</td>\n",
|
1304 |
+
" <td>[{'text_prep': 'Ich todtete eine gelbe Bachste...</td>\n",
|
1305 |
+
" <td>0.852868</td>\n",
|
1306 |
+
" <td>3/6</td>\n",
|
1307 |
+
" <td>D19</td>\n",
|
1308 |
+
" <td>2</td>\n",
|
1309 |
+
" <td>Ich todtete eine gelbe Bachstelze ***) und ein...</td>\n",
|
1310 |
+
" <td>0.293896</td>\n",
|
1311 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1312 |
+
" </tr>\n",
|
1313 |
+
" <tr>\n",
|
1314 |
+
" <th>2</th>\n",
|
1315 |
+
" <td>Z166069305_265</td>\n",
|
1316 |
+
" <td>0.029911</td>\n",
|
1317 |
+
" <td>Z166069305</td>\n",
|
1318 |
+
" <td>265</td>\n",
|
1319 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1320 |
+
" <td>237 Seeufer, an welchem wir bis zum See Maadi...</td>\n",
|
1321 |
+
" <td>Seeufer, an welchem wir bis zum See Maadie hin...</td>\n",
|
1322 |
+
" <td>Seeufer, an welchem wir bis zum See Maadie hin...</td>\n",
|
1323 |
+
" <td>5fc895c1-b587-4442-a34c-dd446c3054f2</td>\n",
|
1324 |
+
" <td>[{'text_prep': 'Auf diesem Damme stehen von Ze...</td>\n",
|
1325 |
+
" <td>0.848943</td>\n",
|
1326 |
+
" <td>4/10</td>\n",
|
1327 |
+
" <td>D19</td>\n",
|
1328 |
+
" <td>3</td>\n",
|
1329 |
+
" <td>Auf diesem Damme stehen von Zeit zu Zeit klein...</td>\n",
|
1330 |
+
" <td>0.347629</td>\n",
|
1331 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1332 |
+
" </tr>\n",
|
1333 |
+
" <tr>\n",
|
1334 |
+
" <th>3</th>\n",
|
1335 |
+
" <td>Z166069305_344</td>\n",
|
1336 |
+
" <td>0.029412</td>\n",
|
1337 |
+
" <td>Z166069305</td>\n",
|
1338 |
+
" <td>344</td>\n",
|
1339 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1340 |
+
" <td>316 niederſchoß, wünſchte ich mir Glük, dieſe...</td>\n",
|
1341 |
+
" <td>niederschos, wunschte ich mir Gluk, diesen uns...</td>\n",
|
1342 |
+
" <td>Niederschoss, wünschte ich mir Glück, diesen u...</td>\n",
|
1343 |
+
" <td>a7debf83-ac70-445a-afee-5f35515b3e1d</td>\n",
|
1344 |
+
" <td>[{'text_prep': 'Indessen schienen mir diese Vö...</td>\n",
|
1345 |
+
" <td>0.843653</td>\n",
|
1346 |
+
" <td>8</td>\n",
|
1347 |
+
" <td>D19</td>\n",
|
1348 |
+
" <td>4</td>\n",
|
1349 |
+
" <td>Indessen schienen mir diese Vögel sich mehr de...</td>\n",
|
1350 |
+
" <td>0.471382</td>\n",
|
1351 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1352 |
+
" </tr>\n",
|
1353 |
+
" <tr>\n",
|
1354 |
+
" <th>4</th>\n",
|
1355 |
+
" <td>Z166069305_243</td>\n",
|
1356 |
+
" <td>0.027778</td>\n",
|
1357 |
+
" <td>Z166069305</td>\n",
|
1358 |
+
" <td>243</td>\n",
|
1359 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1360 |
+
" <td>215 don -einſam liegenden Seen beleben, bleib...</td>\n",
|
1361 |
+
" <td>don -einsam liegenden Seen beleben, bleiben di...</td>\n",
|
1362 |
+
" <td>don-einsam liegenden Seen beleben, bleiben die...</td>\n",
|
1363 |
+
" <td>58487609-e48c-47a1-8a0a-06569fef7f53</td>\n",
|
1364 |
+
" <td>[{'text_prep': 'Die Wanderung dieser Vögel nac...</td>\n",
|
1365 |
+
" <td>0.835293</td>\n",
|
1366 |
+
" <td>24/3</td>\n",
|
1367 |
+
" <td>D19</td>\n",
|
1368 |
+
" <td>5</td>\n",
|
1369 |
+
" <td>Die Wanderung dieser Vögel nach Ägypten hat je...</td>\n",
|
1370 |
+
" <td>0.476594</td>\n",
|
1371 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1372 |
+
" </tr>\n",
|
1373 |
+
" <tr>\n",
|
1374 |
+
" <th>...</th>\n",
|
1375 |
+
" <td>...</td>\n",
|
1376 |
+
" <td>...</td>\n",
|
1377 |
+
" <td>...</td>\n",
|
1378 |
+
" <td>...</td>\n",
|
1379 |
+
" <td>...</td>\n",
|
1380 |
+
" <td>...</td>\n",
|
1381 |
+
" <td>...</td>\n",
|
1382 |
+
" <td>...</td>\n",
|
1383 |
+
" <td>...</td>\n",
|
1384 |
+
" <td>...</td>\n",
|
1385 |
+
" <td>...</td>\n",
|
1386 |
+
" <td>...</td>\n",
|
1387 |
+
" <td>...</td>\n",
|
1388 |
+
" <td>...</td>\n",
|
1389 |
+
" <td>...</td>\n",
|
1390 |
+
" <td>...</td>\n",
|
1391 |
+
" <td>...</td>\n",
|
1392 |
+
" </tr>\n",
|
1393 |
+
" <tr>\n",
|
1394 |
+
" <th>413</th>\n",
|
1395 |
+
" <td>Z166069305_70</td>\n",
|
1396 |
+
" <td>0.002119</td>\n",
|
1397 |
+
" <td>Z166069305</td>\n",
|
1398 |
+
" <td>70</td>\n",
|
1399 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1400 |
+
" <td>42 ohne Thellung und ohne Abgaben die Früchte...</td>\n",
|
1401 |
+
" <td>ohne Thellung und ohne Abgaben die Fruchte ihr...</td>\n",
|
1402 |
+
" <td>ohne Thellung und ohne Abgaben die Früchte ihr...</td>\n",
|
1403 |
+
" <td>06af6d12-1ddf-492d-8848-2537dc8b4a9c</td>\n",
|
1404 |
+
" <td>[{'text_prep': 'Vorzüglich zog Frankreich groß...</td>\n",
|
1405 |
+
" <td>0.794844</td>\n",
|
1406 |
+
" <td>412</td>\n",
|
1407 |
+
" <td>D19</td>\n",
|
1408 |
+
" <td>414</td>\n",
|
1409 |
+
" <td>Vorzüglich zog Frankreich große Vorteile darau...</td>\n",
|
1410 |
+
" <td>0.237914</td>\n",
|
1411 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1412 |
+
" </tr>\n",
|
1413 |
+
" <tr>\n",
|
1414 |
+
" <th>414</th>\n",
|
1415 |
+
" <td>Z166069305_299</td>\n",
|
1416 |
+
" <td>0.002114</td>\n",
|
1417 |
+
" <td>Z166069305</td>\n",
|
1418 |
+
" <td>299</td>\n",
|
1419 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1420 |
+
" <td>271 i Auf meiner ganzen Reife habe ich und m...</td>\n",
|
1421 |
+
" <td>Auf meiner ganzen Reife habe ich und meine Rei...</td>\n",
|
1422 |
+
" <td>Auf meiner ganzen Reise habe ich und meine Gef...</td>\n",
|
1423 |
+
" <td>01d37f8c-4a15-42a2-9ef5-a73fc7739766</td>\n",
|
1424 |
+
" <td>[{'text_prep': 'Der Fluss sieht rotlich und en...</td>\n",
|
1425 |
+
" <td>0.794709</td>\n",
|
1426 |
+
" <td>413</td>\n",
|
1427 |
+
" <td>D19</td>\n",
|
1428 |
+
" <td>415</td>\n",
|
1429 |
+
" <td>Der Fluss sieht rotlich und endlich grünlich a...</td>\n",
|
1430 |
+
" <td>0.106832</td>\n",
|
1431 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1432 |
+
" </tr>\n",
|
1433 |
+
" <tr>\n",
|
1434 |
+
" <th>415</th>\n",
|
1435 |
+
" <td>Z166069305_410</td>\n",
|
1436 |
+
" <td>0.002110</td>\n",
|
1437 |
+
" <td>Z166069305</td>\n",
|
1438 |
+
" <td>410</td>\n",
|
1439 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1440 |
+
" <td>382 ilmy 1 \" mann, der feſt von dieſem Ged...</td>\n",
|
1441 |
+
" <td>ilmy mann, der fest von diesem Gedanken uberze...</td>\n",
|
1442 |
+
" <td>Ilmymann, der fest von diesem Gedanken überzeu...</td>\n",
|
1443 |
+
" <td>dde560fc-9473-4faa-9e3c-1280ba131213</td>\n",
|
1444 |
+
" <td>[{'text_prep': 'Sie hielten mich in ihrer Einb...</td>\n",
|
1445 |
+
" <td>0.791636</td>\n",
|
1446 |
+
" <td>414</td>\n",
|
1447 |
+
" <td>D19</td>\n",
|
1448 |
+
" <td>416</td>\n",
|
1449 |
+
" <td>Sie hielten mich in ihrer Einbildung für einen...</td>\n",
|
1450 |
+
" <td>0.140597</td>\n",
|
1451 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1452 |
+
" </tr>\n",
|
1453 |
+
" <tr>\n",
|
1454 |
+
" <th>416</th>\n",
|
1455 |
+
" <td>Z166069305_75</td>\n",
|
1456 |
+
" <td>0.002105</td>\n",
|
1457 |
+
" <td>Z166069305</td>\n",
|
1458 |
+
" <td>75</td>\n",
|
1459 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1460 |
+
" <td>47 3 dle ich hier gemacht habe, werden iðre ...</td>\n",
|
1461 |
+
" <td>dle ich hier gemacht habe, werden ire Stelle i...</td>\n",
|
1462 |
+
" <td>Die ich hier gemacht habe, werden ihre Stelle ...</td>\n",
|
1463 |
+
" <td>32dd47f1-c7f4-4d4a-bb50-f428f79fce3b</td>\n",
|
1464 |
+
" <td>[{'text_prep': 'Die ich hier gemacht habe, wer...</td>\n",
|
1465 |
+
" <td>0.791505</td>\n",
|
1466 |
+
" <td>415</td>\n",
|
1467 |
+
" <td>D19</td>\n",
|
1468 |
+
" <td>417</td>\n",
|
1469 |
+
" <td>Die ich hier gemacht habe, werden ihre Stelle ...</td>\n",
|
1470 |
+
" <td>0.136045</td>\n",
|
1471 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1472 |
+
" </tr>\n",
|
1473 |
+
" <tr>\n",
|
1474 |
+
" <th>417</th>\n",
|
1475 |
+
" <td>Z166069305_6</td>\n",
|
1476 |
+
" <td>0.002101</td>\n",
|
1477 |
+
" <td>Z166069305</td>\n",
|
1478 |
+
" <td>6</td>\n",
|
1479 |
+
" <td>https://iiif.onb.ac.at/images/ABO/Z166069305/0...</td>\n",
|
1480 |
+
" <td>2125 murid gobiothers conale ܪܝ ، فرد. برد...</td>\n",
|
1481 |
+
" <td>2125 murid gobiothers conale KOENISE KAISERLIC...</td>\n",
|
1482 |
+
" <td>2255 Murdoch Gouverneurs Comte KOENIGSE KAISE...</td>\n",
|
1483 |
+
" <td>21bb54fb-3e4e-456c-9605-f89970db24b7</td>\n",
|
1484 |
+
" <td>[{'text_prep': '2255 Murdoch Gouverneurs Comt...</td>\n",
|
1485 |
+
" <td>0.791373</td>\n",
|
1486 |
+
" <td>416</td>\n",
|
1487 |
+
" <td>D19</td>\n",
|
1488 |
+
" <td>418</td>\n",
|
1489 |
+
" <td>2255 Murdoch Gouverneurs Comte KOENIGSE KAISE...</td>\n",
|
1490 |
+
" <td>0.109442</td>\n",
|
1491 |
+
" <td>https://digital.onb.ac.at/OnbViewer/viewer.fac...</td>\n",
|
1492 |
+
" </tr>\n",
|
1493 |
+
" </tbody>\n",
|
1494 |
+
"</table>\n",
|
1495 |
+
"<p>418 rows × 17 columns</p>\n",
|
1496 |
+
"</div>"
|
1497 |
+
],
|
1498 |
+
"text/plain": [
|
1499 |
+
" document rrf_score barcode page \\\n",
|
1500 |
+
"0 Z166069305_252 0.032258 Z166069305 252 \n",
|
1501 |
+
"1 Z166069305_43 0.031025 Z166069305 43 \n",
|
1502 |
+
"2 Z166069305_265 0.029911 Z166069305 265 \n",
|
1503 |
+
"3 Z166069305_344 0.029412 Z166069305 344 \n",
|
1504 |
+
"4 Z166069305_243 0.027778 Z166069305 243 \n",
|
1505 |
+
".. ... ... ... ... \n",
|
1506 |
+
"413 Z166069305_70 0.002119 Z166069305 70 \n",
|
1507 |
+
"414 Z166069305_299 0.002114 Z166069305 299 \n",
|
1508 |
+
"415 Z166069305_410 0.002110 Z166069305 410 \n",
|
1509 |
+
"416 Z166069305_75 0.002105 Z166069305 75 \n",
|
1510 |
+
"417 Z166069305_6 0.002101 Z166069305 6 \n",
|
1511 |
+
"\n",
|
1512 |
+
" iiif_link \\\n",
|
1513 |
+
"0 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1514 |
+
"1 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1515 |
+
"2 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1516 |
+
"3 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1517 |
+
"4 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1518 |
+
".. ... \n",
|
1519 |
+
"413 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1520 |
+
"414 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1521 |
+
"415 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1522 |
+
"416 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1523 |
+
"417 https://iiif.onb.ac.at/images/ABO/Z166069305/0... \n",
|
1524 |
+
"\n",
|
1525 |
+
" text_orig \\\n",
|
1526 |
+
"0 224 7 Dieſer Falke war ein Weibchen. Der Ein... \n",
|
1527 |
+
"1 16 5 1 entgegen, indem er mit großer Heftig... \n",
|
1528 |
+
"2 237 Seeufer, an welchem wir bis zum See Maadi... \n",
|
1529 |
+
"3 316 niederſchoß, wünſchte ich mir Glük, dieſe... \n",
|
1530 |
+
"4 215 don -einſam liegenden Seen beleben, bleib... \n",
|
1531 |
+
".. ... \n",
|
1532 |
+
"413 42 ohne Thellung und ohne Abgaben die Früchte... \n",
|
1533 |
+
"414 271 i Auf meiner ganzen Reife habe ich und m... \n",
|
1534 |
+
"415 382 ilmy 1 \" mann, der feſt von dieſem Ged... \n",
|
1535 |
+
"416 47 3 dle ich hier gemacht habe, werden iðre ... \n",
|
1536 |
+
"417 2125 murid gobiothers conale ܪܝ ، فرد. برد... \n",
|
1537 |
+
"\n",
|
1538 |
+
" text_clean \\\n",
|
1539 |
+
"0 Dieser Falke war ein Weibchen. Der Eingeweide ... \n",
|
1540 |
+
"1 entgegen, indem er mit groser Heftigkeit nach ... \n",
|
1541 |
+
"2 Seeufer, an welchem wir bis zum See Maadie hin... \n",
|
1542 |
+
"3 niederschos, wunschte ich mir Gluk, diesen uns... \n",
|
1543 |
+
"4 don -einsam liegenden Seen beleben, bleiben di... \n",
|
1544 |
+
".. ... \n",
|
1545 |
+
"413 ohne Thellung und ohne Abgaben die Fruchte ihr... \n",
|
1546 |
+
"414 Auf meiner ganzen Reife habe ich und meine Rei... \n",
|
1547 |
+
"415 ilmy mann, der fest von diesem Gedanken uberze... \n",
|
1548 |
+
"416 dle ich hier gemacht habe, werden ire Stelle i... \n",
|
1549 |
+
"417 2125 murid gobiothers conale KOENISE KAISERLIC... \n",
|
1550 |
+
"\n",
|
1551 |
+
" text_prep \\\n",
|
1552 |
+
"0 Dieser Falke war ein Weibchen. Der Eingeweidek... \n",
|
1553 |
+
"1 entgegen, indem er mit großer Heftigkeit nach ... \n",
|
1554 |
+
"2 Seeufer, an welchem wir bis zum See Maadie hin... \n",
|
1555 |
+
"3 Niederschoss, wünschte ich mir Glück, diesen u... \n",
|
1556 |
+
"4 don-einsam liegenden Seen beleben, bleiben die... \n",
|
1557 |
+
".. ... \n",
|
1558 |
+
"413 ohne Thellung und ohne Abgaben die Früchte ihr... \n",
|
1559 |
+
"414 Auf meiner ganzen Reise habe ich und meine Gef... \n",
|
1560 |
+
"415 Ilmymann, der fest von diesem Gedanken überzeu... \n",
|
1561 |
+
"416 Die ich hier gemacht habe, werden ihre Stelle ... \n",
|
1562 |
+
"417 2255 Murdoch Gouverneurs Comte KOENIGSE KAISE... \n",
|
1563 |
+
"\n",
|
1564 |
+
" _id \\\n",
|
1565 |
+
"0 8a5893c6-fde6-42f3-94e3-16db95629fe5 \n",
|
1566 |
+
"1 4205ec0a-9cf3-448d-afba-3023e4c92052 \n",
|
1567 |
+
"2 5fc895c1-b587-4442-a34c-dd446c3054f2 \n",
|
1568 |
+
"3 a7debf83-ac70-445a-afee-5f35515b3e1d \n",
|
1569 |
+
"4 58487609-e48c-47a1-8a0a-06569fef7f53 \n",
|
1570 |
+
".. ... \n",
|
1571 |
+
"413 06af6d12-1ddf-492d-8848-2537dc8b4a9c \n",
|
1572 |
+
"414 01d37f8c-4a15-42a2-9ef5-a73fc7739766 \n",
|
1573 |
+
"415 dde560fc-9473-4faa-9e3c-1280ba131213 \n",
|
1574 |
+
"416 32dd47f1-c7f4-4d4a-bb50-f428f79fce3b \n",
|
1575 |
+
"417 21bb54fb-3e4e-456c-9605-f89970db24b7 \n",
|
1576 |
+
"\n",
|
1577 |
+
" _highlights _score rank corpus \\\n",
|
1578 |
+
"0 [{'text_prep': 'S. sono. Gmelin hat diesen Vog... 0.856740 2 D19 \n",
|
1579 |
+
"1 [{'text_prep': 'Ich todtete eine gelbe Bachste... 0.852868 3/6 D19 \n",
|
1580 |
+
"2 [{'text_prep': 'Auf diesem Damme stehen von Ze... 0.848943 4/10 D19 \n",
|
1581 |
+
"3 [{'text_prep': 'Indessen schienen mir diese Vö... 0.843653 8 D19 \n",
|
1582 |
+
"4 [{'text_prep': 'Die Wanderung dieser Vögel nac... 0.835293 24/3 D19 \n",
|
1583 |
+
".. ... ... ... ... \n",
|
1584 |
+
"413 [{'text_prep': 'Vorzüglich zog Frankreich groß... 0.794844 412 D19 \n",
|
1585 |
+
"414 [{'text_prep': 'Der Fluss sieht rotlich und en... 0.794709 413 D19 \n",
|
1586 |
+
"415 [{'text_prep': 'Sie hielten mich in ihrer Einb... 0.791636 414 D19 \n",
|
1587 |
+
"416 [{'text_prep': 'Die ich hier gemacht habe, wer... 0.791505 415 D19 \n",
|
1588 |
+
"417 [{'text_prep': '2255 Murdoch Gouverneurs Comt... 0.791373 416 D19 \n",
|
1589 |
+
"\n",
|
1590 |
+
" rerank unpacked_highlights \\\n",
|
1591 |
+
"0 1 S. sono. Gmelin hat diesen Vogel in der 13. \n",
|
1592 |
+
"1 2 Ich todtete eine gelbe Bachstelze ***) und ein... \n",
|
1593 |
+
"2 3 Auf diesem Damme stehen von Zeit zu Zeit klein... \n",
|
1594 |
+
"3 4 Indessen schienen mir diese Vögel sich mehr de... \n",
|
1595 |
+
"4 5 Die Wanderung dieser Vögel nach Ägypten hat je... \n",
|
1596 |
+
".. ... ... \n",
|
1597 |
+
"413 414 Vorzüglich zog Frankreich große Vorteile darau... \n",
|
1598 |
+
"414 415 Der Fluss sieht rotlich und endlich grünlich a... \n",
|
1599 |
+
"415 416 Sie hielten mich in ihrer Einbildung für einen... \n",
|
1600 |
+
"416 417 Die ich hier gemacht habe, werden ihre Stelle ... \n",
|
1601 |
+
"417 418 2255 Murdoch Gouverneurs Comte KOENIGSE KAISE... \n",
|
1602 |
+
"\n",
|
1603 |
+
" ST_cosine_similarity onb_viewer_link \n",
|
1604 |
+
"0 0.587100 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1605 |
+
"1 0.293896 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1606 |
+
"2 0.347629 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1607 |
+
"3 0.471382 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1608 |
+
"4 0.476594 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1609 |
+
".. ... ... \n",
|
1610 |
+
"413 0.237914 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1611 |
+
"414 0.106832 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1612 |
+
"415 0.140597 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1613 |
+
"416 0.136045 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1614 |
+
"417 0.109442 https://digital.onb.ac.at/OnbViewer/viewer.fac... \n",
|
1615 |
+
"\n",
|
1616 |
+
"[418 rows x 17 columns]"
|
1617 |
+
]
|
1618 |
+
},
|
1619 |
+
"execution_count": 89,
|
1620 |
+
"metadata": {},
|
1621 |
+
"output_type": "execute_result"
|
1622 |
+
}
|
1623 |
+
],
|
1624 |
+
"source": [
|
1625 |
+
"final_scores_df"
|
1626 |
+
]
|
1627 |
+
},
|
1628 |
+
{
|
1629 |
+
"cell_type": "code",
|
1630 |
+
"execution_count": 90,
|
1631 |
+
"id": "931d1a7d",
|
1632 |
+
"metadata": {},
|
1633 |
+
"outputs": [],
|
1634 |
+
"source": [
|
1635 |
+
"# Save DataFrame to CSV\n",
|
1636 |
+
"final_scores_df.to_csv('data/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.csv', index=False)\n",
|
1637 |
+
"\n",
|
1638 |
+
"# Save DataFrame to Excel\n",
|
1639 |
+
"final_scores_df.to_excel('data/sonnini_llm_corrected/i_onit-sonnini-DHd2025-prep-q_Pferd, Pferde.xlsx', index=False, engine='openpyxl')"
|
1640 |
+
]
|
1641 |
+
},
|
1642 |
+
{
|
1643 |
+
"cell_type": "code",
|
1644 |
+
"execution_count": null,
|
1645 |
+
"id": "4f4a0175",
|
1646 |
+
"metadata": {},
|
1647 |
+
"outputs": [],
|
1648 |
+
"source": []
|
1649 |
+
}
|
1650 |
+
],
|
1651 |
+
"metadata": {
|
1652 |
+
"kernelspec": {
|
1653 |
+
"display_name": "Python 3 (ipykernel)",
|
1654 |
+
"language": "python",
|
1655 |
+
"name": "python3"
|
1656 |
+
},
|
1657 |
+
"language_info": {
|
1658 |
+
"codemirror_mode": {
|
1659 |
+
"name": "ipython",
|
1660 |
+
"version": 3
|
1661 |
+
},
|
1662 |
+
"file_extension": ".py",
|
1663 |
+
"mimetype": "text/x-python",
|
1664 |
+
"name": "python",
|
1665 |
+
"nbconvert_exporter": "python",
|
1666 |
+
"pygments_lexer": "ipython3",
|
1667 |
+
"version": "3.9.12"
|
1668 |
+
},
|
1669 |
+
"toc": {
|
1670 |
+
"base_numbering": 1,
|
1671 |
+
"nav_menu": {},
|
1672 |
+
"number_sections": true,
|
1673 |
+
"sideBar": true,
|
1674 |
+
"skip_h1_title": false,
|
1675 |
+
"title_cell": "Table of Contents",
|
1676 |
+
"title_sidebar": "Contents",
|
1677 |
+
"toc_cell": false,
|
1678 |
+
"toc_position": {},
|
1679 |
+
"toc_section_display": true,
|
1680 |
+
"toc_window_display": false
|
1681 |
+
}
|
1682 |
+
},
|
1683 |
+
"nbformat": 4,
|
1684 |
+
"nbformat_minor": 5
|
1685 |
+
}
|
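For reference, the reciprocal rank fusion step in the notebook above gives each document a contribution of 1/(k + rank) per ranking (k = 60, ranks starting at 1) and sums these contributions across rankings before re-sorting. A minimal, self-contained sketch of that fusion logic, using two invented toy rankings instead of the notebook's Marqo result DataFrames:

def rrf_scores(rankings, k=60):
    """Fuse several rankings (lists of document ids, best first) into one RRF score per document."""
    scores = {}
    for ranking in rankings:
        for rank, doc in enumerate(ranking, start=1):
            scores[doc] = scores.get(doc, 0.0) + 1.0 / (k + rank)
    # Highest fused score first, mirroring the notebook's sort on 'rrf_score'
    return sorted(scores.items(), key=lambda item: item[1], reverse=True)

# Two hypothetical result lists, e.g. from two query variants
print(rrf_scores([["page_430", "page_10", "page_399"], ["page_10", "page_430", "page_220"]]))

A page ranked 1st in one list and 2nd in the other receives 1/61 + 1/62 ≈ 0.0325, which matches the top rrf_score shown for rank "1/2" in the fused table above.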
src/indexing/index_data.py
ADDED
@@ -0,0 +1,97 @@
1 |
+
"""
|
2 |
+
This script creates a Marqo index of preprocessed and original OCR texts. Each page is indexed as a document and split into chunks of two sentences, each of which is embedded as a vector.
|
3 |
+
The model used for sentence embedding is https://huggingface.co/flax-sentence-embeddings/all_datasets_v4_mpnet-base.
|
4 |
+
|
5 |
+
Code by Michela Vignoli. Parts of this code were developed with assistance from Simon König.
|
6 |
+
"""
|
7 |
+
|
8 |
+
from pprint import pprint
|
9 |
+
import csv
|
10 |
+
import marqo as mq
|
11 |
+
|
12 |
+
##
|
13 |
+
## Connect to Marqo
|
14 |
+
##
|
15 |
+
|
16 |
+
MARQO_URL = "http://10.103.251.104:8882"
|
17 |
+
marqoClient = mq.Client(url=MARQO_URL)
|
18 |
+
#pprint(marqoClient.get_indexes())
|
19 |
+
|
20 |
+
##
|
21 |
+
## Index settings
|
22 |
+
##
|
23 |
+
|
24 |
+
settings = {
|
25 |
+
"textPreprocessing": {
|
26 |
+
"splitLength": 2,
|
27 |
+
"splitOverlap": 0,
|
28 |
+
"splitMethod": "sentence",
|
29 |
+
},
|
30 |
+
}
|
31 |
+
|
32 |
+
##
|
33 |
+
## Check whether the index exists; if not, create it
|
34 |
+
##
|
35 |
+
|
36 |
+
indexName = "onit-sonnini-DHd2025-clean"
|
37 |
+
print("Indexname: ", indexName)
|
38 |
+
current_indexes = [d["indexName"] for d in marqoClient.get_indexes()["results"]]
|
39 |
+
if indexName in current_indexes:
|
40 |
+
print(f"Index already exists: {indexName} ")
|
41 |
+
# Set indexName as the current index
|
42 |
+
print(f"Defaulting to index connection. Index connected: {indexName} ")
|
43 |
+
else: # Create a new index
|
44 |
+
print(f"Index does not exist: {indexName} ")
|
45 |
+
print(f"Creating index: {indexName} ")
|
46 |
+
marqoClient.create_index(
|
47 |
+
indexName,
|
48 |
+
model="flax-sentence-embeddings/all_datasets_v4_mpnet-base",
|
49 |
+
settings_dict=settings
|
50 |
+
)
|
51 |
+
|
52 |
+
## List of models integrated in Marqo: https://docs.marqo.ai/latest/models/marqo/list-of-models/
|
53 |
+
|
54 |
+
pprint(marqoClient.get_indexes())
|
55 |
+
|
56 |
+
##
|
57 |
+
## Load dict of data
|
58 |
+
##
|
59 |
+
|
60 |
+
|
61 |
+
# Load list of dictionaries, each containing the text_orig, text_clean, and text_prep fields plus page metadata (barcode, page)
|
62 |
+
# CSV path
|
63 |
+
csv_file = 'data/DHd_index-cleaned.csv'
|
64 |
+
|
65 |
+
# Read data from CSV file into a list of dictionaries
|
66 |
+
with open(csv_file, mode='r', encoding='utf-8') as file:
|
67 |
+
reader = csv.DictReader(file)
|
68 |
+
animal_descriptions = [row for row in reader]
|
69 |
+
|
70 |
+
# Function to clean text by replacing \n with spaces
|
71 |
+
def clean_text(text):
|
72 |
+
return text.replace('\n', ' ').strip()
|
73 |
+
|
74 |
+
# Clean the 'text' field in each dictionary
|
75 |
+
for entry in animal_descriptions:
|
76 |
+
entry['text_orig'] = clean_text(entry['text_orig'])
|
77 |
+
entry['text_clean'] = clean_text(entry['text_clean'])
|
78 |
+
entry['text_prep'] = clean_text(entry['text_prep'])
|
79 |
+
|
80 |
+
pprint(animal_descriptions[:3])
|
81 |
+
|
82 |
+
##
|
83 |
+
## Add documents to the index
|
84 |
+
##
|
85 |
+
|
86 |
+
print(f"Indexing data...")
|
87 |
+
# Define client_batch_size
|
88 |
+
client_batch_size = 128
|
89 |
+
|
90 |
+
# Indexing
|
91 |
+
marqoClient.index(indexName).add_documents(
|
92 |
+
animal_descriptions,
|
93 |
+
client_batch_size=client_batch_size,
|
94 |
+
tensor_fields=["text_clean"],
|
95 |
+
)
|
96 |
+
|
97 |
+
print(f"Data has been indexed in {indexName}")
|
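Once index_data.py has run, the same Marqo client can be used to query the new index. A minimal sketch, assuming the endpoint and index name defined in the script above and the barcode and page fields carried over from the indexed CSV:

import marqo as mq
import pandas as pd

marqoClient = mq.Client(url="http://10.103.251.104:8882")  # same endpoint as in index_data.py

# Plain tensor search against the index created above; "Pferd, Pferde" mirrors the query used in the notebooks
results = marqoClient.index("onit-sonnini-DHd2025-clean").search("Pferd, Pferde", limit=10)

hits = pd.DataFrame(results["hits"])
print(hits[["barcode", "page", "_score"]])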
src/preprocessing/clean_books.py
ADDED
@@ -0,0 +1,80 @@
1 |
+
"""
|
2 |
+
This script cleans the OCR files, so that we have uniform documents with the same pre-processing applied to each of
|
3 |
+
them. For every book, a new document is created so that the original file is always available for cross-checking etc.
|
4 |
+
|
5 |
+
Code adapted from Travelogues project, by Jan Rörden. Source: https://github.com/travelogues/scripts/blob/master/groundtruth/
|
6 |
+
|
7 |
+
"""
|
8 |
+
|
9 |
+
import os
|
10 |
+
import re
|
11 |
+
import string
|
12 |
+
import unicodedata
|
13 |
+
from tqdm import tqdm
|
14 |
+
|
15 |
+
|
16 |
+
# directories
|
17 |
+
books_original_dir = 'source/path/'
|
18 |
+
output_dir = 'output/path/'
|
19 |
+
|
20 |
+
# Ensure the cleaned directory exists
|
21 |
+
os.makedirs(output_dir, exist_ok=True)
|
22 |
+
|
23 |
+
# Function to remove accents and umlauts
|
24 |
+
def remove_accents(input_str):
|
25 |
+
# Normalize to decompose accents
|
26 |
+
nfkd_form = unicodedata.normalize('NFKD', input_str)
|
27 |
+
# Filter out diacritical marks
|
28 |
+
return ''.join([c for c in nfkd_form if not unicodedata.combining(c)])
|
29 |
+
|
30 |
+
for fname in tqdm(sorted(os.listdir(books_original_dir))):
|
31 |
+
# Save the current id for file naming later
|
32 |
+
current_book_id = fname[:-4]
|
33 |
+
|
34 |
+
# Process only .txt files
|
35 |
+
if fname.endswith('.txt'):
|
36 |
+
with open(os.path.join(books_original_dir, fname), 'r', encoding='utf-8') as f:
|
37 |
+
cleaned_lines = []
|
38 |
+
page_lines = []
|
39 |
+
|
40 |
+
for line in f:
|
41 |
+
# Replace long s and ß with normal s
|
42 |
+
clean_line = re.sub(r'[ſß]', 's', line)
|
43 |
+
|
44 |
+
# Remove accents and umlauts
|
45 |
+
clean_line = remove_accents(clean_line)
|
46 |
+
|
47 |
+
# Remove all non-word characters except whitespace and punctuation
|
48 |
+
clean_line = re.sub(r'[^a-zA-Z0-9\s' + re.escape(string.punctuation) + ']', '', clean_line)
|
49 |
+
|
50 |
+
# Convert to lowercase
|
51 |
+
#clean_line = clean_line.lower()
|
52 |
+
|
53 |
+
# Strip trailing spaces but keep line breaks
|
54 |
+
clean_line = clean_line.rstrip()
|
55 |
+
|
56 |
+
# Exclude short or non-textual lines (blank lines are kept, since they mark page breaks)
|
57 |
+
if clean_line and (len(clean_line) < 3 or clean_line.isdigit() or not re.search(r'[a-zA-Z]', clean_line)):
|
58 |
+
continue # Skip the line
|
59 |
+
|
60 |
+
# Check for a new page indicated by a blank line
|
61 |
+
if clean_line == "":
|
62 |
+
# Handle empty pages
|
63 |
+
if not page_lines or page_lines[0].startswith('statuscode') or page_lines[0].startswith('<html>'):
|
64 |
+
cleaned_lines.append("<empty page>")
|
65 |
+
else:
|
66 |
+
cleaned_lines.extend(page_lines)
|
67 |
+
page_lines = []
|
68 |
+
else:
|
69 |
+
page_lines.append(clean_line)
|
70 |
+
|
71 |
+
# Handle the last page if the file ends without a blank line
|
72 |
+
if not page_lines or page_lines[0].startswith('statuscode') or page_lines[0].startswith('<html>'):
|
73 |
+
cleaned_lines.append("<empty page>")
|
74 |
+
else:
|
75 |
+
cleaned_lines.extend(page_lines)
|
76 |
+
|
77 |
+
# Save the cleaned text to a new file, retaining line breaks
|
78 |
+
cleaned_file_path = os.path.join(output_dir, f"{current_book_id}_cleaned.txt")
|
79 |
+
with open(cleaned_file_path, 'w', encoding='utf-8') as cleaned_file:
|
80 |
+
cleaned_file.write('\n'.join(cleaned_lines)) # Write lines with original line breaks
|
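Note: as an illustration (hypothetical inputs, not taken from the corpus), the two normalisation steps above behave as follows:

# NFKD decomposition followed by dropping combining marks reduces umlauts and
# accents to their base letters; the long s is replaced separately via regex.
print(remove_accents("Käfer"))        # -> Kafer
print(remove_accents("voilà"))        # -> voila
print(re.sub(r'[ſß]', 's', "Fluſs"))  # -> Fluss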
src/preprocessing/llm_keywords.py
ADDED
@@ -0,0 +1,120 @@
"""
This script parses through the cleaned texts and prompts Ollama to create lists of keywords describing the contents
of each page, focusing on particular topics of interest.
It was tested with German texts from the 17th, 18th, and 19th centuries.

Code by Michela Vignoli. Parts of this code were developed with assistance from GPT-4 and GPT-3 (free version).
"""

import json
import subprocess
import os
import time

def get_data(root_folder, extension='.txt'):
    data = []
    # Walk through all folders and files in the root directory
    for folder, _, files in os.walk(root_folder):
        for file in files:
            if file.endswith(extension):
                file_path = os.path.join(folder, file)
                filename = os.path.splitext(os.path.basename(file_path))[0]
                folder_path = os.path.join(f'preprocessed/{folder}_keywords')
                #print(f"Found file: {file_path}")

                # Read the file content
                with open(file_path, 'r', encoding="utf-8") as f:
                    text = f.read()
                data.append({"path": folder_path, "text": text, "filename": filename})
    return data

def extract_corrected_text(raw_response):
    # Split the response by newline to handle multiple JSON objects
    lines = raw_response.strip().split('\n')
    response_segments = []

    # Process each line as a JSON object
    for line in lines:
        try:
            json_obj = json.loads(line)
            response_segment = json_obj.get('response', '')
            response_segments.append(response_segment)
        except json.JSONDecodeError:
            print(f"Skipping invalid JSON line: {line}")

    # Join all response segments into a single text
    full_text = ''.join(response_segments)
    return full_text

def correct_text_with_llm(text, retries=3):
    print('-------------------------')
    print('Processing LLM request...')
    prompt = "You are a historian expert in historical German texts. From the following faulty OCR texts generated from historical travelogues printed from the 17-19th century, create a list of keywords summarizing the animals, plants, landscapes, and maps mentioned in the text. If animals, plants, landscapes, or maps are mentioned in the text, include the aforementioned general keywords and more specific ones afterwards. If none of these is mentioned in the text, add up to three keywords describing the content of the page.\n\n" + text

    # Prepare the cURL command
    curl_command = [
        'curl',
        'http://your.ip:port/api/generate',
        '-d', json.dumps({
            "model": "llama3.1:70b",
            "prompt": prompt
        }),
        '-H', 'Content-Type: application/json'
    ]

    for attempt in range(retries):
        print(f"Attempt {attempt + 1} of {retries}: Processing LLM request...")
        result = subprocess.run(curl_command, capture_output=True, text=True, encoding='utf-8')

        if result.returncode != 0:
            print(f"Error: cURL command failed with error: {result.stderr}")
            time.sleep(2)  # Wait before retrying
            continue

        corrected_text = extract_corrected_text(result.stdout)
        print("Corrected text:", corrected_text)

        if corrected_text:
            return corrected_text

        print("Invalid response, retrying...")
        time.sleep(2)  # Wait before retrying

    print("Failed to get a valid response from the LLM API.")
    return None


def process_txt(root_folder):
    # Read text files from the root folder
    data = get_data(root_folder)

    # Process each file in the data list
    for item in data:
        text = item["text"]
        folder_path = item["path"]
        filename = item["filename"]

        # Define the output path for the keyword file
        output_path = os.path.join(folder_path, f"{filename}_keywords.txt")

        # Create the necessary directories if they don't exist
        os.makedirs(folder_path, exist_ok=True)

        # Apply the LLM function
        corrected_text = correct_text_with_llm(text)

        # Determine the appropriate output path based on whether the request succeeded
        if corrected_text:
            # Save the generated keywords into a .txt file
            with open(output_path, "w", encoding="utf-8") as file:
                file.write(corrected_text)
            print(f"Processed and corrected text saved to {output_path}")
        else:
            # Save the original text into a .txt file with a different name if the request failed
            output_path = os.path.join(folder_path, f"{filename}_FAILED.txt")
            with open(output_path, "w", encoding="utf-8") as file:
                file.write(text)
            print(f"Failed to correct text. Original text saved to {output_path}")

# Example usage
root_folder = 'source/path/'
process_txt(root_folder)
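Note: calling cURL via subprocess works, but the same Ollama /api/generate request can also be issued directly from Python. Below is a minimal sketch under the same assumptions as the script (placeholder URL, llama3.1:70b model); with "stream": False the endpoint returns a single JSON object instead of the line-by-line stream parsed by extract_corrected_text.

import requests

def generate_keywords(prompt, url="http://your.ip:port/api/generate"):
    # Send the prompt to the Ollama generate endpoint and return the completion
    response = requests.post(
        url,
        json={"model": "llama3.1:70b", "prompt": prompt, "stream": False},
        timeout=300,
    )
    response.raise_for_status()
    # Non-streaming responses carry the full completion in the "response" field
    return response.json().get("response", "")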
src/preprocessing/llm_preprocessing.py
ADDED
@@ -0,0 +1,120 @@
"""
This script parses through the cleaned texts and prompts Ollama to create transcriptions of the faulty OCR texts,
staying as close as possible to the original wording.
It was tested with German texts from the 17th, 18th, and 19th centuries.

Code by Michela Vignoli. Parts of this code were developed with assistance from GPT-4 and GPT-3 (free version).
"""

import json
import subprocess
import os
import time

def get_data(root_folder, extension='.txt'):
    data = []
    # Walk through all folders and files in the root directory
    for folder, _, files in os.walk(root_folder):
        for file in files:
            if file.endswith(extension):
                file_path = os.path.join(folder, file)
                filename = os.path.splitext(os.path.basename(file_path))[0]
                folder_path = os.path.join(f'preprocessed/{folder}_preprocessed')
                #print(f"Found file: {file_path}")

                # Read the file content
                with open(file_path, 'r', encoding="utf-8") as f:
                    text = f.read()
                data.append({"path": folder_path, "text": text, "filename": filename})
    return data

def extract_corrected_text(raw_response):
    # Split the response by newline to handle multiple JSON objects
    lines = raw_response.strip().split('\n')
    response_segments = []

    # Process each line as a JSON object
    for line in lines:
        try:
            json_obj = json.loads(line)
            response_segment = json_obj.get('response', '')
            response_segments.append(response_segment)
        except json.JSONDecodeError:
            print(f"Skipping invalid JSON line: {line}")

    # Join all response segments into a single text
    full_text = ''.join(response_segments)
    return full_text

def correct_text_with_llm(text, retries=3):
    print('-------------------------')
    print('Processing LLM request...')
    prompt = "You are a historian expert in historical German texts. Correct the following faulty OCR texts generated from historical travelogues printed from the 17-19th century. Remain as close to the original, historical wording as possible while correcting all the errors from the faulty OCR. Output the corrected text by removing the unnecessary line breaks in the pages, where full sentences occur. Leave the line breaks in other pages. Only output the corrected text without further comments, explanations, or information. Omit 'Corrected text:' before the actual text.\n\n" + text

    # Prepare the cURL command
    curl_command = [
        'curl',
        'http://your.ip:port/api/generate',
        '-d', json.dumps({
            "model": "llama3.1:70b",
            "prompt": prompt
        }),
        '-H', 'Content-Type: application/json'
    ]

    for attempt in range(retries):
        print(f"Attempt {attempt + 1} of {retries}: Processing LLM request...")
        result = subprocess.run(curl_command, capture_output=True, text=True, encoding='utf-8')

        if result.returncode != 0:
            print(f"Error: cURL command failed with error: {result.stderr}")
            time.sleep(2)  # Wait before retrying
            continue

        corrected_text = extract_corrected_text(result.stdout)
        print("Corrected text:", corrected_text)

        if corrected_text:
            return corrected_text

        print("Invalid response, retrying...")
        time.sleep(2)  # Wait before retrying

    print("Failed to get a valid response from the LLM API.")
    return None


def process_txt(root_folder):
    # Read text files from the root folder
    data = get_data(root_folder)

    # Process each file in the data list
    for item in data:
        text = item["text"]
        folder_path = item["path"]
        filename = item["filename"]

        # Define the output path for the corrected text file
        output_path = os.path.join(folder_path, f"{filename}_corrected.txt")

        # Create the necessary directories if they don't exist
        os.makedirs(folder_path, exist_ok=True)

        # Apply the LLM correction function
        corrected_text = correct_text_with_llm(text)

        # Determine the appropriate output path based on whether correction succeeded
        if corrected_text:
            # Save the corrected text into a .txt file
            with open(output_path, "w", encoding="utf-8") as file:
                file.write(corrected_text)
            print(f"Processed and corrected text saved to {output_path}")
        else:
            # Save the original text into a .txt file with a different name if correction failed
            output_path = os.path.join(folder_path, f"{filename}_FAILED.txt")
            with open(output_path, "w", encoding="utf-8") as file:
                file.write(text)
            print(f"Failed to correct text. Original text saved to {output_path}")

# Example usage
root_folder = 'source/folder/'
process_txt(root_folder)
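Note: a tiny worked example (hypothetical payload) of how extract_corrected_text reassembles the default streaming output of /api/generate, which arrives as one JSON object per line:

sample = '{"response": "Die "}\n{"response": "Pferde "}\n{"response": "weiden."}\n{"done": true}'
print(extract_corrected_text(sample))  # -> Die Pferde weiden.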
src/utils/annotations_preprocessing.py
ADDED
@@ -0,0 +1,92 @@
"""
This script matches the text annotations created on the original OCR files to the cleaned version of the text.
The annotations were created with Recogito (https://recogito.pelagios.org/).

Code by Michela Vignoli. Parts of this code were developed with assistance from GPT-4 and GPT-3 (free version).
"""

## Import packages ##

import pandas as pd
import os
import re


## Import annotations from Recogito ##

path_1 = "source/path/"
filename_1 = 'jiggvn0g5pgx34.csv'

# Function to reformat the labels
def reformat_labels(label_str):
    labels = str(label_str).split('|')  # Split the string by '|'
    reformatted = ', '.join([f"'{label}'" for label in labels])  # Enclose each label in ''
    return reformatted

df1 = pd.read_csv(os.path.join(path_1, filename_1))[["UUID", "FILE", "QUOTE_TRANSCRIPTION", "ANCHOR", "COMMENTS", "TAGS"]]

# Apply the function to the 'TAGS' column
df1['TAGS'] = df1['TAGS'].apply(reformat_labels)


## Extract page numbers from merged OCR text file ##

# Read the entire text file into a single string
with open('source/path/Z255430508_clean_merged.txt', 'r', encoding='utf-8') as file1:
    text_content1 = file1.read()


# Function to find the page number in the surrounding character sequence
def find_number_before_position(text: str, position: int, search_length: int = 10000) -> str:
    """
    Finds the last number following 'page' in the text preceding (or, for position 0, succeeding) the given position.

    Parameters:
    - text (str): The full text to search within.
    - position (int): The position in the text to search around.
    - search_length (int): The length of text to search before or after the position.

    Returns:
    - str: The last number found after 'page' in the preceding or succeeding text,
      or the warning string "Check!" if no match is found.
    """
    if not isinstance(text, str):
        raise ValueError("text must be a string")
    if not isinstance(position, int) or position < 0:
        raise ValueError("position must be a non-negative integer")
    if not isinstance(search_length, int) or search_length < 0:
        raise ValueError("search_length must be a non-negative integer")

    if position == 0:
        # Search after the position
        following_text = text[position:position + search_length]
        matches = re.findall(r'page(\d+)', following_text)
        if matches:
            return matches[0]  # Return the first match found
        else:
            return "Check!"
    else:
        # Search before the position
        start_position = max(0, position - search_length)
        preceding_text = text[start_position:position]
        matches = re.findall(r'page(\d+)', preceding_text)
        if matches:
            return matches[-1]  # Return the last match found
        else:
            return "Check!"

# Apply the function to each row in the DataFrame (cast to a plain int so the type check passes)
df1['PAGE'] = pd.to_numeric(df1['ANCHOR'].str.extract(r'(\d+)')[0], errors='coerce').apply(lambda x: find_number_before_position(text_content1, int(x)))


## Annotation analysis ##

# Split the reformatted labels by ', ' and flatten the list of lists into a single list
all_labels = df1['TAGS'].str.split(', ').sum()

# Count the occurrences of each label
label_counts = pd.Series(all_labels).value_counts()

print(label_counts)
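Note: a quick illustration with hypothetical inputs (the merged file is assumed to separate pages with 'pageNNNN' markers, which is what the regex looks for):

sample_text = "page0001 Lorem ipsum page0002 dolor sit amet"
print(find_number_before_position(sample_text, 30))  # -> 0002 (last marker before position 30)
print(find_number_before_position(sample_text, 0))   # -> 0001 (first marker after the start)
print(reformat_labels("animal|horse"))               # -> 'animal', 'horse'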
src/utils/extract_data.py
ADDED
@@ -0,0 +1,128 @@
"""
This script creates a CSV with all data to be indexed on the Marqo server.

Code by Michela Vignoli. Parts of this code were developed with assistance from GPT-4 and GPT-3 (free version).
"""

import os
import csv
import chardet
from tqdm import tqdm

# Helper function to get all file paths with a specific extension in a folder
def collect_files(folder, extension=".txt"):
    file_paths = []
    for root, _, files in os.walk(folder):
        for file in files:
            if file.endswith(extension):
                file_paths.append(os.path.join(root, file))
    return file_paths

# Function to process files and extract their text
def process_file(file_path):
    try:
        # Detect encoding
        with open(file_path, 'rb') as f:
            result = chardet.detect(f.read())
        encoding = result['encoding']

        # Read the file
        with open(file_path, 'r', encoding=encoding) as f:
            return f.read()
    except Exception as e:
        print(f"Error processing {file_path}: {e}")
        return None

# Combine data from clean, orig, and prep folders
def combine_data(clean_files, orig_files, prep_files):
    combined_data = []

    # Index files by (barcode, page) for matching
    def index_files(files):
        indexed = {}
        for file in files:
            barcode = os.path.basename(os.path.dirname(file))[:10]
            page = os.path.basename(file)[:5]
            indexed[(barcode, page)] = file
        return indexed

    clean_index = index_files(clean_files)
    orig_index = index_files(orig_files)
    prep_index = index_files(prep_files)

    # Process files and combine data
    for key in tqdm(clean_index.keys(), desc="Combining data", unit="file"):
        clean_file = clean_index.get(key)
        orig_file = orig_index.get(key)
        prep_file = prep_index.get(key)

        # Extract text
        text_clean = process_file(clean_file) if clean_file else None
        text_orig = process_file(orig_file) if orig_file else None
        text_prep = process_file(prep_file) if prep_file else None

        # Add combined data row
        barcode, page = key
        page_url = page[:5].zfill(8)
        iiif_link = f"https://iiif.onb.ac.at/images/ABO/{barcode}/{page_url}/full/full/0/native.jpg"

        combined_data.append({
            "barcode": barcode,
            "page": page,
            "iiif_link": iiif_link,
            "text_clean": text_clean,
            "text_orig": text_orig,
            "text_prep": text_prep,
        })

    return combined_data

# Lists of folders to process
clean_folders = [
    'source/path/DHd 2025 dataset/Sonnini Z166069305/Z166069305_clean/',
]
orig_folders = [
    "source/path/02-texts/D19/Z166069305",
]
prep_folders = [
    'source/path/DHd 2025 dataset/Sonnini Z166069305/Z166069305_clean_preprocessed/',
]

# Collect file paths
clean_files = [file for folder in clean_folders for file in collect_files(folder)]
orig_files = [file for folder in orig_folders for file in collect_files(folder)]
prep_files = [file for folder in prep_folders for file in collect_files(folder)]

# Combine data from all folders
all_data = combine_data(clean_files, orig_files, prep_files)

# Specify the file path and create the directory if it does not exist
csv_file = 'output/path/DHd_index.csv'
os.makedirs(os.path.dirname(csv_file), exist_ok=True)

# Write data to CSV file
with open(csv_file, mode='w', newline='', encoding='utf-8') as file:
    writer = csv.DictWriter(file, fieldnames=["barcode", "page", "iiif_link", "text_clean", "text_orig", "text_prep"])
    writer.writeheader()
    writer.writerows(all_data)

#### IMPORTANT ####
#### Data cleaning is needed after storing the file ####

"""
# Clean data
# Specify columns to check and update
columns_to_check = ["text_clean", "text_prep"]

# Check for rows where any of the columns contain "status code" or "empty page"
rows_to_update = index_DHd[columns_to_check].applymap(lambda x: any(keyword in str(x) for keyword in ["status code", "empty page"])).any(axis=1)

# Replace content in the specified columns for the identified rows
index_DHd.loc[rows_to_update, columns_to_check] = "<empty page>"

# Remove artifacts from the LLM generation process
index_DHd['text_prep'] = index_DHd['text_prep'].str.strip("Here is the corrected text:")
"""

print(f"Data from all folders has been written to {csv_file}")