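"""Gradio app for Gematria search in the Tanach.

Phrases and their Gematria values are pre-computed into a local SQLite database
(gematria.db); the interface returns matching phrases with on-demand English
translations and Bible Gateway links.
"""
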
import gradio as gr
import json
import re
import sqlite3
import logging
import asyncio
import requests  # required for requests.exceptions.ConnectionError in translate_and_store()
from collections import defaultdict
from util import process_json_files
from gematria import calculate_gematria
from deep_translator import GoogleTranslator, exceptions
from urllib.parse import quote_plus

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Global variables for database connection and translator
conn = None
translator = None
book_names = {}  # Dictionary to store book names

# Global variable to track ongoing search tasks
ongoing_search_task = None

def flatten_text(text):
    """Helper function to flatten nested lists into a single list."""
    if isinstance(text, list):
        return " ".join(flatten_text(item) if isinstance(item, list) else item for item in text)
    return text

def initialize_database():
    """Initializes the SQLite database."""
    global conn
    # check_same_thread=False lets this connection be used from the worker thread
    # that populate_database() runs in via asyncio.to_thread().
    conn = sqlite3.connect('gematria.db', check_same_thread=False)
    c = conn.cursor()
    c.execute('''
    CREATE TABLE IF NOT EXISTS results (
        gematria_sum INTEGER,
        words TEXT UNIQUE,
        translation TEXT,
        book INTEGER,
        chapter INTEGER,
        verse INTEGER,
        PRIMARY KEY (words, book, chapter, verse)
    )
    ''')
    c.execute('''
    CREATE TABLE IF NOT EXISTS processed_books (
        book INTEGER PRIMARY KEY,
        max_phrase_length INTEGER
    )
    ''')
    conn.commit()
    logging.info("Database initialized.")

def initialize_translator():
    """Initializes the Google Translator."""
    global translator
    translator = GoogleTranslator(source='iw', target='en')
    logging.info("Translator initialized.")

def insert_phrase_to_db(gematria_sum, phrase_candidate, book, chapter, verse):
    """Inserts a phrase and its Gematria value into the database."""
    global conn
    c = conn.cursor()
    try:
        c.execute('''
            INSERT INTO results (gematria_sum, words, book, chapter, verse)
            VALUES (?, ?, ?, ?, ?)
        ''', (gematria_sum, phrase_candidate, book, chapter, verse))
        conn.commit()
        logging.debug(f"Inserted phrase: {phrase_candidate} (Gematria: {gematria_sum}) at {book}:{chapter}:{verse}")
    except sqlite3.IntegrityError:
        logging.debug(f"Phrase already exists: {phrase_candidate} (Gematria: {gematria_sum}) at {book}:{chapter}:{verse}")

async def populate_database_async(tanach_texts, max_phrase_length=1):
    """Asynchronous version of populate_database for concurrent execution."""
    # Database operations and logging are not thread-safe, so we run them in the main thread
    await asyncio.to_thread(populate_database, tanach_texts, max_phrase_length)

def populate_database(tanach_texts, max_phrase_length=1):
    """Populates the database with phrases from the Tanach and their Gematria values."""
    global conn, book_names
    logging.info("Populating database...")
    c = conn.cursor()

    for book_id, text in tanach_texts:  # Unpack the tuple (book_id, text)
        c.execute('''SELECT max_phrase_length FROM processed_books WHERE book = ?''', (book_id,))
        result = c.fetchone()
        if result and result[0] >= max_phrase_length:
            logging.info(f"Skipping book {book_id}: Already processed with max_phrase_length {result[0]}")
            continue

        logging.info(f"Processing book {book_id} with max_phrase_length {max_phrase_length}")
        if 'text' not in text or not isinstance(text['text'], list):
            logging.warning(f"Skipping book {book_id} due to missing or invalid 'text' field.")
            continue

        title = text.get('title', 'Unknown')
        book_names[book_id] = title  # Store book name

        chapters = text['text']
        for chapter_id, chapter in enumerate(chapters):
            if not isinstance(chapter, list):
                logging.warning(f"Skipping chapter {chapter_id} in book {title} due to invalid format.")
                continue
            for verse_id, verse in enumerate(chapter):
                verse_text = flatten_text(verse)
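                # Keep only Hebrew letters (U+05D0-U+05EA) and spaces, then collapse repeated spaces.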
                verse_text = re.sub(r"[^\u05D0-\u05EA ]+", "", verse_text)
                verse_text = re.sub(r" +", " ", verse_text)
                words = verse_text.split()
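                # Slide a window of every length from 1 to max_phrase_length over the verse
                # and store each phrase together with its Gematria value.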
                for length in range(1, max_phrase_length + 1):
                    for start in range(len(words) - length + 1):
                        phrase_candidate = " ".join(words[start:start + length])
                        gematria_sum = calculate_gematria(phrase_candidate.replace(" ", ""))
                        insert_phrase_to_db(gematria_sum, phrase_candidate, book_id, chapter_id + 1, verse_id + 1)
        try:
            c.execute('''INSERT INTO processed_books (book, max_phrase_length) VALUES (?, ?)''', (book_id, max_phrase_length))
        except sqlite3.IntegrityError:
            c.execute('''UPDATE processed_books SET max_phrase_length = ? WHERE book = ?''', (max_phrase_length, book_id))
        conn.commit()
    logging.info("Database population complete.")

def get_translation(phrase):
    """Retrieves or generates the English translation of a Hebrew phrase."""
    global translator, conn
    c = conn.cursor()
    c.execute('''
    SELECT translation FROM results
    WHERE words = ?
    ''', (phrase,))
    result = c.fetchone()
    if result and result[0]:
        return result[0]
    else:
        translation = translate_and_store(phrase)
        c.execute('''
            UPDATE results
            SET translation = ?
            WHERE words = ?
        ''', (translation, phrase))
        conn.commit()
        return translation


def translate_and_store(phrase):
    """Translates a Hebrew phrase to English, retrying on transient translator errors."""
    global translator
    max_retries = 3  # Number of attempts before giving up
    retries = 0

    while retries < max_retries:
        try:
            translation = translator.translate(phrase)
            logging.debug(f"Translated phrase: {translation}")
            return translation
        except (exceptions.TranslationNotFound, exceptions.NotValidPayload,
                exceptions.ServerException, exceptions.RequestError,
                requests.exceptions.ConnectionError) as e:
            retries += 1
            logging.warning(f"Error translating phrase '{phrase}': {e}. Retrying... ({retries}/{max_retries})")

    logging.error(f"Failed to translate phrase '{phrase}' after {max_retries} retries.")
    return "[Translation Error]"

def search_gematria_in_db(gematria_sum):
    """Searches the database for phrases with a given Gematria value."""
    global conn
    c = conn.cursor()
    c.execute('''
    SELECT words, book, chapter, verse FROM results WHERE gematria_sum = ?
    ''', (gematria_sum,))
    results = c.fetchall()
    logging.debug(f"Found {len(results)} matching phrases for Gematria: {gematria_sum}")
    return results

async def gematria_search_interface(phrase):
    """The main handler for the Gradio interface (async so the search task can be awaited)."""
    global ongoing_search_task, conn, book_names

    if not phrase.strip():
        return "Please enter a phrase."

    # Cancel any ongoing search task
    if ongoing_search_task is not None and not ongoing_search_task.done():
        ongoing_search_task.cancel()

    # Open a fresh connection for this request (check_same_thread=False because the
    # background population thread also goes through the shared global `conn`).
    conn = sqlite3.connect('gematria.db', check_same_thread=False)
    c = conn.cursor()

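    # Strip spaces so the query value is computed the same way as the stored phrase values.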
    phrase_gematria = calculate_gematria(phrase.replace(" ", ""))
    logging.info(f"Searching for phrases with Gematria: {phrase_gematria}")

    # Start the search asynchronously
    async def search_task():
        matching_phrases = search_gematria_in_db(phrase_gematria)

        if not matching_phrases:
            return "No matching phrases found."

        # Sort and group results
        sorted_phrases = sorted(matching_phrases, key=lambda x: (x[1], x[2], x[3]))
        results_by_book = defaultdict(list)
        for words, book, chapter, verse in sorted_phrases:
            results_by_book[book].append((words, chapter, verse))

        # Format results for display
        results = []
        results.append("<div class='results-container'>")
        for book, phrases in results_by_book.items():
            results.append(f"<h4>Book: {book_names.get(book, 'Unknown')}</h4>")
            for words, chapter, verse in phrases:
                translation = get_translation(words)
                book_name_english = book_names.get(book, 'Unknown')
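                # Link to the verse on Bible Gateway as "Book Chapter:Verse" (the colon is URL-encoded as %3A).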
                link = f"https://www.biblegateway.com/passage/?search={quote_plus(book_name_english)}+{chapter}%3A{verse}"

                results.append(f"""
                <div class='result-item'>
                    <p>Chapter: {chapter}, Verse: {verse}</p>
                    <p class='hebrew-phrase'>Hebrew Phrase: {words}</p>
                    <p>Translation: {translation}</p>
                    <a href='{link}' target='_blank' class='bible-link'>[See on Bible Gateway]</a>
                </div>
                """)
        results.append("</div>")

        # Add CSS styling
        style = """
        <style>
            .results-container {
                display: grid;
                grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); 
                gap: 20px;
            }

            .result-item {
                border: 1px solid #ccc;
                padding: 15px;
                border-radius: 5px;
                box-shadow: 2px 2px 5px rgba(0, 0, 0, 0.1);
            }

            .hebrew-phrase {
                font-family: 'SBL Hebrew', 'Ezra SIL', serif; 
                direction: rtl; 
            }

            .bible-link {
                display: block;
                margin-top: 10px;
                color: #007bff; 
                text-decoration: none;
            }
        </style>
        """

        return style + "\n".join(results)

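    # Run the search as an asyncio task so a newer query can cancel it via ongoing_search_task.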
    ongoing_search_task = asyncio.create_task(search_task())
    return await ongoing_search_task

async def run_app():
    """Initializes everything and launches the Gradio app."""
    initialize_database()
    initialize_translator()

    # Start database population in the background: single-word phrases for books 1-39,
    # phrases of up to 4 words for books 1 and 27.
    population_tasks = [
        asyncio.create_task(populate_database_async(process_json_files(1, 39), max_phrase_length=1)),
        asyncio.create_task(populate_database_async(process_json_files(1, 1), max_phrase_length=4)),
        asyncio.create_task(populate_database_async(process_json_files(27, 27), max_phrase_length=4)),
    ]

    iface = gr.Interface(
        fn=gematria_search_interface,
        inputs=gr.Textbox(label="Enter phrase"),
        outputs=gr.HTML(label="Results"),
        title="Gematria Search in Tanach",
        description="Search for phrases in the Tanach that have the same Gematria value.",
        live=False,
        allow_flagging="never"
    )

    # launch() would normally block this event loop before the population tasks ever ran;
    # prevent_thread_lock=True starts the server in its own thread instead.
    iface.launch(prevent_thread_lock=True)

    # Let the background population finish, then keep the loop (and the server) alive.
    await asyncio.gather(*population_tasks)
    await asyncio.Event().wait()

if __name__ == "__main__":
    asyncio.run(run_app())