import json
import logging
import argparse
import sys
import os
import math
import pickle
from deep_translator import GoogleTranslator
from gematria import calculate_gematria
from collections import defaultdict
# --- Configuration ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
BOOK_RANGE = range(1, 40)
CACHE_FILE = "tanakh_phrasedict.cache"

# --- Core functions ---
def get_power_result(total_sum, query_value):
    """Computes the power result based on the highest possible exponent."""
    if total_sum <= 0 or query_value <= 1 or query_value > total_sum:
        return 1  # fall back to a power of 1 (query_value**0)
    try:
        exponent = int(math.floor(math.log(total_sum, query_value)))
        return query_value ** exponent
    except (ValueError, OverflowError):
        return 1
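
# Illustrative example (assumed numbers, not from the source): for a verse sum of 913
# and a query value of 26, floor(log_26(913)) = 2, so get_power_result(913, 26)
# returns 26**2 = 676, i.e. the largest power of the query value not exceeding the verse sum.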
def load_phrase_dictionary():
    if not os.path.exists(CACHE_FILE):
        sys.exit(f"ERROR: Cache file '{CACHE_FILE}' not found. Please run 'build_indices.py' first.")
    logging.info(f"Loading phrase dictionary from cache: {CACHE_FILE}")
    try:
        with open(CACHE_FILE, 'rb') as f:
            return pickle.load(f)
    except Exception as e:
        sys.exit(f"ERROR: Cache file is corrupt. Please delete it and run 'build_indices.py' again. Error: {e}")
def find_all_matching_phrases(target_sum, phrase_dictionary):
    return phrase_dictionary.get(target_sum, [])
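
# Assumed cache layout, inferred from the lookup above and the fields used further down
# (not documented in the source): a dict mapping a gematria sum to a list of phrase
# records, e.g. {913: [{'text': '...', 'words': 2, 'freq': 5}, ...], ...}.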
# --- Main program ---
def main(args):
    phrase_dictionary = load_phrase_dictionary()
    query_value = calculate_gematria(args.query)
    if query_value <= 1:
        sys.exit(f"Query '{args.query}' has an invalid gematria value ({query_value}).")
    translator = None
    if args.translate:
        try:
            # FIX: the correct Google Translate language code for Hebrew is 'iw'
            translator = GoogleTranslator(source='iw', target='en')
        except Exception as e:
            logging.error(f"Could not initialize the translator: {e}")
    logging.info(f"Starting oracle analysis for '{args.query}' (G:{query_value}) with bitplane depth {args.xor_depth}...")
    print("\n" + "="*20 + f" ORACLE ANSWERS FOR '{args.query}' " + "="*20)
    verses_processed = 0
    resonance_count = 0
    for book_num in BOOK_RANGE:
        if args.process_verses and verses_processed >= args.process_verses:
            break
        filepath = f"texts/torah/{book_num:02}.json"
        try:
            with open(filepath, 'r', encoding='utf-8') as file:
                data = json.load(file)
            for chap_idx, chapter in enumerate(data.get("text", []), start=1):
                if args.process_verses and verses_processed >= args.process_verses:
                    break
                for verse_idx, verse_text in enumerate(chapter, start=1):
                    if args.process_verses and verses_processed >= args.process_verses:
                        break
                    verses_processed += 1
                    verse_sum = calculate_gematria(verse_text)
                    if verse_sum <= 1:
                        continue
                    # *** Final logic: bitplane depth of the power result ***
                    power_result = get_power_result(verse_sum, query_value)
                    header_printed = False
                    # Iterate over the bit planes from 0 up to the given depth
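                    # Illustrative example (assumed numbers, not from the source): if
                    # power_result were 676 (binary 1010100100), only the set bits at
                    # depths 2, 5, 7 and 9 are checked, and at depth 2 the lookup key
                    # becomes target_sum = verse_sum ^ 4.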
                    for depth in range(args.xor_depth):
                        bit_mask = 1 << depth
                        # Only check planes where the bit is actually set in the power result
                        bitplane_value = power_result & bit_mask
                        if bitplane_value == 0:
                            continue
                        target_sum = verse_sum ^ bitplane_value
                        all_matches = find_all_matching_phrases(target_sum, phrase_dictionary)
                        if all_matches:
                            if not header_printed:
                                resonance_count += 1
                                verse_ref = f"B{book_num:02d}, K{chap_idx}, V{verse_idx}"
                                print(f"\n--- Resonance #{resonance_count} in [{verse_ref}] (G_sum:{verse_sum}) ---")
                                print(f"Original verse: {verse_text.strip()}")
                                print(f"    [INFO] Power result of the query: {power_result}")
                                header_printed = True
                            # Rank matches by frequency per word so short, frequent phrases come first
                            all_matches.sort(key=lambda p: (p.get('freq', 0) / p.get('words', 99)), reverse=True)
                            matches_to_show = all_matches[:args.results_per_verse]
                            print(f"  ↳ Bitplane depth {depth}: [G_sum:{verse_sum}] ^ (bit {depth} of {power_result}) → [G_target:{target_sum}]")
                            for match in matches_to_show:
                                translation = ""
                                if translator:
                                    try:
                                        translation = translator.translate(match['text'])
                                    except Exception:
                                        translation = "[translation failed]"
                                score = (match.get('freq', 0) / match.get('words', 99))
                                info = f"(words: {match.get('words', 'N/A')}, freq: {match.get('freq', 'N/A')}, score: {score:.2f})"
                                print(f"    - {match['text']} {info}")
                                if translation:
                                    print(f"      ↳ Interpretation: \"{translation}\"")
        except FileNotFoundError:
            continue
    logging.info(f"Analysis finished. Found {resonance_count} resonance verses among {verses_processed} analyzed verses.")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Tanakh Universal Resonance Analyzer with bitplane depth.")
    parser.add_argument("query", type=str, help="The query phrase (e.g. 'יהוה').")
    parser.add_argument("--translate", action="store_true", help="Enable automatic translation.")
    parser.add_argument("--process-verses", type=int, help="Maximum number of verses to analyze, counted from the beginning.")
    parser.add_argument("--results-per-verse", type=int, default=3, help="Maximum number of oracle answers per found resonance (default: 3).")
    parser.add_argument("--xor-depth", type=int, default=16, help="Maximum bitplane depth to check (e.g. 16 for 16-bit numbers, default: 16).")
    args = parser.parse_args()
    main(args)
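
# Example invocation (a sketch; the script name and data layout are assumptions, since the
# source only references 'texts/torah/NN.json' and the cache written by 'build_indices.py'):
#   python build_indices.py
#   python oracle_analyzer.py 'יהוה' --translate --process-verses 500 --results-per-verse 3 --xor-depth 16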