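"""Tanakh sequential verse-folding analyzer.

For a query phrase, this script XOR-folds the gematria resonance of every verse in
texts/torah/01.json .. 39.json into a 4096-bit holographic state, then splits that
state into 256 sixteen-bit target sums and greedily decodes each sum into a
combination of phrases drawn from the per-book index files produced by build_indices.py.
"""
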
import json
import logging
import argparse
import sys
import os
import re
import math
import numpy as np
from gematria import calculate_gematria

# --- Configuration ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
BOOK_RANGE = range(1, 40)
INDICES_DIR = "indices_by_book"
HOLOGRAPHIC_STATE_SIZE_BITS = 4096
BITS_PER_CHAR = 16 # Bit width of each block when converting numbers into binary strings

# --- Core functions ---

def xor_with_highest_power(total_sum, query_value):
    """XOR total_sum with the largest power of query_value that does not exceed it."""
    if total_sum <= 0 or query_value <= 1: return None
    if query_value > total_sum: power = 1
    else:
        exponent = math.floor(math.log(total_sum, query_value))
        power = query_value ** exponent
        # math.log works in floating point; correct the power if rounding pushed the exponent off by one
        while power > total_sum: power //= query_value
        while power * query_value <= total_sum: power *= query_value
    return total_sum ^ power

def prepare_phrase_inventory(all_indices):
    """Erstellt ein flaches, nach Bedeutung sortiertes Inventar aller Phrasen."""
    logging.info("Erstelle ein Inventar aller Phrasen aus 39 Büchern...")
    inventory = []
    seen_phrases = set()
    for book_num, index in all_indices.items():
        for gematria_val_str, data in index.items():
            gematria_val = int(gematria_val_str)
            pagerank = data.get('pagerank', 0)
            for phrase_data in data.get('phrases', []):
                phrase_text = phrase_data['text']
                if phrase_text not in seen_phrases:
                    count = phrase_data.get('count', 1)
                    score = pagerank / count if count > 0 else 0
                    inventory.append({"text": phrase_text, "gematria": gematria_val, "score": score})
                    seen_phrases.add(phrase_text)
    inventory.sort(key=lambda x: (-x['score'], x['gematria']))
    logging.info(f"{len(inventory)} einzigartige Phrasen im Inventar gefunden.")
    return inventory

def find_phrase_combination(target_sum, inventory):
    """Greedily find a meaningful combination of phrases for a target sum."""
    combination = []
    current_sum = 0
    # Walk the score-sorted inventory and take every phrase whose gematria value still fits
    for item in inventory:
        if current_sum + item['gematria'] <= target_sum:
            combination.append(item)
            current_sum += item['gematria']
    return combination, current_sum

# --- Main program ---

def main(args):
    # Load all indices needed for the final decoding
    all_indices = {}
    for i in BOOK_RANGE:
        index_path = os.path.join(INDICES_DIR, f"book_{i:02}_index.json")
        if os.path.exists(index_path):
            with open(index_path, 'r', encoding='utf-8') as f:
                all_indices[i] = json.load(f)
    if not all_indices:
        sys.exit("Keine Index-Dateien gefunden. Bitte 'build_indices.py' ausführen.")

    # 1. Prepare the phrase inventory
    phrase_inventory = prepare_phrase_inventory(all_indices)

    # 2. Compute the gematria value of the query
    query_value = calculate_gematria(args.query)
    if query_value <= 1:
        sys.exit(f"Anfrage '{args.query}' hat einen ungültigen Gematria-Wert ({query_value}).")

    # 3. Sequentially fold the resonances of all verses
    logging.info(f"Starting sequential verse folding for '{args.query}' (gematria: {query_value})...")
    final_state = np.zeros(HOLOGRAPHIC_STATE_SIZE_BITS, dtype=np.int8)

    for book_num in BOOK_RANGE:
        try:
            with open(f"texts/torah/{book_num:02}.json", 'r', encoding='utf-8') as file:
                data = json.load(file)
                logging.debug(f"Verarbeite Buch {book_num:02}...")
                for chapter in data.get("text", []):
                    for verse in chapter:
                        verse_sum = calculate_gematria(verse)
                        if verse_sum <= 1: continue

                        resonance_val = xor_with_highest_power(verse_sum, query_value)
                        if resonance_val is None: continue

                        # Convert the resonance value into a fixed-width binary string and XOR-fold it into the state
                        resonance_binary = format(resonance_val, f'0{HOLOGRAPHIC_STATE_SIZE_BITS}b')
                        resonance_array = np.array(list(resonance_binary), dtype=np.int8)

                        final_state = np.bitwise_xor(final_state, resonance_array)
        except FileNotFoundError:
            continue

    final_state_str = "".join(final_state.astype(str))
    logging.info("Finale holographische Resonanz-State wurde erstellt.")

    # 4. Decode the final state
    print("\n" + "="*15 + f" FINAL SYNTHESIS FOR '{args.query}' " + "="*15)

    # Split the final state into 256 gematria target sums (16-bit blocks)
    for i in range(0, HOLOGRAPHIC_STATE_SIZE_BITS, BITS_PER_CHAR):
        target_sum = int(final_state_str[i:i+BITS_PER_CHAR], 2)
        if target_sum == 0: continue

        print(f"\n--- Dekodiere Resonanz-Block #{i//BITS_PER_CHAR + 1} (Ziel-Summe: {target_sum}) ---")

        combination, achieved_sum = find_phrase_combination(target_sum, phrase_inventory)

        if combination:
            for item in combination:
                print(f"- {item['text']:<25} (G: {item['gematria']})")
            print(f"  -> Erreichte Summe: {achieved_sum} / {target_sum}")
        else:
            print("- Keine Kombination gefunden.")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Tanakh Sequential Verse-Folding Analyzer.")
    parser.add_argument("query", type=str, help="Die Abfragephrase (z.B. 'יהוה').")
    parser.add_argument("--debug", action="store_true", help="Aktiviert detaillierte Debug-Ausgaben.")
    args = parser.parse_args()
    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    main(args)