neuralworm committed on
Commit
71fa3d9
·
1 Parent(s): fbdb959

better query calc

Browse files
Files changed (1) hide show
  1. analyze_verses_universal.py +80 -52
analyze_verses_universal.py CHANGED
@@ -9,68 +9,93 @@ from deep_translator import GoogleTranslator
9
  from gematria import calculate_gematria
10
  from collections import defaultdict
11
 
12
- # --- Konfiguration ---
13
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
14
  BOOK_RANGE = range(1, 40)
15
  CACHE_FILE = "tanakh_phrasedict.cache"
16
 
17
- # --- Kernfunktionen ---
 
18
  def get_power_result(total_sum, query_value):
19
- """Berechnet das Potenz-Ergebnis basierend auf dem höchsten möglichen Exponenten."""
 
 
 
 
20
  if total_sum <= 0 or query_value <= 1:
21
  return 1
22
- elif query_value > total_sum:
23
- return math.ceil(math.sqrt(query_value))
24
 
25
- try:
26
- exponent = int(math.floor(math.log(total_sum, query_value)))
27
- return query_value ** exponent
28
- except (ValueError, OverflowError):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  return 1
30
 
31
  def load_phrase_dictionary():
32
  if not os.path.exists(CACHE_FILE):
33
- sys.exit(f"FEHLER: Cache-Datei '{CACHE_FILE}' nicht gefunden. Bitte 'build_indices.py' ausführen.")
34
- logging.info(f"Lade Phrasen-Wörterbuch aus Cache: {CACHE_FILE}")
35
  try:
36
  with open(CACHE_FILE, 'rb') as f:
37
  return pickle.load(f)
38
  except Exception as e:
39
- sys.exit(f"FEHLER: Cache-Datei ist korrupt. Bitte löschen und 'build_indices.py' erneut ausführen. Fehler: {e}")
40
 
41
  def find_all_matching_phrases(target_sum, phrase_dictionary):
42
- return phrase_dictionary.get(target_sum, [])
43
 
44
- # --- Hauptprogramm ---
45
  def main(args):
46
  phrase_dictionary = load_phrase_dictionary()
47
  query_value = calculate_gematria(args.query)
48
  if query_value <= 1:
49
- sys.exit(f"Anfrage '{args.query}' hat einen ungültigen Gematria-Wert ({query_value}).")
50
 
51
  translator = None
52
  if args.translate:
53
  try:
 
54
  translator = GoogleTranslator(source='iw', target='en')
55
  except Exception as e:
56
- logging.error(f"Konnte Übersetzer nicht initialisieren: {e}")
57
 
58
- logging.info(f"Starte Orakel-Analyse für '{args.query}' (G:{query_value}) mit Bitplane-Variationstiefe {args.xor_depth}...")
59
- print("\n" + "="*20 + f" ORAKEL-ANTWORTEN FÜR '{args.query}' " + "="*20)
60
 
61
  verses_processed = 0
62
  resonance_count = 0
63
 
64
  for book_num in BOOK_RANGE:
65
  if args.process_verses and verses_processed >= args.process_verses: break
66
-
67
  filepath = f"texts/torah/{book_num:02}.json"
68
  try:
69
  with open(filepath, 'r', encoding='utf-8') as file:
70
  data = json.load(file)
71
  for chap_idx, chapter in enumerate(data.get("text", []), start=1):
72
  if args.process_verses and verses_processed >= args.process_verses: break
73
-
74
  for verse_idx, verse_text in enumerate(chapter, start=1):
75
  if args.process_verses and verses_processed >= args.process_verses: break
76
  verses_processed += 1
@@ -79,69 +104,72 @@ def main(args):
79
  if verse_sum <= 1: continue
80
 
81
  power_result = get_power_result(verse_sum, query_value)
82
-
83
- # Zuerst das Haupt-Ergebnis berechnen
84
  main_target_sum = verse_sum ^ power_result
85
  main_matches = find_all_matching_phrases(main_target_sum, phrase_dictionary)
86
 
87
- # Nur fortfahren, wenn die Haupt-Resonanz existiert
88
  if not main_matches:
89
  continue
90
 
91
  resonance_count += 1
92
- verse_ref = f"B{book_num:02d}, K{chap_idx}, V{verse_idx}"
93
- print(f"\n--- Resonanz #{resonance_count} in [{verse_ref}] (G_sum:{verse_sum}) ---")
94
- print(f"Originalvers: {verse_text.strip()}")
95
-
96
  def print_matches(matches, title, calculation_str):
97
  if not matches: return
98
-
99
  matches.sort(key=lambda p: (p.get('freq', 0) / p.get('words', 99)), reverse=True)
100
  matches_to_show = matches[:args.results_per_verse]
101
 
102
  print(f" ↳ {title}: {calculation_str}")
103
-
104
  for match in matches_to_show:
105
  translation_str = ""
106
  if translator:
107
- try: translation_str = translator.translate(match['text'])
108
- except Exception: translation_str = "[Übersetzung fehlgeschlagen]"
 
 
109
 
110
  score = (match.get('freq', 0) / match.get('words', 99))
111
- info = f"(Wörter: {match.get('words', 'N/A')}, Freq: {match.get('freq', 'N/A')}, Score: {score:.2f})"
112
  print(f" - {match['text']} {info}")
113
  if translation_str:
114
  print(f" ↳ Interpretation: \"{translation_str}\"")
115
 
116
- # 1. Die Haupt-Resonanz anzeigen
117
- calc_str = f"[{verse_sum}] ^ [{power_result}] → [G_ziel:{main_target_sum}]"
118
- print_matches(main_matches, "Haupt-Resonanz", calc_str)
119
-
120
- # 2. Die Bitplane-Variationen des ERGEBNISSES anzeigen
121
  if args.xor_depth > 0:
122
- print(f" [INFO] Bitplane-Variationen des Ergebnisses ({main_target_sum}):")
123
  for depth in range(args.xor_depth):
124
  bit_flip = 1 << depth
125
-
126
- # Flippe das Bit 'd' im Hauptergebnis
127
  target_sum = main_target_sum ^ bit_flip
128
-
129
  bitplane_matches = find_all_matching_phrases(target_sum, phrase_dictionary)
130
-
131
  if bitplane_matches:
132
- bitplane_calc_str = f"[{main_target_sum}] ^ [Bit {depth}] [G_ziel:{target_sum}]"
133
- print_matches(bitplane_matches, f"Variation (Tiefe {depth})", bitplane_calc_str)
134
-
 
135
  except FileNotFoundError: continue
136
 
137
- logging.info(f"Analyse abgeschlossen. {resonance_count} Resonanz-Verse in {verses_processed} analysierten Versen gefunden.")
138
 
139
  if __name__ == "__main__":
140
- parser = argparse.ArgumentParser(description="Tanakh Universal Resonance Analyzer mit Bitplane-Variationen.")
141
- parser.add_argument("query", type=str, help="Die Abfragephrase (z.B. 'יהוה').")
142
- parser.add_argument("--translate", action="store_true", help="Aktiviert die automatische Übersetzung.")
143
- parser.add_argument("--process-verses", type=int, default=10, help="Maximale Anzahl der zu analysierenden Start-Verse.")
144
- parser.add_argument("--results-per-verse", type=int, default=3, help="Maximale Orakel-Antworten pro Resonanz-Typ (Standard: 3).")
145
- parser.add_argument("--xor-depth", type=int, default=16, help="Maximale Tiefe für Bitplane-Variationen des Ergebnisses (0-15) (Standard: 16).")
146
  args = parser.parse_args()
147
  main(args)
 
9
  from gematria import calculate_gematria
10
  from collections import defaultdict
11
 
12
+ # --- Configuration ---
13
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
14
  BOOK_RANGE = range(1, 40)
15
  CACHE_FILE = "tanakh_phrasedict.cache"
16
 
17
+ # --- Core Functions ---
18
+
19
def get_power_result(total_sum, query_value):
    """
    Calculate the power-or-root value that is XORed against the verse sum.

    - Power mode (query_value <= total_sum): return the highest integer
      power of query_value that is still <= total_sum.
    - Root mode (query_value > total_sum): return the smallest n-th root
      (n from 2 to 64) of query_value whose value is <= total_sum,
      rounded up to an integer.

    Args:
        total_sum: Gematria sum of the verse (positive int expected).
        query_value: Gematria value of the query phrase (int > 1 expected).

    Returns:
        A positive integer; 1 for degenerate inputs or on math errors.
    """
    if total_sum <= 0 or query_value <= 1:
        return 1

    # Normal case: "Power Mode"
    if query_value <= total_sum:
        try:
            exponent = int(math.floor(math.log(total_sum, query_value)))
        except (ValueError, OverflowError):
            return 1
        # FIX: math.log is computed in floating point and can be off by one
        # for exact powers (e.g. math.log(1000, 10) == 2.999...), which made
        # the old code return 100 instead of 1000. Correct the estimate with
        # exact integer arithmetic.
        while query_value ** (exponent + 1) <= total_sum:
            exponent += 1
        while exponent > 0 and query_value ** exponent > total_sum:
            exponent -= 1
        return query_value ** exponent

    # Special case: "Root Mode" (query_value > total_sum).
    # Find the smallest integer root index n that reduces query_value
    # to a number <= total_sum.
    for n in range(2, 65):  # limit the search to a reasonable range (64th root)
        try:
            root_result = query_value ** (1.0 / n)
        except (ValueError, OverflowError):
            # Catches math errors with extreme numbers.
            return 1
        if root_result <= total_sum:
            # ceil(root_result) <= total_sum because total_sum is an integer.
            return math.ceil(root_result)

    # If even the 64th root is too large (extreme number ratios),
    # return the neutral default.
    return 1
54
 
55
def load_phrase_dictionary():
    """Load the pickled phrase dictionary from CACHE_FILE, exiting with a user-facing message on any failure."""
    if not os.path.exists(CACHE_FILE):
        sys.exit(f"ERROR: Cache file '{CACHE_FILE}' not found. Please run 'build_indices.py' first to create the index.")
    logging.info(f"Loading phrase dictionary from cache: {CACHE_FILE}")
    try:
        with open(CACHE_FILE, 'rb') as cache_fh:
            phrase_dict = pickle.load(cache_fh)
    except Exception as e:
        sys.exit(f"ERROR: Cache file '{CACHE_FILE}' is corrupt. Please delete it and run 'build_indices.py' again. Error: {e}")
    return phrase_dict
64
 
65
def find_all_matching_phrases(target_sum, phrase_dictionary):
    """Return the list of phrases whose gematria sum equals target_sum, or [] if none exist."""
    # Keys are stored as plain ints, so normalize the lookup value first.
    key = int(target_sum)
    if key in phrase_dictionary:
        return phrase_dictionary[key]
    return []
67
 
68
+ # --- Main Program ---
69
  def main(args):
70
  phrase_dictionary = load_phrase_dictionary()
71
  query_value = calculate_gematria(args.query)
72
  if query_value <= 1:
73
+ sys.exit(f"ERROR: Query '{args.query}' has an invalid Gematria value ({query_value}).")
74
 
75
  translator = None
76
  if args.translate:
77
  try:
78
+ # The correct code for Hebrew is 'iw'
79
  translator = GoogleTranslator(source='iw', target='en')
80
  except Exception as e:
81
+ logging.error(f"Could not initialize translator: {e}")
82
 
83
+ logging.info(f"Starting oracle analysis for '{args.query}' (G:{query_value}) with bitplane variation depth {args.xor_depth}...")
84
+ print("\n" + "="*20 + f" ORACLE ANSWERS FOR '{args.query}' " + "="*20)
85
 
86
  verses_processed = 0
87
  resonance_count = 0
88
 
89
  for book_num in BOOK_RANGE:
90
  if args.process_verses and verses_processed >= args.process_verses: break
91
+
92
  filepath = f"texts/torah/{book_num:02}.json"
93
  try:
94
  with open(filepath, 'r', encoding='utf-8') as file:
95
  data = json.load(file)
96
  for chap_idx, chapter in enumerate(data.get("text", []), start=1):
97
  if args.process_verses and verses_processed >= args.process_verses: break
98
+
99
  for verse_idx, verse_text in enumerate(chapter, start=1):
100
  if args.process_verses and verses_processed >= args.process_verses: break
101
  verses_processed += 1
 
104
  if verse_sum <= 1: continue
105
 
106
  power_result = get_power_result(verse_sum, query_value)
107
+
108
+ # Calculate the main result first
109
  main_target_sum = verse_sum ^ power_result
110
  main_matches = find_all_matching_phrases(main_target_sum, phrase_dictionary)
111
 
112
+ # Only proceed if a main resonance exists
113
  if not main_matches:
114
  continue
115
 
116
  resonance_count += 1
117
+ verse_ref = f"B{book_num:02d}, C{chap_idx}, V{verse_idx}"
118
+ print(f"\n--- Resonance #{resonance_count} in [{verse_ref}] (G_sum:{verse_sum}) ---")
119
+ print(f"Original Verse: {verse_text.strip()}")
120
+
121
  def print_matches(matches, title, calculation_str):
122
  if not matches: return
123
+
124
  matches.sort(key=lambda p: (p.get('freq', 0) / p.get('words', 99)), reverse=True)
125
  matches_to_show = matches[:args.results_per_verse]
126
 
127
  print(f" ↳ {title}: {calculation_str}")
128
+
129
  for match in matches_to_show:
130
  translation_str = ""
131
  if translator:
132
+ try:
133
+ translation_str = translator.translate(match['text'])
134
+ except Exception:
135
+ translation_str = "[Translation failed]"
136
 
137
  score = (match.get('freq', 0) / match.get('words', 99))
138
+ info = f"(Words: {match.get('words', 'N/A')}, Freq: {match.get('freq', 'N/A')}, Score: {score:.2f})"
139
  print(f" - {match['text']} {info}")
140
  if translation_str:
141
  print(f" ↳ Interpretation: \"{translation_str}\"")
142
 
143
+ # 1. Display the main resonance
144
+ calc_str = f"[{verse_sum}] ^ [{power_result}] → [G_target:{main_target_sum}]"
145
+ print_matches(main_matches, "Main Resonance", calc_str)
146
+
147
+ # 2. Display the bitplane variations of the RESULT
148
  if args.xor_depth > 0:
149
+ print(f" [INFO] Bitplane Variations of the Result ({main_target_sum}):")
150
  for depth in range(args.xor_depth):
151
  bit_flip = 1 << depth
152
+
153
+ # Flip the 'd'-th bit in the main result
154
  target_sum = main_target_sum ^ bit_flip
155
+
156
  bitplane_matches = find_all_matching_phrases(target_sum, phrase_dictionary)
157
+
158
  if bitplane_matches:
159
+ # FIX: The label is now depth + 1 for human readability
160
+ bitplane_calc_str = f"[{main_target_sum}] ^ [Bit {depth+1}] → [G_target:{target_sum}]"
161
+ print_matches(bitplane_matches, f"Variation (Depth {depth + 1})", bitplane_calc_str)
162
+
163
  except FileNotFoundError: continue
164
 
165
+ logging.info(f"Analysis complete. Found {resonance_count} resonance groups in {verses_processed} analyzed verses.")
166
 
167
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Tanakh Universal Resonance Analyzer with Bitplane Variations.")
    parser.add_argument("query", type=str, help="The query phrase (e.g., 'יהוה').")
    # NOTE(review): with action="store_true" AND default=True this flag can
    # never be switched off from the command line; kept as-is to preserve
    # current always-on behavior — confirm whether an opt-out is wanted.
    parser.add_argument("--translate", action="store_true", default=True, help="Enable automatic translation of results to English.")
    parser.add_argument("--process-verses", type=int, default=10, help="Maximum number of starting verses to analyze (default: 10).")
    # FIX: the help texts below previously advertised stale defaults
    # (3 and 16) that no longer matched the actual defaults (1 and 2).
    parser.add_argument("--results-per-verse", type=int, default=1, help="Maximum oracle answers to show per resonance type (default: 1).")
    parser.add_argument("--xor-depth", type=int, default=2, help="Maximum depth for bitplane variations of the result (0-15) (default: 2).")
    args = parser.parse_args()
    main(args)