ZeeAI1 committed
Commit 388e3b5 · verified · 1 Parent(s): 8568540

Update app.py

Files changed (1)
  1. app.py +41 -19
app.py CHANGED
@@ -1,16 +1,34 @@
 import sqlite3
 import json
-from transformers import AutoModelForCausalLM, AutoTokenizer
 import uuid
 import datetime
+import logging
 
-# Initialize AI model (Mistral-7B placeholder; replace with actual model)
-model_name = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+# Setup logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+# Attempt to import transformers
 try:
-    tokenizer = AutoTokenizer.from_pretrained(model_name)
-    model = AutoModelForCausalLM.from_pretrained(model_name)
-except:
-    # Fallback for demo (mock AI response)
+    from transformers import AutoModelForCausalLM, AutoTokenizer
+    TRANSFORMERS_AVAILABLE = True
+except ImportError:
+    logging.warning("Transformers library not found. Using fallback parser.")
+    TRANSFORMERS_AVAILABLE = False
+    AutoModelForCausalLM = None
+    AutoTokenizer = None
+
+# Initialize AI model (distilbert as placeholder; replace with fine-tuned model or Mistral-7B)
+model_name = "distilbert-base-uncased"  # Lightweight model for demo
+if TRANSFORMERS_AVAILABLE:
+    try:
+        tokenizer = AutoTokenizer.from_pretrained(model_name)
+        model = AutoModelForCausalLM.from_pretrained(model_name)
+        logging.info(f"Loaded model: {model_name}")
+    except Exception as e:
+        logging.error(f"Failed to load model {model_name}: {e}")
+        tokenizer = None
+        model = None
+else:
     tokenizer = None
     model = None
 
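With this hunk both the transformers import and the model load are optional: an ImportError or a failed download is logged and leaves model and tokenizer as None, and the rest of app.py only has to check those two globals. One caveat: distilbert-base-uncased is a masked-LM checkpoint, so loading it through AutoModelForCausalLM may itself fail and land in the except branch, which still degrades gracefully to the fallback parser. A minimal sketch (model name copied from the diff, everything else illustrative) to check locally which path the app will take:

import logging

logging.basicConfig(level=logging.INFO)

try:
    from transformers import AutoModelForCausalLM, AutoTokenizer
    TRANSFORMERS_AVAILABLE = True
except ImportError:
    TRANSFORMERS_AVAILABLE = False

model = tokenizer = None
if TRANSFORMERS_AVAILABLE:
    try:
        tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
        model = AutoModelForCausalLM.from_pretrained("distilbert-base-uncased")
    except Exception as exc:
        # e.g. offline, or the checkpoint has no causal-LM head
        logging.error("Model load failed: %s", exc)
        model = tokenizer = None

print("model path" if (model and tokenizer) else "fallback parser path")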
@@ -79,24 +97,25 @@ def initialize_chart_of_accounts():
         VALUES (?, ?, ?, ?, ?, ?)
     """, accounts)
     conn.commit()
+    logging.info("Chart of accounts initialized.")
 
 # Parse prompt using AI model (or fallback)
 def parse_prompt(prompt):
     if model and tokenizer:
-        input_text = f"""
-        Parse the following accounting prompt into a JSON object with:
-        - debit: {account, type, amount}
-        - credit: {account, type, amount}
-        - payment_method: 'cash' or 'credit' or null
-        Prompt: {prompt}
-        """
-        inputs = tokenizer(input_text, return_tensors="pt")
-        outputs = model.generate(**inputs, max_length=300)
-        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
         try:
+            input_text = f"""
+            Parse the following accounting prompt into a JSON object with:
+            - debit: {{account, type, amount}}
+            - credit: {{account, type, amount}}
+            - payment_method: 'cash' or 'credit' or null
+            Prompt: {prompt}
+            """
+            inputs = tokenizer(input_text, return_tensors="pt")
+            outputs = model.generate(**inputs, max_length=300)
+            response = tokenizer.decode(outputs[0], skip_special_tokens=True)
             return json.loads(response)
-        except:
-            pass
+        except Exception as e:
+            logging.warning(f"Model parsing failed: {e}. Using fallback parser.")
 
     # Fallback parsing for common scenarios
     prompt_lower = prompt.lower()
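Moving the generate/decode calls inside the try block means any model-side failure is logged as a warning and execution falls through to the rule-based parser instead of raising. One thing this hunk does not change: tokenizer.decode returns the full generated text, which typically echoes the prompt, so json.loads(response) on the raw string will usually fail even when the model did emit JSON. A possible follow-up, shown here only as a sketch and not part of this commit, is to pull the first balanced JSON object out of the text before parsing:

import json

def extract_first_json(text):
    # Best effort: return the first balanced {...} block parsed as JSON, else None.
    start = text.find("{")
    while start != -1:
        depth = 0
        for end in range(start, len(text)):
            if text[end] == "{":
                depth += 1
            elif text[end] == "}":
                depth -= 1
                if depth == 0:
                    try:
                        return json.loads(text[start:end + 1])
                    except json.JSONDecodeError:
                        break  # malformed candidate, try the next "{"
        start = text.find("{", start + 1)
    return None

raw = 'Prompt: ... {"debit": {"account": "Cash", "amount": 500}, "credit": {"account": "Sales", "amount": 500}, "payment_method": "cash"}'
print(extract_first_json(raw))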
@@ -110,6 +129,7 @@ def parse_prompt(prompt):
             pass
 
     if not amount:
+        logging.error("No amount found in prompt.")
         return None
 
     if "laptop" in prompt_lower:
@@ -130,6 +150,7 @@ def parse_prompt(prompt):
             "credit": {"account": credit_account, "type": credit_type, "amount": amount},
             "payment_method": payment_method
         }
+    logging.error("Prompt not recognized.")
     return None
 
 # Generate journal entry
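For readers skimming the diff, this is the shape parse_prompt is expected to return, following the prompt template above; the account names here are made up for illustration. A caller could also sanity-check that the entry balances before posting it:

parsed = {
    "debit": {"account": "Office Equipment", "type": "asset", "amount": 1200.0},
    "credit": {"account": "Cash", "type": "asset", "amount": 1200.0},
    "payment_method": "cash",
}

def is_balanced(entry):
    # A journal entry is only valid when the debit and credit amounts match.
    return bool(entry) and entry["debit"]["amount"] == entry["credit"]["amount"]

assert is_balanced(parsed)
assert not is_balanced(None)  # parse_prompt returns None for unrecognized prompts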
@@ -184,6 +205,7 @@ def generate_journal_entry(prompt, follow_up_response=None):
         VALUES (?, ?, ?, ?, ?, ?)
     """, (entry_id, date, debit_result[0], credit_result[0], amount, prompt))
     conn.commit()
+    logging.info(f"Journal entry created: Debit {debit_account} ${amount}, Credit {credit_account} ${amount}")
 
     return f"Journal Entry Created: Debit {debit_account} ${amount}, Credit {credit_account} ${amount}"
 
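The new logging call mirrors the string already returned to the caller, so the database write and the user-facing message stay in sync. As a self-contained illustration of the same parameterized-insert pattern (in-memory database and made-up schema; the real table and column names are defined elsewhere in app.py and are not visible in this hunk):

import sqlite3
import uuid
import datetime

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE journal (
        entry_id TEXT PRIMARY KEY,
        date TEXT,
        debit_account TEXT,
        credit_account TEXT,
        amount REAL,
        source_prompt TEXT
    )
""")

entry_id = str(uuid.uuid4())
date = datetime.date.today().isoformat()
conn.execute(
    "INSERT INTO journal VALUES (?, ?, ?, ?, ?, ?)",
    (entry_id, date, "Office Equipment", "Cash", 1200.0, "Bought a laptop for $1200 cash"),
)
conn.commit()

print(conn.execute("SELECT debit_account, credit_account, amount FROM journal").fetchone())
conn.close()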
211