CogwiseAI committed on
Commit
8ade6e8
·
1 Parent(s): fa20b68

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -7
app.py CHANGED
@@ -73,11 +73,10 @@ st.markdown("""
73
  </style>
74
  """, unsafe_allow_html=True)
75
 
76
- # Load the model outside the handle_input() function
77
- with open('model_saved.pkl', 'rb') as f:
78
- model = pickle.load(f)
79
- if not isinstance(model, str):
80
- st.error("The loaded model is not valid.")
81
 
82
  def write_top_bar():
83
  col1, col2, col3 = st.columns([1,10,2])
@@ -110,8 +109,10 @@ def handle_input():
110
  if len(chat_history) == MAX_HISTORY_LENGTH:
111
  chat_history = chat_history[:-1]
112
 
113
- prompt = input
114
- answer = model # Replace the predict() method with the model itself
 
 
115
 
116
  chat_history.append((input, answer))
117
 
 
73
  </style>
74
  """, unsafe_allow_html=True)
75
 
76
+ # Load the model and tokenizer from Hugging Face Hub
77
+ model_name = "your_model_name" # Replace with the actual model name
78
+ model = AutoModelForCausalLM.from_pretrained(model_name)
79
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
 
80
 
81
  def write_top_bar():
82
  col1, col2, col3 = st.columns([1,10,2])
 
109
  if len(chat_history) == MAX_HISTORY_LENGTH:
110
  chat_history = chat_history[:-1]
111
 
112
+ # Generate response using the model
113
+ inputs = tokenizer.encode(input, return_tensors="pt")
114
+ outputs = model.generate(inputs)
115
+ answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
116
 
117
  chat_history.append((input, answer))
118