michailroussos committed on
Commit b55dba2 · 1 Parent(s): c4001e3
Files changed (1)
  1. app.py +10 -2
app.py CHANGED
@@ -48,12 +48,20 @@ def respond(message, max_new_tokens, temperature, system_message="You are a help
        temperature=temperature,
        use_cache=True,
    )
-   print("[DEBUG] message:",messages)
-
+   promt = messages[0]['content']
+   promt += "assistant"
+   print("[DEBUG] prompt with assistant:",promt)
+
    # Decode the generated tokens back to text
    generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
    print("[DEBUG] Generated Text:", generated_text)

+   start_pos = generated_text.find(promt)
+   result_text = generated_text[start_pos + len(promt):]
+   print("[DEBUG] Result Text:", result_text)
+
+   #print("[DEBUG] Generated Text:", generated_text)
+
    # Clean up the response by removing unwanted parts (e.g., system and user info)
    cleaned_response = "".join(generated_text.split("\n")[9:])  # Assuming the response ends at the last line
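
In short, the commit rebuilds the prompt string from messages[0]['content'] plus a literal "assistant" marker, finds that prompt inside the decoded output, and keeps only the text after it. Below is a minimal standalone sketch of that slicing idea; the strip_prompt helper and the sample strings are illustrative only and are not part of app.py, and like the commit it assumes the prompt appears verbatim in the decoded text.

# Illustrative sketch only (not part of app.py): the same prompt-stripping
# idea as the commit, written as a standalone helper.
def strip_prompt(generated_text: str, prompt: str) -> str:
    """Keep only the text that follows the prompt in the decoded output."""
    start_pos = generated_text.find(prompt)
    if start_pos == -1:
        # Prompt not found verbatim; fall back to the full decoded text.
        return generated_text
    return generated_text[start_pos + len(prompt):]

# Example usage with made-up strings:
prompt = "You are a helpful assistant. Hello!" + "assistant"
decoded = prompt + " Hi there, how can I help?"
print(strip_prompt(decoded, prompt))  # -> " Hi there, how can I help?"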