ColeGuion committed on
Commit
015c303
·
verified ·
1 Parent(s): 5e37f93

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -2
app.py CHANGED
@@ -55,8 +55,15 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256
55
 
56
  stream1 = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
57
  stream2 = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=False, return_full_text=False)
58
- print(f">> STREAM1 - '{stream1}'")
59
- print(f">> STREAM2 - '{stream2}'")
 
 
 
 
 
 
 
60
  # Generate text from the HF inference
61
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
62
  output = ""
 
55
 
56
  stream1 = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
57
  stream2 = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=False, return_full_text=False)
58
+ #print(f">> STREAM1 - '{stream1}'")
59
+ #print(f">> STREAM2 - '{stream2}'")
60
+ a, b = ">> STREAM1:", ">> STREAM2:"
61
+
62
+ for i in stream1: a += f"\n - '{str(i)}'"
63
+ for i in stream2: b += f"\n - '{str(i)}'"
64
+ print(a + "\n")
65
+ print(b + "\n")
66
+
67
  # Generate text from the HF inference
68
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
69
  output = ""