Update app.py
app.py CHANGED
@@ -53,17 +53,6 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256
     #formatted_prompt = format_prompt_grammar(f"Corrected Sentence: {prompt}", history)
     print("\nPROMPT: \n\t" + formatted_prompt)
 
-    stream1 = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
-    stream2 = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=False, return_full_text=False)
-    #print(f">> STREAM1 - '{stream1}'")
-    #print(f">> STREAM2 - '{stream2}'")
-    a, b = ">> STREAM1:", ">> STREAM2:"
-
-    for i in stream1: a += f"\n - '{str(i)}'"
-    for i in stream2: b += f"\n - '{str(i)}'"
-    print(a + "\n")
-    print(b + "\n")
-
     # Generate text from the HF inference
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
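This commit removes the temporary debugging block that opened two parallel streams (`stream1` with `details=True`, `stream2` with `details=False`) and printed every chunk, leaving only the single streaming call that feeds the app's output. For reference, the sketch below shows how that remaining call is typically consumed with `huggingface_hub`'s `InferenceClient`; the model id and the `consume_stream` wrapper are illustrative assumptions, not part of the Space's code.

```python
from huggingface_hub import InferenceClient

# Assumption: the Space builds `client` roughly like this; the model id is a placeholder.
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")

def consume_stream(formatted_prompt, generate_kwargs):
    # With stream=True and details=True each item is a TextGenerationStreamOutput
    # (TextGenerationStreamResponse in older huggingface_hub releases) whose
    # .token.text carries the newly generated piece; with details=False the
    # iterator yields plain strings, which is the difference the removed
    # stream1/stream2 debug block was probing.
    stream = client.text_generation(
        formatted_prompt,
        **generate_kwargs,
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output  # incremental text, e.g. for a Gradio ChatInterface
```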