import gradio as gr
from ollama import chat, ChatResponse
import subprocess

def interact(message: str, history: list):
    # Add the new user turn to a copy of the running chat history.
    message_dct = {
        "role": "user",
        "content": message
    }
    chat_history = list(history)
    chat_history.append(message_dct)
    # Stream the reply from the local Ollama model.
    response: ChatResponse = chat(
        model="deepseek-r1:1.5b",
        messages=chat_history,
        stream=True
    )
text_response = ""
thinking_response = gr.ChatMessage(content="", metadata={"title":"Thinking Cloud"})
thinking = False
for chunk in response:
bit = chunk["message"]["content"]
if(bit == "<think>"):
thinking = True
continue
elif(bit == "</think>"):
thinking = False
continue
if(thinking):
thinking_response.content += bit
else:
text_response += bit
final_response = [thinking_response, text_response]
yield final_response

interface = gr.ChatInterface(
    fn=interact,
    type="messages",
    title="Deepseek-R1 Chat Interface"
)

if __name__ == "__main__":
    # Pull the model first so it is available locally; "ollama run" would open
    # an interactive session and block before the Gradio app could launch.
    subprocess.run(["ollama", "pull", "deepseek-r1:1.5b"])
    interface.launch()