# DeepSeek chat demo (Hugging Face Spaces): installs/starts Ollama,
# pulls the model, and serves a Gradio text interface.
import os
import subprocess

import gradio as gr
import ollama
# Ensure Ollama is installed
def install_ollama():
    """Install Ollama via its official install script if the CLI is absent.

    Side effects: may run a shell pipeline that downloads and executes
    https://ollama.com/install.sh (requires curl and a POSIX shell).
    Raises subprocess.CalledProcessError if the install pipeline fails.
    """
    try:
        # `--version` exits 0 only when the CLI is present and working.
        subprocess.run(["ollama", "--version"], check=True)
        print("β Ollama is already installed.")
    except (FileNotFoundError, subprocess.CalledProcessError):
        # CalledProcessError is caught too: a present-but-broken binary
        # should trigger (re)installation, not crash the startup check.
        print("π Installing Ollama...")
        # BUG FIX: the original passed a *list* containing "|" together with
        # shell=True; on POSIX that runs only `sh -c "curl"` and the pipe is
        # never executed. A shell pipeline must be a single command string.
        subprocess.run(
            "curl -fsSL https://ollama.com/install.sh | sh",
            shell=True,
            check=True,
        )
        print("β Ollama installed successfully!")
# Start Ollama if it's not running
def start_ollama():
    """Launch `ollama serve` in the background unless one is already running."""
    try:
        # pgrep exits non-zero (CalledProcessError under check=True)
        # when no process matches "ollama".
        subprocess.run(["pgrep", "-f", "ollama"], check=True)
    except subprocess.CalledProcessError:
        print("π Starting Ollama server...")
        # Fire-and-forget: the server keeps running alongside this script.
        subprocess.Popen(["ollama", "serve"])
        print("β Ollama started.")
    else:
        print("β Ollama is already running.")
# Ensure the model has been pulled, using a local marker file to remember it.
# NOTE(review): "deepseek-llm:7b" is the tag published in the Ollama model
# library -- confirm. The original used "deepseek-llm-7b" for chat but pulled
# "deepseek/deepseek-llm-7b": two different names, so pull and chat could
# never agree.
MODEL_NAME = "deepseek-llm:7b"
# Marker path only -- Ollama stores model blobs in its own directory,
# never under models/.
MODEL_PATH = f"models/{MODEL_NAME}"

def download_model():
    """Pull MODEL_NAME with `ollama pull` once, using MODEL_PATH as a marker.

    BUG FIX: the original never created MODEL_PATH after pulling, so the
    existence check could never succeed and the model was re-pulled on
    every start; it also pulled a different name than chat_response() used.
    Raises subprocess.CalledProcessError if the pull fails.
    """
    if os.path.exists(MODEL_PATH):
        print(f"β Model {MODEL_NAME} already exists.")
        return
    print(f"π Downloading model: {MODEL_NAME} to {MODEL_PATH} ...")
    os.makedirs("models", exist_ok=True)
    # Pull exactly the tag chat_response() uses, so pull and chat agree.
    subprocess.run(["ollama", "pull", MODEL_NAME], check=True)
    # Touch the marker so subsequent runs skip the pull.
    with open(MODEL_PATH, "w", encoding="utf-8") as marker:
        marker.write("downloaded\n")
    print(f"β Model downloaded to {MODEL_PATH}.")
# Generate AI response using Ollama
def chat_response(user_input):
    """Send one user message to the model and return the reply text."""
    conversation = [{"role": "user", "content": user_input}]
    reply = ollama.chat(model=MODEL_NAME, messages=conversation)
    return reply["message"]["content"]
# Run setup: ensure the Ollama CLI, its server, and the model are all
# available before the UI comes up.
for _setup_step in (install_ollama, start_ollama, download_model):
    _setup_step()
# Create Gradio Interface: a single text box in, model reply out.
_ui_config = dict(
    fn=chat_response,
    inputs="text",
    outputs="text",
    title="DeepSeek ChatBot (Ollama)",
    description="Chat with DeepSeek LLM 7B using Ollama.",
)
iface = gr.Interface(**_ui_config)
# Launch Gradio App (script entry point only -- not on import).
if __name__ == "__main__":
    # Listen on all interfaces at Gradio's conventional port.
    iface.launch(server_name="0.0.0.0", server_port=7860)