# chat-bot-llava7b/helpers/llm_helper.py
from ollama import generate
from config import Config
from helpers.image_helper import get_image_bytes
# system prompt loaded from config (not referenced elsewhere in this module)
system_prompt = Config.SYSTEM_PROMPT


def analyze_image_file(image_file, model, user_prompt):
    # gets image bytes using helper function
    image_bytes = get_image_bytes(image_file)

    # calls the llava model using the Ollama SDK; stream=True returns a
    # generator of response chunks
    stream = generate(model=model,
                      prompt=user_prompt,
                      images=[image_bytes],
                      stream=True)

    return stream


# handles the stream response back from the LLM, yielding text chunks
def stream_parser(stream):
    for chunk in stream:
        yield chunk['response']
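

# Example usage (a minimal sketch, not part of the original file). It assumes
# a local Ollama server with a LLaVA model pulled (e.g. "llava") and a
# Streamlit front end supplying the uploaded file -- both are assumptions,
# not confirmed by this module.
#
#   import streamlit as st
#
#   uploaded = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])
#   if uploaded is not None:
#       stream = analyze_image_file(uploaded, model="llava",
#                                   user_prompt="Describe this image.")
#       # stream_parser yields plain text chunks, which st.write_stream renders
#       st.write_stream(stream_parser(stream))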