from typing import Optional

import weave
from PIL import Image

from ..retrieval import SimilarityMetric
from .llm_client import LLMClient


class MedQAAssistant(weave.Model):
    """Retrieval-augmented assistant that answers medical queries from retrieved document chunks."""

    llm_client: LLMClient
    retriever: weave.Model
    top_k_chunks: int = 2
    retrieval_similarity_metric: SimilarityMetric = SimilarityMetric.COSINE

    @weave.op()
    def predict(self, query: str, image: Optional[Image.Image] = None) -> str:
        # The image argument is accepted for multi-modal queries but is not yet
        # forwarded to the LLM client.
        _image = image

        # Retrieve the top-k chunks most similar to the query.
        retrieved_chunks = self.retriever.predict(
            query, top_k=self.top_k_chunks, metric=self.retrieval_similarity_metric
        )
        retrieved_chunks = [chunk["text"] for chunk in retrieved_chunks]

        system_prompt = """
        You are a medical expert. You are given a query and a list of chunks from a medical document.
        """
        # Pass the query along with the retrieved chunks, as promised by the system prompt.
        return self.llm_client.predict(
            system_prompt=system_prompt, user_prompt=[query, *retrieved_chunks]
        )
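
A minimal usage sketch, assuming a hypothetical MyChunkRetriever (any weave.Model whose predict accepts query, top_k, and metric) and an LLMClient constructed elsewhere in the package; adapt both names to the concrete implementations this repository provides:

    retriever = MyChunkRetriever()  # hypothetical retriever; substitute the package's own retriever class
    llm_client = LLMClient()        # constructor arguments depend on the LLMClient implementation
    assistant = MedQAAssistant(llm_client=llm_client, retriever=retriever)
    print(assistant.predict(query="What are the common symptoms of appendicitis?"))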