AdrienB134's picture
Update rag_demo/rag/reranker.py
75c733d verified
raw
history blame
760 Bytes
import os
from huggingface_hub import InferenceClient
from base.query import Query
from base.template_factory import RAGStep
from .preprocessing.embed import EmbeddedChunk
class Reranker(RAGStep):
    """Rerank retrieved chunks by semantic similarity to the query.

    Scores each chunk against the query via the Hugging Face Inference API
    sentence-similarity task, then keeps the highest-scoring chunks.
    """

    # Model used for sentence-similarity scoring via the Inference API.
    MODEL_ID = "intfloat/multilingual-e5-large-instruct"

    def generate(
        self, query: Query, chunks: list[EmbeddedChunk], keep_top_k: int
    ) -> list[EmbeddedChunk]:
        """Return the ``keep_top_k`` chunks most similar to ``query``.

        Args:
            query: The user query; only ``query.content`` is read.
            chunks: Candidate chunks. NOTE: each chunk gets a ``similarity``
                attribute assigned as a side effect.
            keep_top_k: Maximum number of chunks to return.

        Returns:
            Chunks sorted by descending similarity, truncated to ``keep_top_k``.
        """
        # Guard: an empty candidate list would otherwise trigger a pointless
        # (and likely failing) API round-trip with zero sentences.
        if not chunks:
            return []
        api = InferenceClient(
            model=self.MODEL_ID,
            # Missing token -> unauthenticated (rate-limited) requests.
            token=os.getenv("HF_API_TOKEN"),
        )
        similarity = api.sentence_similarity(
            query.content, [chunk.content for chunk in chunks]
        )
        # Attach scores so callers can inspect them after reranking.
        for chunk, score in zip(chunks, similarity):
            chunk.similarity = score
        return sorted(chunks, key=lambda c: c.similarity, reverse=True)[:keep_top_k]