|
|
|
# Markdown description rendered at the top of the Gradio demo page.
# Fixed grammar: "Questions answering" -> "Question answering".
desc = """
# Book QA

Question answering with Hugging Face embeddings. Adapted from the [LlamaIndex
example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).
"""
|
|
|
|
|
import datasets |
|
import numpy as np |
|
|
|
from minichain import EmbeddingPrompt, TemplatePrompt, show_log, start_chain |
|
|
|
|
|
|
|
# Load the pre-embedded Gatsby passages from the local "gatsby" dataset
# directory, then build a FAISS index over the "embeddings" column so we
# can do nearest-neighbor retrieval at query time.
# NOTE(review): assumes the dataset was prepared offline with an
# "embeddings" column and a "passages" column — confirm with the build step.
gatsby = datasets.load_from_disk("gatsby")
gatsby.add_faiss_index("embeddings")
|
|
|
|
|
|
|
class KNNPrompt(EmbeddingPrompt):
    """Embed the user's query and look up the single closest passage.

    The embedding backend turns the text returned by ``prompt`` into a
    vector; ``find`` then queries the FAISS index built over the Gatsby
    dataset and packages the retrieved passage(s) with the question.
    """

    def prompt(self, inp):
        # Only the raw query text is embedded.
        return inp["query"]

    def find(self, out, inp):
        # k=1: retrieve just the nearest passage for the query embedding.
        neighbors = gatsby.get_nearest_examples("embeddings", np.array(out), 1)
        return {
            "question": inp["query"],
            "docs": neighbors.examples["passages"],
        }
|
|
|
|
|
|
|
|
|
class QAPrompt(TemplatePrompt):
    """Render the final question-answering prompt from a template file."""

    # Template on disk — presumably filled with the retrieved passages and
    # the question by TemplatePrompt; confirm against the template's fields.
    template_file = "gatsby.pmpt.tpl"
|
|
|
|
|
# Wire up the two-stage chain: embed the query with a sentence-transformers
# model, retrieve the nearest passage (KNNPrompt), then pass question +
# passage to OpenAI for the answer (QAPrompt).
with start_chain("gatsby") as backend:

    prompt = KNNPrompt(
        backend.HuggingFaceEmbed("sentence-transformers/all-mpnet-base-v2")
    ).chain(QAPrompt(backend.OpenAI()))
|
|
|
|
|
|
|
|
|
# Expose the chain as a Gradio app: one "query" input field, an example
# question, and the HF_KEY credential prompt; `desc` is the page header.
gradio = prompt.to_gradio(fields=["query"],
                          examples=["What did Gatsby do before he met Daisy?"],
                          keys={"HF_KEY"},
                          description=desc)
if __name__ == "__main__":
    gradio.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|