"""FastAPI app for a Hugging Face Space that writes small artifacts and uploads
them to the jonathanjordan21/test-dataset dataset repo via huggingface_hub.
Requires an HF_WRITE_TOKEN environment variable with write access."""

import os

from fastapi import FastAPI, Request
from huggingface_hub import HfApi
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.vectorstores.faiss import FAISS

app = FastAPI()


@app.get("/")
def greet_json():
    # Write a fixed marker file and push it to the dataset repo.
    with open("test.txt", "w") as f:
        f.write("HELLO WORLD")
    api = HfApi()
    api.upload_file(
        path_or_fileobj="test.txt",
        path_in_repo="test.txt",
        repo_id="jonathanjordan21/test-dataset",
        repo_type="dataset",
        token=os.getenv("HF_WRITE_TOKEN"),
    )
    return {"Hello": "World!"}


@app.get("/{rand_int}/")
async def main(rand_int: str, request: Request):
    # Overwrite test.txt with the path parameter and upload it.
    with open("test.txt", "w") as f:
        f.write(rand_int)
    api = HfApi()
    api.upload_file(
        path_or_fileobj="test.txt",
        path_in_repo="test.txt",
        repo_id="jonathanjordan21/test-dataset",
        repo_type="dataset",
        token=os.getenv("HF_WRITE_TOKEN"),
    )
    return {"raw_url": str(request.url), "message": rand_int}


@app.get("/vecs/vecs")
async def main2(request: Request):
    # Embeddings come from a remote Ollama-compatible endpoint; the query
    # instruction follows the multilingual-e5-large-instruct prompt format.
    emb = OllamaEmbeddings(
        model="intfloat/multilingual-e5-large-instruct",
        base_url="https://lintasmediadanawa-hf-llm-api.hf.space",
        embed_instruction="",
        query_instruction=(
            "Instruct: Given a web search query, retrieve relevant passages "
            "that answer the query\nQuery: "
        ),
        headers={"Authorization": f"Bearer {os.getenv('HF_WRITE_TOKEN')}"},
    )
    # Build a one-document FAISS index, save it locally, then upload the
    # whole index folder to the dataset repo.
    vecs = FAISS.from_texts(["haha"], emb)
    vecs.save_local("faiss_index")
    api = HfApi()
    api.upload_folder(
        folder_path="faiss_index",
        repo_id="jonathanjordan21/test-dataset",
        path_in_repo="vecs/faiss_index",
        repo_type="dataset",
        token=os.getenv("HF_WRITE_TOKEN"),
    )
    return {"raw_url": str(request.url)}
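

# Usage sketch (an assumption, not part of the original file): if this module
# is saved as app.py, the server can be started locally with uvicorn, e.g.
#
#   uvicorn app:app --host 0.0.0.0 --port 7860
#
# (7860 is the default port exposed by Hugging Face Spaces), and the
# endpoints exercised with curl:
#
#   curl http://localhost:7860/            # writes HELLO WORLD and uploads it
#   curl http://localhost:7860/42/         # uploads "42" as test.txt
#   curl http://localhost:7860/vecs/vecs   # builds and uploads the FAISS index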