from fastapi import FastAPI, Request
from huggingface_hub import HfApi
from langchain_community.vectorstores.faiss import FAISS
from langchain_community.embeddings import OllamaEmbeddings
import os

app = FastAPI()
@app.get("/")
def greet_json():
with open("test.txt", "w") as f:
f.write("HELLO WORLD")
api = HfApi()
api.upload_file(
path_or_fileobj="test.txt",
path_in_repo="test.txt",
repo_id="jonathanjordan21/test-dataset",
repo_type="dataset",
token = os.getenv("HF_WRITE_TOKEN")
)
return {"Hello": "World!"}
@app.get("/{rand_int}/")
async def main(rand_int: str, request: Request):
with open("test.txt", "w") as f:
f.write(rand_int)
api = HfApi()
api.upload_file(
path_or_fileobj="test.txt",
path_in_repo="test.txt",
repo_id="jonathanjordan21/test-dataset",
repo_type="dataset",
token = os.getenv("HF_WRITE_TOKEN")
)
return {"raw_url": str(request.url), "message":rand_int}
@app.get("/vecs/vecs")
async def main2(request: Request):
emb = OllamaEmbeddings(model="intfloat/multilingual-e5-large-instruct",
base_url="https://lintasmediadanawa-hf-llm-api.hf.space",
embed_instruction="",
query_instruction="Instruct: Given a web search query, retrieve relevant passages that answer the query\nQuery: ",
headers={"Authorization": f"Bearer {os.getenv('HF_WRITE_TOKEN')}"},
)
vecs = FAISS.from_texts(["haha"], emb)
api = HfApi()
vecs.save_local("faiss_index")
api.upload_folder(
folder_path="faiss_index",
repo_id="jonathanjordan21/test-dataset",
path_in_repo="vecs/faiss_index",
repo_type="dataset",
token = os.getenv("HF_WRITE_TOKEN")
)
return {"raw_url": str(request.url)}