Update app.py
Browse files
app.py
CHANGED
|
@@ -29,7 +29,28 @@ import io
|
|
| 29 |
# Point pytesseract at the tesseract binary bundled with the app.
# NOTE(review): "/app/tesseract.exe" mixes a Linux container path with a
# Windows executable name — confirm this is the actual binary location
# in the deployment image.
tess.pytesseract.tesseract_cmd = r"/app/tesseract.exe"
|
| 30 |
|
| 31 |
# Use a pipeline as a high-level helper
|
| 32 |
-
pipe = pipeline("text-generation", model="eachadea/vicuna-7b-1.1")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
def get_embedding(text):
    """Return the embedding of *text* as a tensor.

    NOTE(review): relies on a module-level ``model1`` not visible in this
    chunk — presumably a SentenceTransformer; confirm where it is defined.
    """
    return model1.encode(text, convert_to_tensor=True)
|
|
|
|
| 29 |
# Point pytesseract at the tesseract binary bundled with the app.
# NOTE(review): "/app/tesseract.exe" mixes a Linux container path with a
# Windows executable name — confirm this is the actual binary location
# in the deployment image.
tess.pytesseract.tesseract_cmd = r"/app/tesseract.exe"
|
| 30 |
|
| 31 |
# Use a pipeline as a high-level helper
|
| 32 |
+
# pipe = pipeline("text-generation", model="eachadea/vicuna-7b-1.1")
|
| 33 |
+
|
| 34 |
+
# Initialize the pipeline with the Hugging Face API
|
| 35 |
+
# pipe = pipeline("text-generation", model="eachadea/vicuna-7b-1.1", api_key="your_api_key")
|
| 36 |
+
import requests
|
| 37 |
+
|
| 38 |
+
import os

# Hugging Face Inference API endpoint (GPT-2 text generation).
API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"

# SECURITY: the token was previously hard-coded here, which leaks the
# credential in version control — revoke that token. Read it from the
# environment instead. The HF Inference API also requires the
# "Bearer <token>" scheme, which the original header was missing.
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}
|
| 40 |
+
|
| 41 |
+
def query(payload):
    """POST *payload* to the HF Inference API and return the decoded JSON.

    Parameters
    ----------
    payload : dict
        JSON request body, e.g. ``{"inputs": "<prompt>"}``.

    Returns
    -------
    The JSON-decoded response (a list of generations on success, or a
    dict with an "error" key on failure).
    """
    # requests has no default timeout — without one a stalled connection
    # hangs the app forever.
    response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    return response.json()
|
| 44 |
+
|
| 45 |
+
# output = query({
|
| 46 |
+
# "inputs": "Can you please let us know more details about your ",
|
| 47 |
+
# })
|
| 48 |
+
|
| 49 |
+
def generate_response(prompt):
    """Generate a text completion for *prompt* via the HF Inference API.

    Parameters
    ----------
    prompt : str
        The text to complete.

    Returns
    -------
    str
        The generated text.

    Raises
    ------
    RuntimeError
        If the API reports an error (e.g. model loading, invalid token).
    """
    response = query({"inputs": prompt})
    # On failure the Inference API returns a dict {"error": ...} instead of
    # a list; indexing it blindly raised an opaque KeyError/TypeError.
    if isinstance(response, dict) and "error" in response:
        raise RuntimeError(f"Hugging Face API error: {response['error']}")
    return response[0]['generated_text']
|
| 53 |
+
|
| 54 |
|
| 55 |
def get_embedding(text):
    """Return the embedding of *text* as a tensor.

    NOTE(review): relies on a module-level ``model1`` not visible in this
    chunk — presumably a SentenceTransformer; confirm where it is defined.
    """
    return model1.encode(text, convert_to_tensor=True)
|