import tempfile, os, requests
from rest_framework import viewsets, filters
from django_filters.rest_framework import DjangoFilterBackend
from endpoint_teste.models import EndpointTesteModel
from endpoint_teste.serializer import EndpointTesteSerializer, PDFUploadSerializer
from rest_framework.decorators import api_view
from rest_framework.response import Response
from langchain_backend.main import get_llm_answer
from .serializer import TesteSerializer
class EndpointTesteViewSet(viewsets.ModelViewSet):
"""Mostrará todas as tarefas"""
queryset = EndpointTesteModel.objects.order_by("id").all()
serializer_class = EndpointTesteSerializer
filter_backends = [DjangoFilterBackend, filters.SearchFilter]
search_fields = ["id"]
@api_view(["GET", "POST"])
def getTeste(request):
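    """
    POST: validates the body with TesteSerializer and forwards system_prompt,
    user_message and the optional pdf_url to get_llm_answer, returning the
    answer under the "Resposta" key.
    GET: sends a hard-coded test prompt to the Hugging Face Inference API
    (Meta-Llama-3-8B) and returns the raw JSON response.
    """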
if request.method == "POST":
serializer = TesteSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
data = request.data
            # Use .get() so a request without "pdf_url" does not raise a KeyError
            pdf_url = data.get("pdf_url") or None
resposta_llm = get_llm_answer(data["system_prompt"], data["user_message"], pdf_url)
return Response({
"Resposta": resposta_llm
})
if request.method == "GET":
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B"
headers = {"Authorization": "Bearer hf_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}
def query(payload):
response = requests.post(API_URL, headers=headers, json=payload)
return response.json()
output = query({
"inputs": "Can you please let us know more details about your something I don't know",
})
        print('output: ', output)
return Response(output)
@api_view(["POST"])
def getPDF(request):
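    """
    Receives a PDF upload validated by PDFUploadSerializer, writes it to a
    temporary file, passes the file path to get_llm_answer together with
    system_prompt and user_message, and returns the answer under "Resposta".
    """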
if request.method == "POST":
serializer = PDFUploadSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
# Access the uploaded file
data = request.data
print('data: ', data)
pdf_file = serializer.validated_data['file']
pdf_file.seek(0)
            # Alternatively the upload could be persisted under endpoint_teste/media/uploads/,
            # but a temporary copy is enough for the LLM pipeline.
# Create a temporary file to save the uploaded PDF
with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as temp_file:
# Write the uploaded file content to the temporary file
for chunk in pdf_file.chunks():
temp_file.write(chunk)
temp_file_path = temp_file.name # Get the path of the temporary file
print('temp_file_path: ', temp_file_path)
            # Remove the temporary file even if the LLM call fails
            try:
                resposta_llm = get_llm_answer(data["system_prompt"], data["user_message"], temp_file_path)
            finally:
                os.remove(temp_file_path)
            return Response({
                "Resposta": resposta_llm
            })