# NOTE(review): removed Hugging Face file-viewer residue (author avatar line,
# "update", commit hash fd3c8b9, "raw / history blame", file size) that was
# scraped into the source and is not valid Python.
import logging
from typing import Annotated, Any

from fastapi import APIRouter, Depends, Response, UploadFile
from fastapi.responses import FileResponse

import common.dependencies as DI
from common import auth
from components.services.llm_prompt import LlmPromptService
from schemas.llm_prompt import LlmPromptCreateSchema, LlmPromptSchema
# Router for CRUD operations on LLM prompt settings; mounted under /llm_prompt.
router = APIRouter(prefix='/llm_prompt', tags=['Prompt settings'])
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
@router.get('/')
async def get_llm_prompt_list(
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> list[LlmPromptSchema]:
    """Return the full list of stored LLM prompts.

    Requires an authenticated user (``current_user`` dependency).

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    # BUG FIX: original message was copy-pasted from the get-by-id handler
    # ("/llm_prompt/{prompt_id}}"); this endpoint serves the list route.
    logger.info("Handling GET request to /llm_prompt/")
    try:
        return llm_prompt_service.get_list()
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error retrieving llm prompt list")
        raise
@router.get('/default')
async def get_llm_default_prompt(
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> LlmPromptSchema:
    """Return the prompt currently marked as the default one.

    Requires an authenticated user (``current_user`` dependency).

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    logger.info("Handling GET request to /llm_prompt/default/")
    try:
        prompt = llm_prompt_service.get_default()
        # Lazy %-style args avoid formatting cost when INFO is disabled.
        logger.info("Successfully retrieved default llm prompt with ID %s", prompt.id)
        return prompt
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error retrieving default llm prompt")
        raise
@router.get('/{prompt_id}')
async def get_llm_prompt(
    prompt_id: int,
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> LlmPromptSchema:
    """Return a single LLM prompt by its ID.

    Args:
        prompt_id: primary key of the prompt to fetch.

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    # BUG FIX: original literal was "/llm_prompt/{prompt_id}}" — a stray brace
    # and a placeholder that was never interpolated (not an f-string).
    logger.info("Handling GET request to /llm_prompt/%s", prompt_id)
    try:
        prompt = llm_prompt_service.get_by_id(prompt_id)
        logger.info("Successfully retrieved llm prompt with ID: %s", prompt_id)
        return prompt
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error retrieving llm prompt %s", prompt_id)
        raise
@router.put('/default/{prompt_id}')
async def set_as_default_prompt(
    prompt_id: int,
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
):
    """Mark the prompt identified by ``prompt_id`` as the default.

    Returns:
        An empty 200 response on success.

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    # BUG FIX: placeholder was never interpolated (original was not an f-string).
    logger.info("Handling PUT request to /llm_prompt/default/%s", prompt_id)
    try:
        llm_prompt_service.set_as_default(prompt_id)
        # BUG FIX: "setted" -> "set" in the success message.
        logger.info("Successfully set default llm prompt with ID: %s", prompt_id)
        return Response(status_code=200)
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error setting the default llm prompt")
        raise
@router.delete('/{prompt_id}')
async def delete_prompt(
    prompt_id: int,
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
):
    """Delete the prompt identified by ``prompt_id``.

    Returns:
        An empty 200 response on success.

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    # BUG FIX: placeholder was never interpolated (original was not an f-string).
    logger.info("Handling DELETE request to /llm_prompt/%s", prompt_id)
    try:
        llm_prompt_service.delete(prompt_id)
        logger.info("Successfully deleted llm prompt: %s", prompt_id)
        return Response(status_code=200)
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error deleting llm prompt %s", prompt_id)
        raise
@router.post('/')
async def create_prompt(
    data: LlmPromptCreateSchema,
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
):
    """Create a new LLM prompt from the request body.

    Args:
        data: validated creation payload.

    Returns:
        The newly created prompt object from the service layer.

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    logger.info("Handling POST request to /llm_prompt")
    try:
        new_prompt = llm_prompt_service.create(data)
        logger.info("Successfully created llm prompt with ID: %s", new_prompt.id)
        return new_prompt
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error creating llm prompt")
        raise
@router.put('/{prompt_id}')
async def update_prompt(
    prompt_id: int,
    file: LlmPromptSchema,
    llm_prompt_service: Annotated[LlmPromptService, Depends(DI.get_llm_prompt_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
):
    """Replace the prompt identified by ``prompt_id`` with the request body.

    Args:
        prompt_id: primary key of the prompt to update.
        file: new prompt data. NOTE(review): the name "file" is misleading —
            this is a JSON body (LlmPromptSchema), not an upload; kept for
            interface compatibility with existing keyword callers.

    Returns:
        The updated prompt object from the service layer.

    Raises:
        Exception: re-raised from the service layer after being logged.
    """
    # BUG FIX: placeholder was never interpolated (original was not an f-string).
    logger.info("Handling PUT request to /llm_prompt/%s", prompt_id)
    try:
        updated_prompt = llm_prompt_service.update(prompt_id, file)
        logger.info("Successfully updated llm prompt with ID: %s", prompt_id)
        return updated_prompt
    except Exception:
        # logger.exception records the traceback; bare `raise` preserves it.
        logger.exception("Error updating llm prompt %s", prompt_id)
        raise