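"""FastAPI routes for managing LLM prompts under the /llm_prompt prefix."""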
import logging

from fastapi import APIRouter, Response, Depends

from schemas.llm_prompt import LlmPromptCreateSchema, LlmPromptSchema
from components.services.llm_prompt import LlmPromptService
import common.dependencies as DI

router = APIRouter(prefix='/llm_prompt')
logger = logging.getLogger(__name__)


@router.get('/')
async def get_llm_prompt_list(
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> list[LlmPromptSchema]:
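    """Return all stored LLM prompts."""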
    logger.info("Handling GET request to /llm_prompt/{prompt_id}}")
    try:
        prompts = llm_prompt_service.get_list()
        return prompts
    except Exception as e:
        logger.error(f"Error retrieving llm prompt: {str(e)}")
        raise e
    
@router.get('/default')
async def get_llm_default_prompt(
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> LlmPromptSchema:
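    """Return the prompt currently marked as the default."""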
    logger.info("Handling GET request to /llm_prompt/default/")
    try:
        prompt = llm_prompt_service.get_default()
        logger.info(
            f"Successfully retrieved default llm prompt with ID {prompt.id}"
        )
        return prompt
    except Exception as e:
        logger.error(f"Error retrieving default llm prompt: {str(e)}")
        raise


@router.get('/{prompt_id}')
async def get_llm_prompt(
        prompt_id: int,
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> LlmPromptSchema:
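    """Return a single LLM prompt by its ID."""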
    logger.info("Handling GET request to /llm_prompt/{prompt_id}}")
    try:
        prompt = llm_prompt_service.get_by_id(prompt_id)
        logger.info(
            f"Successfully retrieved llm prompt with ID: {prompt_id}"
        )
        return prompt
    except Exception as e:
        logger.error(f"Error retrieving llm prompt: {str(e)}")
        raise


@router.put('/default/{prompt_id}')
async def set_as_default_prompt(
        prompt_id: int,
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> Response:
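    """Mark the prompt with the given ID as the default prompt."""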
    logger.info("Handling PUT request to /llm_prompt/default/{prompt_id}")
    try:
        llm_prompt_service.set_as_default(prompt_id)
        logger.info(
            f"Successfully setted default llm prompt with ID: {prompt_id}"
        )
        return Response(status_code=200)
    except Exception as e:
        logger.error(f"Error setting the default llm prompt: {str(e)}")
        raise


@router.delete('/{prompt_id}')
async def delete_prompt(
        prompt_id: int,
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> Response:
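    """Delete the LLM prompt with the given ID."""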
    logger.info("Handling DELETE request to /llm_prompt/{prompt_id}")
    try:
        llm_prompt_service.delete(prompt_id)
        logger.info(
            f"Successfully deleted llm prompt: {prompt_id}"
        )
        return Response(status_code=200)
    except Exception as e:
        logger.error(f"Error deleting llm prompt: {str(e)}")
        raise


@router.post('/')
async def create_prompt(
        data: LlmPromptCreateSchema,
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> LlmPromptSchema:
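    """Create a new LLM prompt from the submitted data."""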
    logger.info("Handling POST request to /llm_prompt")
    try:
        new_prompt = llm_prompt_service.create(data)
        logger.info(
            f"Successfully created llm prompt with ID: {new_prompt.id}"
        )
        return new_prompt
    except Exception as e:
        logger.error(f"Error creating llm prompt: {str(e)}")
        raise


@router.put('/{prompt_id}')
async def update_prompt(
        prompt_id: int,
        data: LlmPromptSchema,
        llm_prompt_service: LlmPromptService = Depends(DI.get_llm_prompt_service)
) -> LlmPromptSchema:
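    """Update an existing LLM prompt with the submitted data."""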
    logger.info("Handling PUT request to /llm_prompt/{prompt_id}")
    try:
        updated_prompt = llm_prompt_service.update(prompt_id, data)
        logger.info(
            f"Successfully updated llm prompt with ID: {prompt_id}"
        )
        return updated_prompt
    except Exception as e:
        logger.error(f"Error updating llm prompt: {str(e)}")
        raise
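
# Example wiring (a minimal sketch; the actual application setup lives
# elsewhere, and the import path below is an assumption):
#
#     from fastapi import FastAPI
#     from routes.llm_prompt import router as llm_prompt_router
#
#     app = FastAPI()
#     app.include_router(llm_prompt_router)
#
# The endpoints are then served under the /llm_prompt prefix, e.g.
# GET /llm_prompt/ or PUT /llm_prompt/default/{prompt_id}.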