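"""FastAPI router exposing CRUD endpoints for LLM configurations."""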
import logging

from fastapi import APIRouter, Depends, Response

import common.dependencies as DI
from components.services.llm_config import LLMConfigService
from schemas.llm_config import LLMConfig, LLMConfigCreateScheme

router = APIRouter(prefix='/llm_config')
logger = logging.getLogger(__name__)


@router.get('/')
async def get_llm_config_list(llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)) -> list[LLMConfig]:
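    """Return the list of all stored LLM configurations."""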
logger.info("Handling GET request to /llm_config/{config_id}}")
try:
config = llm_config_service.get_list()
return config
except Exception as e:
logger.error(f"Error retrieving llm config: {str(e)}")
raise e


@router.get('/default')
async def get_llm_default_config(
    llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)
) -> LLMConfig:
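    """Return the LLM configuration currently marked as default."""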
logger.info("Handling GET request to /llm_config/default/")
try:
config = llm_config_service.get_default()
logger.info(
f"Successfully retrieved default llm config with ID {config.id}"
)
return config
except Exception as e:
logger.error(f"Error retrieving default llm config: {str(e)}")
raise e


@router.get('/{config_id}')
async def get_llm_config(config_id: int,
llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)
) -> LLMConfig:
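    """Return a single LLM configuration by its ID."""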
logger.info("Handling GET request to /llm_config/{config_id}}")
try:
config = llm_config_service.get_by_id(config_id)
logger.info(
f"Successfully retrieved llm config with ID: {config_id}"
)
return config
except Exception as e:
logger.error(f"Error retrieving llm config: {str(e)}")
raise e


@router.put('/default/{config_id}')
async def set_as_default_config(config_id: int,
llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)):
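    """Mark the LLM configuration with the given ID as the default."""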
logger.info("Handling PUT request to /llm_config/default/{config_id}")
try:
llm_config_service.set_as_default(config_id)
logger.info(
f"Successfully setted default llm config with ID: {config_id}"
)
return Response(status_code=200)
except Exception as e:
logger.error(f"Error setting the default llm config: {str(e)}")
raise e


@router.delete('/{config_id}')
async def delete_config(config_id: int,
llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)):
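    """Delete the LLM configuration with the given ID."""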
logger.info("Handling DELETE request to /llm_config/{config_id}")
try:
llm_config_service.delete(config_id)
logger.info(
f"Successfully deleted llm config: {config_id}"
)
return Response(status_code=200)
except Exception as e:
logger.error(f"Error deleting llm config: {str(e)}")
raise e


@router.post('/')
async def create_config(data: LLMConfigCreateScheme,
llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)):
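    """Create a new LLM configuration from the submitted data."""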
logger.info("Handling POST request to /llm_config")
try:
new_config = llm_config_service.create(data)
logger.info(
f"Successfully created llm config with ID: {new_config.id}"
)
return new_config
except Exception as e:
logger.error(f"Error creating llm config: {str(e)}")
raise e


@router.put('/{config_id}')
async def update_config(config_id: int, data: LLMConfig,
llm_config_service: LLMConfigService = Depends(DI.get_llm_config_service)):
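    """Update an existing LLM configuration with the submitted data."""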
logger.info("Handling PUT request to /llm_config/{config_id}")
try:
        updated_config = llm_config_service.update(config_id, data)
logger.info(
f"Successfully updated llm config with ID: {config_id}"
)
return updated_config
except Exception as e:
logger.error(f"Error updating llm config: {str(e)}")
raise e