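"""FastAPI router for dataset management: listing datasets, checking processing
status, initialising a default dataset, paginated/sorted retrieval, draft
creation, activation, deletion, and zip upload."""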
import logging
from typing import Annotated, Any

from fastapi import (APIRouter, BackgroundTasks, Depends, HTTPException,
                     Response, UploadFile)

from common import auth
import common.dependencies as DI
from components.services.dataset import DatasetService
from schemas.dataset import (Dataset, DatasetExpanded, DatasetProcessing,
                             SortQuery, SortQueryList)

router = APIRouter(prefix='/datasets', tags=['Datasets'])
logger = logging.getLogger(__name__)


@router.get('/')
async def get_datasets(
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> list[Dataset]:
    logger.info("Handling GET request to /datasets")
    try:
        result = dataset_service.get_datasets()
        logger.info(f"Successfully retrieved {len(result)} datasets")
        return result
    except Exception as e:
        logger.error(f"Error retrieving datasets: {str(e)}")
        raise


@router.get('/processing')
async def get_processing(
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> DatasetProcessing:
    logger.info("Handling GET request to /datasets/processing")
    try:
        result = dataset_service.get_processing()
        logger.info(f"Successfully retrieved processing status: {result.status}")
        return result
    except Exception as e:
        logger.error(f"Error retrieving processing status: {str(e)}")
        raise


def try_create_default_dataset(dataset_service: DatasetService):
    """
    Create the default dataset if one does not exist yet.
    """
    if not dataset_service.get_default_dataset():
        logger.info("Creating default dataset")
        if dataset_service.config.db_config.files.empty_start:
            dataset_service.create_empty_dataset(is_default=True)
        else:
            dataset_service.create_dataset_from_directory(
                is_default=True,
                directory_with_documents=dataset_service.config.db_config.files.xmls_path_default,
                directory_with_ready_dataset=dataset_service.config.db_config.files.start_path,
            )


@router.get('/try_init_default_dataset')
async def try_init_default_dataset(
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
):
    logger.info("Handling GET request to /datasets/try_init_default_dataset")
    try:
        try_create_default_dataset(dataset_service)
        return {"ok": True}
    except Exception as e:
        logger.error(f"Error creating default dataset: {str(e)}")
        raise


@router.get('/{dataset_id}')
async def get_dataset(
    dataset_id: int,
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
    page: int = 1,
    page_size: int = 20,
    search: str = '',
    sort: str = '',
) -> DatasetExpanded:
    logger.info(f"Handling GET request to /datasets/{dataset_id} (page={page}, size={page_size}, search='{search}')")
    # Parse "field:direction,field:direction" into a SortQueryList; an empty
    # sort string is passed through to the service unchanged.
    sort_query: SortQueryList | str = sort
    if sort:
        try:
            sorts = []
            for one in sort.split(','):
                field, direction = one.split(':')
                sorts.append(SortQuery(field=field, direction=direction))
            sort_query = SortQueryList(sorts=sorts)
        except ValueError:
            raise HTTPException(
                status_code=400,
                detail="Invalid sort format. Expected format: 'field:direction,field:direction'",
            )
    try:
        result = dataset_service.get_dataset(
            dataset_id,
            page=page,
            page_size=page_size,
            search=search,
            sort=sort_query,
        )
        logger.info(f"Successfully retrieved dataset {dataset_id}")
        return result
    except Exception as e:
        logger.error(f"Error retrieving dataset {dataset_id}: {str(e)}")
        raise


@router.post('/{parent_id}/edit')
async def create_draft(
    parent_id: int,
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> Dataset:
    logger.info(f"Handling POST request to /datasets/{parent_id}/edit")
    try:
        result = dataset_service.create_draft(parent_id)
        logger.info(f"Successfully created draft from dataset {parent_id}")
        return result
    except Exception as e:
        logger.error(f"Error creating draft from dataset {parent_id}: {str(e)}")
        raise


@router.post('/{dataset_id}')
async def make_active(
    dataset_id: int,
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    background_tasks: BackgroundTasks,
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> DatasetExpanded:
    logger.info(f"Handling POST request to /datasets/{dataset_id} (activate)")
    try:
        result = dataset_service.activate_dataset(dataset_id, background_tasks)
        logger.info(f"Successfully activated dataset {dataset_id}")
        return result
    except Exception as e:
        logger.error(f"Error activating dataset {dataset_id}: {str(e)}")
        raise


@router.delete('/{dataset_id}')
async def delete_dataset(
    dataset_id: int,
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> Response:
    logger.info(f"Handling DELETE request to /datasets/{dataset_id}")
    try:
        dataset_service.delete_dataset(dataset_id)
        logger.info(f"Successfully deleted dataset {dataset_id}")
        return Response(status_code=200)
    except Exception as e:
        logger.error(f"Error deleting dataset {dataset_id}: {str(e)}")
        raise


@router.post('/')
async def upload_zip(
    file: UploadFile,
    dataset_service: Annotated[DatasetService, Depends(DI.get_dataset_service)],
    current_user: Annotated[Any, Depends(auth.get_current_user)],
) -> DatasetExpanded:
    logger.info(f"Handling POST request to /datasets with file {file.filename}")
    try:
        result = dataset_service.upload_zip(file)
        logger.info("Successfully uploaded and processed dataset")
        return result
    except Exception as e:
        logger.error(f"Error uploading dataset: {str(e)}")
        raise
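

# Usage sketch (assumed wiring; the application module is not part of this
# file, so the `app` object below is hypothetical):
#
#     from fastapi import FastAPI
#     app = FastAPI()
#     app.include_router(router)  # mounts the endpoints under /datasets
#
# Example requests against the mounted router:
#     GET    /datasets/                                 -> list[Dataset]
#     GET    /datasets/42?page=1&page_size=20&sort=name:asc
#     POST   /datasets/         (multipart zip upload)  -> DatasetExpanded
#     DELETE /datasets/42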