File size: 16,480 Bytes
f959360
c26b6eb
5dc86cf
 
c26b6eb
5dc86cf
 
 
 
f959360
5dc86cf
 
 
f959360
5dc86cf
 
f959360
5dc86cf
 
 
f959360
5dc86cf
c26b6eb
5dc86cf
 
 
c26b6eb
 
5dc86cf
 
 
 
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
f959360
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
5dc86cf
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
 
5dc86cf
 
 
 
 
 
 
c26b6eb
5dc86cf
 
 
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
 
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
 
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
 
5dc86cf
 
 
 
 
 
 
 
 
 
 
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
 
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c26b6eb
5dc86cf
c26b6eb
5dc86cf
 
 
c26b6eb
5dc86cf
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
import duckdb
import os
from fastapi import FastAPI, HTTPException, Request, Path as FastPath
from fastapi.responses import FileResponse, StreamingResponse
from pydantic import BaseModel, Field
from typing import List, Dict, Any, Optional
import logging
import io
import asyncio

# --- Configuration ---
# The database location can be overridden with the DUCKDB_PATH environment
# variable; the default lives under the local "data" directory.
# NOTE(review): if DUCKDB_PATH points outside DATA_DIR, its parent directory
# is not created here — confirm it exists before deployment.
DATABASE_PATH = os.environ.get("DUCKDB_PATH", "data/mydatabase.db")
DATA_DIR = "data"

# Ensure data directory exists
os.makedirs(DATA_DIR, exist_ok=True)

# --- Logging ---
# Module-level logger; INFO level so the executed SQL statements logged by
# the endpoints below are visible.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# --- FastAPI App ---
app = FastAPI(
    title="DuckDB API",
    description="An API to interact with a DuckDB database.",
    version="0.1.0"
)

# --- Database Connection ---
# For simplicity in this example, we connect within each request.
# For production, consider dependency injection or connection pooling.
def get_db():
    """Yield a DuckDB connection scoped to a single request.

    Opens (and, for an absent or zero-byte file, implicitly creates) the
    database at DATABASE_PATH. The connection is always closed afterwards,
    even if the request handler raises.

    Yields:
        duckdb.DuckDBPyConnection: an open, writable connection.

    Raises:
        HTTPException: 500 if the connection cannot be established.
    """
    conn = None  # bound before try so the finally block can test it safely
    try:
        # An absent or empty file means duckdb.connect() creates a brand-new
        # database; log that so operators are not surprised.
        initialize = not os.path.exists(DATABASE_PATH) or os.path.getsize(DATABASE_PATH) == 0
        conn = duckdb.connect(DATABASE_PATH, read_only=False)
        if initialize:
            logger.info(f"Database file not found or empty at {DATABASE_PATH}. Initializing.")
            # Initial schema setup could go here if needed, e.g.:
            # conn.execute("CREATE TABLE IF NOT EXISTS initial_table (id INTEGER, name VARCHAR);")
        yield conn
    except duckdb.Error as e:
        logger.error(f"Database connection error: {e}")
        raise HTTPException(status_code=500, detail=f"Database connection error: {e}")
    finally:
        if conn is not None:
            conn.close()

# --- Pydantic Models ---
class ColumnDefinition(BaseModel):
    """One column of a CREATE TABLE request."""
    name: str  # column identifier; quoted via safe_identifier before use
    type: str  # SQL type string, validated by generate_column_sql

class CreateTableRequest(BaseModel):
    """Body of POST /tables/{table_name}."""
    columns: List[ColumnDefinition]  # must be non-empty; checked by the endpoint

class CreateRowRequest(BaseModel):
    """Body of POST /tables/{table_name}/rows."""
    # List of rows, where each row is a dict of column_name: value
    rows: List[Dict[str, Any]]

class UpdateRowRequest(BaseModel):
    """Body of PUT /tables/{table_name}/rows."""
    updates: Dict[str, Any] # Column value pairs to set
    # Raw SQL WHERE clause; interpolated verbatim by the endpoint (see the
    # injection warning there).
    condition: str         # SQL WHERE clause string to identify rows

class DeleteRowRequest(BaseModel):
    """Body of DELETE /tables/{table_name}/rows."""
    # Raw SQL WHERE clause; interpolated verbatim by the endpoint.
    condition: str         # SQL WHERE clause string to identify rows

class ApiResponse(BaseModel):
    """Generic message envelope returned by most endpoints."""
    message: str
    details: Optional[Any] = None  # optional extra payload; unused by current endpoints

# --- Helper Functions ---
def safe_identifier(name: str) -> str:
    """Return *name* quoted as a SQL identifier safe for interpolation.

    Any embedded double quote is escaped by doubling it — the standard SQL
    identifier-quoting rule DuckDB follows — so even a hostile name cannot
    break out of the identifier position. (The previous fallback built a
    `SELECT '{name}'::IDENTIFIER` query by interpolation, which is not a
    valid DuckDB cast and was itself injectable via a single quote.)

    Args:
        name: The raw table/column name supplied by the client.

    Returns:
        The double-quoted identifier, e.g. 'users' -> '"users"'.

    Raises:
        HTTPException: 400 for an empty name or one containing a NUL byte.
    """
    if not name or "\x00" in name:
        raise HTTPException(status_code=400, detail=f"Invalid identifier: {name}")
    # Doubling embedded quotes makes the quoted form unambiguous.
    return '"' + name.replace('"', '""') + '"'

# Bare SQL types accepted for column definitions; parameterized forms such as
# VARCHAR(255) or DECIMAL(10,2) are allowed via _is_allowed_type below.
_ALLOWED_BASE_TYPES = {
    'INTEGER', 'VARCHAR', 'TEXT', 'BOOLEAN', 'FLOAT', 'DOUBLE',
    'DATE', 'TIMESTAMP', 'BLOB', 'BIGINT', 'DECIMAL',
}


def _is_allowed_type(type_upper: str) -> bool:
    """Return True if *type_upper* is an allowed bare type or an allowed base
    type followed by a purely numeric argument list, e.g. DECIMAL(10,2)."""
    if type_upper in _ALLOWED_BASE_TYPES:
        return True
    if type_upper.endswith(')') and '(' in type_upper:
        base, _, args = type_upper[:-1].partition('(')
        if base.strip() in _ALLOWED_BASE_TYPES:
            parts = [p.strip() for p in args.split(',')]
            return len(parts) >= 1 and all(p.isdigit() for p in parts)
    return False


def generate_column_sql(columns: List[ColumnDefinition]) -> str:
    """Generate the column-definition list of a CREATE TABLE statement.

    Column names are quoted via safe_identifier and only the validated,
    upper-cased type text is emitted. The previous check merely tested that
    the type string *contained* an allowed type name and then interpolated
    the raw input, so e.g. "INTEGER; DROP TABLE x" passed validation.

    Args:
        columns: Parsed column definitions from the request body.

    Returns:
        A string such as '"id" INTEGER, "name" VARCHAR(255)'.

    Raises:
        HTTPException: 400 for an unsupported or malformed data type.
    """
    defs = []
    for col in columns:
        col_name_safe = safe_identifier(col.name)
        type_upper = col.type.strip().upper()
        if not _is_allowed_type(type_upper):
             raise HTTPException(status_code=400, detail=f"Unsupported or invalid data type: {col.type}")
        defs.append(f"{col_name_safe} {type_upper}")
    return ", ".join(defs)

# --- API Endpoints ---

@app.get("/", summary="API Root", response_model=ApiResponse)
async def read_root():
    """Provides a welcome message for the API."""
    return {"message": "Welcome to the DuckDB API!"}

@app.post("/tables/{table_name}", summary="Create Table", response_model=ApiResponse, status_code=201)
async def create_table(
    table_name: str = FastPath(..., description="Name of the table to create"),
    schema: CreateTableRequest = ...,
):
    """Creates a new table with the specified schema."""
    table_name_safe = safe_identifier(table_name)
    if not schema.columns:
        raise HTTPException(status_code=400, detail="Table must have at least one column.")

    try:
        columns_sql = generate_column_sql(schema.columns)
        sql = f"CREATE TABLE {table_name_safe} ({columns_sql});"
        logger.info(f"Executing SQL: {sql}")
        for conn in get_db():
            conn.execute(sql)
        return {"message": f"Table '{table_name}' created successfully."}
    except HTTPException as e: # Re-raise validation errors
        raise e
    except duckdb.Error as e:
        logger.error(f"Error creating table '{table_name}': {e}")
        raise HTTPException(status_code=400, detail=f"Error creating table: {e}")
    except Exception as e:
        logger.error(f"Unexpected error creating table '{table_name}': {e}")
        raise HTTPException(status_code=500, detail="An unexpected error occurred.")

@app.get("/tables/{table_name}", summary="Read Table Data")
async def read_table(
    table_name: str = FastPath(..., description="Name of the table to read from"),
    limit: Optional[int] = None,
    offset: Optional[int] = None
):
    """Reads and returns all rows from a specified table. Supports limit and offset."""
    table_name_safe = safe_identifier(table_name)
    sql = f"SELECT * FROM {table_name_safe}"
    params = []
    if limit is not None:
        sql += " LIMIT ?"
        params.append(limit)
    if offset is not None:
        sql += " OFFSET ?"
        params.append(offset)
    sql += ";"

    try:
        logger.info(f"Executing SQL: {sql} with params: {params}")
        for conn in get_db():
            result = conn.execute(sql, params).fetchall()
            # Convert rows to dictionaries for JSON serialization
            column_names = [desc[0] for desc in conn.description]
            data = [dict(zip(column_names, row)) for row in result]
        return data
    except duckdb.CatalogException as e:
         raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found.")
    except duckdb.Error as e:
        logger.error(f"Error reading table '{table_name}': {e}")
        raise HTTPException(status_code=400, detail=f"Error reading table: {e}")
    except Exception as e:
        logger.error(f"Unexpected error reading table '{table_name}': {e}")
        raise HTTPException(status_code=500, detail="An unexpected error occurred.")


@app.post("/tables/{table_name}/rows", summary="Create Rows", response_model=ApiResponse, status_code=201)
async def create_rows(
    table_name: str = FastPath(..., description="Name of the table to insert into"),
    request: CreateRowRequest = ...,
):
    """Inserts one or more rows into the specified table."""
    table_name_safe = safe_identifier(table_name)
    if not request.rows:
        raise HTTPException(status_code=400, detail="No rows provided to insert.")

    # Assume all rows have the same columns based on the first row
    columns = list(request.rows[0].keys())
    columns_safe = [safe_identifier(col) for col in columns]
    placeholders = ", ".join(["?"] * len(columns))
    columns_sql = ", ".join(columns_safe)

    sql = f"INSERT INTO {table_name_safe} ({columns_sql}) VALUES ({placeholders});"

    # Convert list of dicts to list of lists/tuples for executemany
    params_list = []
    for row_dict in request.rows:
        if list(row_dict.keys()) != columns:
             raise HTTPException(status_code=400, detail="All rows must have the same columns in the same order.")
        params_list.append(list(row_dict.values()))

    try:
        logger.info(f"Executing SQL: {sql} for {len(params_list)} rows")
        for conn in get_db():
            conn.executemany(sql, params_list)
            conn.commit() # Explicit commit after potential bulk insert
        return {"message": f"Successfully inserted {len(params_list)} rows into '{table_name}'."}
    except duckdb.CatalogException as e:
         raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found.")
    except duckdb.Error as e:
        logger.error(f"Error inserting rows into '{table_name}': {e}")
        # Rollback on error might be needed depending on transaction behavior
        # For get_db creating connection per request, this is less critical
        raise HTTPException(status_code=400, detail=f"Error inserting rows: {e}")
    except Exception as e:
        logger.error(f"Unexpected error inserting rows into '{table_name}': {e}")
        raise HTTPException(status_code=500, detail="An unexpected error occurred.")


@app.put("/tables/{table_name}/rows", summary="Update Rows", response_model=ApiResponse)
async def update_rows(
    table_name: str = FastPath(..., description="Name of the table to update"),
    request: UpdateRowRequest = ...,
):
    """Updates rows in the table based on a condition."""
    table_name_safe = safe_identifier(table_name)
    if not request.updates:
        raise HTTPException(status_code=400, detail="No updates provided.")
    if not request.condition:
         raise HTTPException(status_code=400, detail="Update condition (WHERE clause) is required.")

    set_clauses = []
    params = []
    for col, value in request.updates.items():
        set_clauses.append(f"{safe_identifier(col)} = ?")
        params.append(value)

    set_sql = ", ".join(set_clauses)
    # WARNING: Injecting request.condition directly is a security risk.
    # In a real app, use query parameters or a safer way to build the WHERE clause.
    sql = f"UPDATE {table_name_safe} SET {set_sql} WHERE {request.condition};"

    try:
        logger.info(f"Executing SQL: {sql} with params: {params}")
        for conn in get_db():
            # Use execute for safety with parameters
            conn.execute(sql, params)
            conn.commit()
        return {"message": f"Rows in '{table_name}' updated successfully based on condition."}
    except duckdb.CatalogException as e:
         raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found.")
    except duckdb.Error as e:
        logger.error(f"Error updating rows in '{table_name}': {e}")
        raise HTTPException(status_code=400, detail=f"Error updating rows: {e}")
    except Exception as e:
        logger.error(f"Unexpected error updating rows in '{table_name}': {e}")
        raise HTTPException(status_code=500, detail="An unexpected error occurred.")

@app.delete("/tables/{table_name}/rows", summary="Delete Rows", response_model=ApiResponse)
async def delete_rows(
    table_name: str = FastPath(..., description="Name of the table to delete from"),
    request: DeleteRowRequest = ...,
):
    """Deletes rows from the table based on a condition."""
    table_name_safe = safe_identifier(table_name)
    if not request.condition:
         raise HTTPException(status_code=400, detail="Delete condition (WHERE clause) is required.")

    # WARNING: Injecting request.condition directly is a security risk.
    # In a real app, use query parameters or a safer way to build the WHERE clause.
    sql = f"DELETE FROM {table_name_safe} WHERE {request.condition};"

    try:
        logger.info(f"Executing SQL: {sql}")
        for conn in get_db():
            # Execute does not directly support parameters for WHERE in DELETE like this easily
            conn.execute(sql)
            conn.commit()
        return {"message": f"Rows from '{table_name}' deleted successfully based on condition."}
    except duckdb.CatalogException as e:
         raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found.")
    except duckdb.Error as e:
        logger.error(f"Error deleting rows from '{table_name}': {e}")
        raise HTTPException(status_code=400, detail=f"Error deleting rows: {e}")
    except Exception as e:
        logger.error(f"Unexpected error deleting rows from '{table_name}': {e}")
        raise HTTPException(status_code=500, detail="An unexpected error occurred.")

# --- Download Endpoints ---

@app.get("/download/table/{table_name}", summary="Download Table as CSV")
async def download_table_csv(
    table_name: str = FastPath(..., description="Name of the table to download")
):
    """Downloads the entire content of a table as a CSV file."""
    table_name_safe = safe_identifier(table_name)
    # Use COPY TO STDOUT for efficient streaming
    sql = f"COPY (SELECT * FROM {table_name_safe}) TO STDOUT (FORMAT CSV, HEADER)"

    async def stream_csv_data():
        # We need a non-blocking way to stream data from DuckDB.
        # DuckDB's Python API is blocking. A simple approach for this demo
        # is to fetch all data first, then stream it.
        # A more advanced approach would involve running the DuckDB query
        # in a separate thread or process pool managed by asyncio.

        try:
            all_data_io = io.StringIO()
            # This COPY TO variant isn't directly available in Python API for streaming to a buffer easily.
            # Let's fetch data and format as CSV manually or use Pandas.
            for conn in get_db():
                df = conn.execute(f"SELECT * FROM {table_name_safe}").df() # Use pandas for CSV conversion
            
            # Use an in-memory text buffer
            df.to_csv(all_data_io, index=False)
            all_data_io.seek(0)
            
            # Stream the content chunk by chunk
            chunk_size = 8192
            while True:
                chunk = all_data_io.read(chunk_size)
                if not chunk:
                    break
                yield chunk
                # Allow other tasks to run
                await asyncio.sleep(0)
            all_data_io.close()

        except duckdb.CatalogException as e:
            # Stream an error message if the table doesn't exist
            yield f"Error: Table '{table_name}' not found.".encode('utf-8')
            logger.error(f"Error downloading table '{table_name}': {e}")
        except duckdb.Error as e:
            yield f"Error: Could not export table '{table_name}'. {e}".encode('utf-8')
            logger.error(f"Error downloading table '{table_name}': {e}")
        except Exception as e:
             yield f"Error: An unexpected error occurred.".encode('utf-8')
             logger.error(f"Unexpected error downloading table '{table_name}': {e}")


    return StreamingResponse(
        stream_csv_data(),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename={table_name}.csv"},
    )


@app.get("/download/database", summary="Download Database File")
async def download_database_file():
    """Downloads the entire DuckDB database file."""
    if not os.path.exists(DATABASE_PATH):
        raise HTTPException(status_code=404, detail="Database file not found.")

    # Ensure connections are closed before downloading to avoid partial writes/locking issues.
    # This is tricky with the current get_db pattern. A proper app stop/start or
    # dedicated maintenance mode would be better. For this demo, we hope for the best.
    logger.warning("Attempting to download database file. Ensure no active writes are occurring.")

    return FileResponse(
        path=DATABASE_PATH,
        filename=os.path.basename(DATABASE_PATH),
        media_type="application/octet-stream" # Generic binary file type
    )


# --- Health Check ---
@app.get("/health", summary="Health Check", response_model=ApiResponse)
async def health_check():
    """Checks if the API and database connection are working."""
    try:
        for conn in get_db():
            conn.execute("SELECT 1")
        return {"message": "API is healthy and database connection is successful."}
    except Exception as e:
        logger.error(f"Health check failed: {e}")
        raise HTTPException(status_code=503, detail=f"Health check failed: {e}")

# --- Optional: Add Startup/Shutdown events if needed ---
# @app.on_event("startup")
# async def startup_event():
#     # Initialize database connection pool, etc.
#     logger.info("Application startup.")

# @app.on_event("shutdown")
# async def shutdown_event():
#     # Clean up resources, close connections, etc.
#     logger.info("Application shutdown.")