repo | pull_number | instance_id | issue_numbers | base_commit | patch | test_patch | problem_statement | hints_text | created_at
---|---|---|---|---|---|---|---|---|---
litestar-org/litestar | 1,255 | litestar-org__litestar-1255 | [
"1191"
] | c4321e6c81c0c95c22a13ee3c122f8dced1b2138 | diff --git a/starlite/cli/commands/core.py b/starlite/cli/commands/core.py
--- a/starlite/cli/commands/core.py
+++ b/starlite/cli/commands/core.py
@@ -2,17 +2,31 @@
import inspect
import multiprocessing
+import subprocess
+from typing import Any
import click
from click import command, option
from rich.tree import Tree
from starlite import Starlite
-from starlite.cli.utils import StarliteCLIException, StarliteEnv, console, show_app_info
+from starlite.cli.utils import StarliteEnv, console, show_app_info
from starlite.routes import HTTPRoute, WebSocketRoute
from starlite.utils.helpers import unwrap_partial
+def _convert_uvicorn_args(args: dict[str, Any]) -> list[str]:
+ process_args = []
+ for arg, value in args.items():
+ if isinstance(value, bool):
+ if value:
+ process_args.append(f"--{arg}")
+ else:
+ process_args.append(f"--{arg}={value}")
+
+ return process_args
+
+
@command(name="info")
def info_command(app: Starlite) -> None:
"""Show information about the detected Starlite app."""
@@ -51,11 +65,6 @@ def run_command(
instance.
"""
- try:
- import uvicorn
- except ImportError:
- raise StarliteCLIException("Uvicorn needs to be installed to run an app") # pylint: disable=W0707
-
if debug or env.debug:
app.debug = True
@@ -63,14 +72,18 @@ def run_command(
console.rule("[yellow]Starting server process", align="left")
- uvicorn.run(
- env.app_path,
- reload=env.reload or reload,
- host=env.host or host,
- port=env.port or port,
- workers=env.web_concurrency or web_concurrency,
- factory=env.is_app_factory,
- )
+ # invoke uvicorn in a subprocess to be able to use the --reload flag. see
+ # https://github.com/starlite-api/starlite/issues/1191 and https://github.com/encode/uvicorn/issues/1045
+
+ process_args = {
+ "reload": env.reload or reload,
+ "host": env.host or host,
+ "port": env.port or port,
+ "workers": env.web_concurrency or web_concurrency,
+ "factory": env.is_app_factory,
+ }
+
+ subprocess.run(["uvicorn", env.app_path, *_convert_uvicorn_args(process_args)], check=True)
@command(name="routes")
| diff --git a/tests/cli/test_run_command.py b/tests/cli/test_run_command.py
--- a/tests/cli/test_run_command.py
+++ b/tests/cli/test_run_command.py
@@ -17,6 +17,11 @@
from tests.cli.conftest import CreateAppFileFixture
+@pytest.fixture()
+def mock_subprocess_run(mocker: MockerFixture) -> MagicMock:
+ return mocker.patch("starlite.cli.commands.core.subprocess.run")
+
+
@pytest.mark.parametrize("set_in_env", [True, False])
@pytest.mark.parametrize("custom_app_file", [Path("my_app.py"), None])
@pytest.mark.parametrize("host", ["0.0.0.0", None])
@@ -27,7 +32,6 @@ def test_run_command(
mocker: MockerFixture,
runner: CliRunner,
monkeypatch: MonkeyPatch,
- mock_uvicorn_run: MagicMock,
reload: Optional[bool],
port: Optional[int],
host: Optional[str],
@@ -35,6 +39,7 @@ def test_run_command(
custom_app_file: Optional[Path],
create_app_file: CreateAppFileFixture,
set_in_env: bool,
+ mock_subprocess_run: MagicMock,
) -> None:
mock_show_app_info = mocker.patch("starlite.cli.commands.core.show_app_info")
@@ -82,9 +87,14 @@ def test_run_command(
assert result.exception is None
assert result.exit_code == 0
- mock_uvicorn_run.assert_called_once_with(
- f"{path.stem}:app", reload=reload, port=port, host=host, factory=False, workers=web_concurrency
- )
+ expected_args = ["uvicorn", f"{path.stem}:app", f"--host={host}", f"--port={port}"]
+ if reload:
+ expected_args.append("--reload")
+ if web_concurrency:
+ expected_args.append(f"--workers={web_concurrency}")
+
+ mock_subprocess_run.assert_called_once()
+ assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
mock_show_app_info.assert_called_once()
@@ -99,7 +109,7 @@ def test_run_command(
)
def test_run_command_with_autodiscover_app_factory(
runner: CliRunner,
- mock_uvicorn_run: MagicMock,
+ mock_subprocess_run: MagicMock,
file_name: str,
file_content: str,
factory_name: str,
@@ -113,14 +123,21 @@ def test_run_command_with_autodiscover_app_factory(
assert result.exception is None
assert result.exit_code == 0
- mock_uvicorn_run.assert_called_once_with(
- f"{path.stem}:{factory_name}", reload=False, port=8000, host="127.0.0.1", factory=True, workers=1
- )
+ expected_args = [
+ "uvicorn",
+ f"{path.stem}:{factory_name}",
+ "--host=127.0.0.1",
+ "--port=8000",
+ "--factory",
+ "--workers=1",
+ ]
+ mock_subprocess_run.assert_called_once()
+ assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
def test_run_command_with_app_factory(
runner: CliRunner,
- mock_uvicorn_run: MagicMock,
+ mock_subprocess_run: MagicMock,
create_app_file: CreateAppFileFixture,
) -> None:
path = create_app_file("_create_app_with_path.py", content=CREATE_APP_FILE_CONTENT)
@@ -130,12 +147,24 @@ def test_run_command_with_app_factory(
assert result.exception is None
assert result.exit_code == 0
- mock_uvicorn_run.assert_called_once_with(
- f"{app_path}", reload=False, port=8000, host="127.0.0.1", factory=True, workers=1
- )
+ expected_args = [
+ "uvicorn",
+ str(app_path),
+ "--host=127.0.0.1",
+ "--port=8000",
+ "--factory",
+ "--workers=1",
+ ]
+ mock_subprocess_run.assert_called_once()
+ assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
-def test_run_command_force_debug(app_file: Path, mocker: MockerFixture, runner: CliRunner) -> None:
+def test_run_command_force_debug(
+ app_file: Path,
+ mocker: MockerFixture,
+ runner: CliRunner,
+ mock_subprocess_run: MagicMock,
+) -> None:
mock_app = MagicMock()
mocker.patch(
"starlite.cli.utils._autodiscover_app",
| Bug: Issue with reload
**Describe the bug**
When I start my app using `starlite run -r` I get the following error:
```
INFO: Will watch for changes in these directories: ['/Users/alc/starlite-apps/starlite_test']
ERROR: [Errno 48] Address already in use
```
If I stop that and run it without `-r` it works, so I guess my port is not blocked.
**To Reproduce**
Save this as `app.py`
```python
from starlite import get, Starlite
@get('/')
def route() -> str:
return 'Hello'
app = Starlite([route])
```
**Additional context**
@provinzkraut in discord help suggested a workaround using ` uvicorn <app file>:<application object> --reload` and also linked https://github.com/encode/uvicorn/issues/1045
ok, assigning this to @provinzkraut for now - he will check it once he feels better (covid got him). | 2023-03-01T13:53:06 |
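The `_convert_uvicorn_args` helper in the patch above maps a dict of options onto uvicorn's CLI flags: truthy booleans become bare flags, falsy booleans are dropped, and everything else becomes `--name=value`. A standalone sketch of the same conversion (the function name and sample options here are illustrative):

```python
from typing import Any, Dict, List


def convert_cli_args(args: Dict[str, Any]) -> List[str]:
    """Turn a mapping of option names to values into CLI-style flags."""
    process_args = []
    for arg, value in args.items():
        if isinstance(value, bool):
            if value:
                process_args.append(f"--{arg}")  # truthy bool -> bare flag
        else:
            process_args.append(f"--{arg}={value}")  # everything else -> key=value
    return process_args


print(convert_cli_args({"reload": True, "factory": False, "host": "127.0.0.1", "port": 8000}))
# ['--reload', '--host=127.0.0.1', '--port=8000']
```

Invoking uvicorn through `subprocess.run` rather than `uvicorn.run()` lets `--reload` spawn its own supervisor process, which is the workaround referenced in the linked issues.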
litestar-org/litestar | 1,286 | litestar-org__litestar-1286 | [
"1284"
] | a3a675ca4c14c1aa6b262b6d709ec46c63325d6f | diff --git a/starlite/middleware/session/__init__.py b/starlite/middleware/session/__init__.py
--- a/starlite/middleware/session/__init__.py
+++ b/starlite/middleware/session/__init__.py
@@ -8,7 +8,7 @@
def __getattr__(name: str) -> Any:
"""Provide lazy importing as per https://peps.python.org/pep-0562/"""
- if name != "SessionCookieConfig":
+ if name != "CookieBackendConfig":
raise AttributeError(f"Module {__package__} has no attribute {name}")
from .cookie_backend import CookieBackendConfig
@@ -16,7 +16,7 @@ def __getattr__(name: str) -> Any:
warn_deprecation(
deprecated_name=f"{name} from {__package__}",
kind="import",
- alternative="'from startlite.middleware.sessions.cookie_backend import CookieBackendConfig'",
+ alternative="'from starlite.middleware.session.cookie_backend import CookieBackendConfig'",
version="1.47.0",
)
diff --git a/starlite/template/__init__.py b/starlite/template/__init__.py
--- a/starlite/template/__init__.py
+++ b/starlite/template/__init__.py
@@ -32,7 +32,7 @@ def __getattr__(name: str) -> Any:
warn_deprecation(
deprecated_name=f"{name} from {__package__}",
kind="import",
- alternative=f"'from startlite.contrib.{module} import {name}'",
+ alternative=f"'from starlite.contrib.{module} import {name}'",
version="1.46.0",
)
| Bug: `SessionCookieConfig` import DeprecationWarning - suggested import path does not exist
**Describe the bug**
In Starlite >= 1.47, the statement `from starlite.middleware.session.cookie_backend import CookieBackendConfig` raises a DeprecationWarning:
```
DeprecationWarning: Import of deprecated import 'SessionCookieConfig from starlite.middleware.session'.
Deprecated in starlite 1.47.0. This import will be removed in the next major version.
Use "'from startlite.middleware.sessions.cookie_backend import CookieBackendConfig'" instead
```
The suggested import path does not exist and contains a typo.
| Quick searches against the codebase show this also has that `startlite` typo
https://github.com/starlite-api/starlite/blob/abc0ff41ac917afd8d8af718949de2ea935ca44c/starlite/app.py#L466 | 2023-03-05T22:24:49 |
|
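Both hunks above rely on a module-level `__getattr__` (PEP 562) that keeps a deprecated import path alive while emitting a warning; what the patch corrects is the guard name and the `startlite` typo in the suggested replacement path. A minimal self-contained sketch of the pattern, with `OldName`, `NewName`, and the module paths as hypothetical placeholders:

```python
# mypackage/__init__.py -- lazy re-export of a renamed symbol
import warnings
from typing import Any


def __getattr__(name: str) -> Any:
    """Provide lazy importing as per https://peps.python.org/pep-0562/"""
    if name != "OldName":  # must match the *old* public name being deprecated
        raise AttributeError(f"Module {__package__} has no attribute {name}")

    from .new_module import NewName  # hypothetical new location

    warnings.warn(
        f"{name} is deprecated; use 'from mypackage.new_module import NewName'",
        DeprecationWarning,
        stacklevel=2,
    )
    return NewName
```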
litestar-org/litestar | 1,288 | litestar-org__litestar-1288 | [
"1287"
] | 3f526aeef80a17e5ab49f5987e428bf1faab2b68 | diff --git a/starlite/app.py b/starlite/app.py
--- a/starlite/app.py
+++ b/starlite/app.py
@@ -463,7 +463,7 @@ def from_config(cls, config: AppConfig) -> Self:
"""Initialize a ``Starlite`` application from a configuration instance.
Args:
- config: An instance of :class:`AppConfig` <startlite.config.AppConfig>
+ config: An instance of :class:`AppConfig` <starlite.config.AppConfig>
Returns:
An instance of ``Starlite`` application.
| Fix typo in from_config class method
Quick searches against the codebase show this also has that `startlite` typo
https://github.com/starlite-api/starlite/blob/abc0ff41ac917afd8d8af718949de2ea935ca44c/starlite/app.py#L466
_Originally posted by @JacobCoffee in https://github.com/starlite-api/starlite/issues/1284#issuecomment-1455183603_
| 2023-03-05T22:48:33 |
||
litestar-org/litestar | 1,293 | litestar-org__litestar-1293 | [
"1292"
] | 104edd80eeb05149c0a53a118fb3151a2ad88f08 | diff --git a/starlite/_openapi/responses.py b/starlite/_openapi/responses.py
--- a/starlite/_openapi/responses.py
+++ b/starlite/_openapi/responses.py
@@ -14,7 +14,7 @@
from starlite._openapi.utils import pascal_case_to_text
from starlite._signature.models import SignatureField
from starlite.enums import MediaType
-from starlite.exceptions import HTTPException, ImproperlyConfiguredException, ValidationException
+from starlite.exceptions import HTTPException, ValidationException
from starlite.response import Response as StarliteResponse
from starlite.response_containers import File, Redirect, Stream, Template
from starlite.utils import get_enum_string_value, get_name, is_class_and_subclass
@@ -235,11 +235,6 @@ def create_responses(
responses[status_code] = response
for status_code, response in create_additional_responses(route_handler, plugins):
- if status_code in responses:
- raise ImproperlyConfiguredException(
- f"Additional response for status code {status_code} already exists in success or error responses"
- )
-
responses[status_code] = response
return responses or None
| diff --git a/tests/openapi/test_responses.py b/tests/openapi/test_responses.py
--- a/tests/openapi/test_responses.py
+++ b/tests/openapi/test_responses.py
@@ -16,7 +16,6 @@
from starlite.datastructures import Cookie, ResponseHeader
from starlite.exceptions import (
HTTPException,
- ImproperlyConfiguredException,
PermissionDeniedException,
ValidationException,
)
@@ -249,15 +248,33 @@ def test_additional_responses_overlap_with_other_responses() -> None:
class OkResponse(BaseModel):
pass
- @get(responses={200: ResponseSpec(model=OkResponse)})
+ @get(responses={200: ResponseSpec(model=OkResponse, description="Overwritten response")})
def handler() -> Person:
return PersonFactory.build()
- with pytest.raises(
- ImproperlyConfiguredException,
- match="Additional response for status code 200 already exists in success or error responses",
- ):
- create_responses(handler, raises_validation_error=True, generate_examples=False, plugins=[])
+ responses = create_responses(handler, raises_validation_error=True, generate_examples=False, plugins=[])
+
+ assert responses is not None
+ assert responses["200"] is not None
+ assert responses["200"].description == "Overwritten response"
+
+
+def test_additional_responses_overlap_with_raises() -> None:
+ class ErrorResponse(BaseModel):
+ pass
+
+ @get(
+ raises=[ValidationException],
+ responses={400: ResponseSpec(model=ErrorResponse, description="Overwritten response")},
+ )
+ def handler() -> Person:
+ raise ValidationException()
+
+ responses = create_responses(handler, raises_validation_error=True, generate_examples=False, plugins=[])
+
+ assert responses is not None
+ assert responses["400"] is not None
+ assert responses["400"].description == "Overwritten response"
def test_create_response_for_response_subclass() -> None:
| Enhancement: Add a way to override default OpenAPI response descriptions
**What's the feature you'd like to ask for.**
Currently there is no way to replace the default OpenAPI description for HTTP 400 errors. For example, you currently can't do this:
```python
@starlite.post(
"/path",
responses={
400: starlite.ResponseSpec(
model=MyErrorModel, description="A more detailed error description"
)
},
)
async def login() -> starlite.Response:
...
```
If you do, you will get this error on startup:
```console
starlite.exceptions.http_exceptions.ImproperlyConfiguredException: 500: Additional response for status code 400 already exists in success or error responses
```
This makes it impossible to add more detailed error descriptions or properly document response shapes if you want your endpoint to return a different error shape.
**Additional context**
Removing this bit of code would allow the default HTTP 400 description to be replaced:
https://github.com/seladb/starlite/blob/51ae7eebda3e6834a6520471bfdb9cad2ab790c5/starlite/openapi/responses.py#L217-L220
| Have you tried using the raises key?
The raises key doesn't let you specify a model, just an exception type, so you can't specify any OpenAPI details.
Ok, I understand the issue.
Would you like to submit a PR?
> Ok, I understand the issue.
>
> Would you like to submit a PR?
Yeah can do, I'll get one up tonight. | 2023-03-08T07:33:54 |
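The fix above removes the duplicate-status-code check, so a user-supplied `ResponseSpec` now simply overwrites the auto-generated entry for the same status code. Stripped of the OpenAPI machinery, the control flow reduces to last-write-wins on a dict (a toy sketch, not the actual implementation):

```python
# Defaults are built first; additional responses then overwrite matching codes.
defaults = {
    "200": "Request fulfilled, document follows",
    "400": "Bad request syntax or unsupported method",
}
additional = {"400": "A more detailed error description"}

responses = dict(defaults)
for status_code, response in additional.items():
    responses[status_code] = response  # overwrite instead of raising

print(responses["400"])  # A more detailed error description
```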
litestar-org/litestar | 1,327 | litestar-org__litestar-1327 | [
"1326"
] | d73f06d3ee033da7781c96367fae58d673a47292 | diff --git a/starlite/contrib/sqlalchemy_1/config.py b/starlite/contrib/sqlalchemy_1/config.py
--- a/starlite/contrib/sqlalchemy_1/config.py
+++ b/starlite/contrib/sqlalchemy_1/config.py
@@ -265,7 +265,7 @@ def session_maker(self) -> sessionmaker:
)
return cast("sessionmaker", self.session_maker_instance)
- def create_db_session_dependency(self, state: State, scope: Scope) -> Union[Session, AsyncSession]: # noqa
+ def create_db_session_dependency(self, state: State, scope: Scope) -> Union[Session, AsyncSession]: # noqa: F821
"""Create a session instance.
Args:
| Rule exceptions using noqa should be very specific
I think this would ignore all linting here instead of just the one rule we want. It may be best to be specific?
_Originally posted by @JacobCoffee in https://github.com/starlite-api/starlite/pull/1323#discussion_r1135989720_
| 2023-03-14T18:04:57 |
||
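The one-line patch narrows a blanket `# noqa` to `# noqa: F821`. The difference, sketched with a hypothetical type name that only exists at type-check time:

```python
def load_config() -> "Settings":  # noqa: F821 -- silences only "undefined name"
    """'Settings' is a hypothetical forward reference; other rules still fire."""
    ...


def load_config_blanket() -> "Settings":  # noqa -- silences *every* rule on this line
    ...
```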
litestar-org/litestar | 1,354 | litestar-org__litestar-1354 | [
"4321",
"1234"
] | 47e6e137e58d18109e3f6c1b10bc1cc56c217c08 | diff --git a/docs/examples/application_hooks/before_send_hook.py b/docs/examples/application_hooks/before_send_hook.py
--- a/docs/examples/application_hooks/before_send_hook.py
+++ b/docs/examples/application_hooks/before_send_hook.py
@@ -1,11 +1,15 @@
-from typing import TYPE_CHECKING, Dict
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
from starlite import Starlite, get
from starlite.datastructures import MutableScopeHeaders
if TYPE_CHECKING:
+ from typing import Dict
+
from starlite.datastructures import State
- from starlite.types import Message
+ from starlite.types import Message, Scope
@get("/test")
@@ -14,7 +18,7 @@ def handler() -> Dict[str, str]:
return {"key": "value"}
-async def before_send_hook_handler(message: "Message", state: "State") -> None:
+async def before_send_hook_handler(message: Message, state: State, scope: Scope) -> None:
"""The function will be called on each ASGI message.
We therefore ensure it runs only on the message start event.
@@ -24,7 +28,7 @@ async def before_send_hook_handler(message: "Message", state: "State") -> None:
headers["My Header"] = state.message
-def on_startup(state: "State") -> None:
+def on_startup(state: State) -> None:
"""A function that will populate the app state before any requests are received."""
state.message = "value injected during send"
diff --git a/starlite/app.py b/starlite/app.py
--- a/starlite/app.py
+++ b/starlite/app.py
@@ -738,10 +738,7 @@ def _wrap_send(self, send: Send, scope: Scope) -> Send:
async def wrapped_send(message: "Message") -> None:
for hook in self.before_send:
- if hook.num_expected_args > 2:
- await hook(message, self.state, scope)
- else:
- await hook(message, self.state)
+ await hook(message, self.state, scope)
await send(message)
return wrapped_send
diff --git a/starlite/types/callable_types.py b/starlite/types/callable_types.py
--- a/starlite/types/callable_types.py
+++ b/starlite/types/callable_types.py
@@ -42,9 +42,7 @@
AsyncAnyCallable = Callable[..., Awaitable[Any]]
AnyCallable = Callable[..., Any]
AnyGenerator = Union[Generator[Any, Any, Any], AsyncGenerator[Any, Any]]
-BeforeMessageSendHookHandler = Union[
- Callable[[Message, State, Scope], SyncOrAsyncUnion[None]], Callable[[Message, State], SyncOrAsyncUnion[None]]
-]
+BeforeMessageSendHookHandler = Callable[[Message, State, Scope], SyncOrAsyncUnion[None]]
BeforeRequestHookHandler = Callable[[Request], Union[Any, Awaitable[Any]]]
CacheKeyBuilder = Callable[[Request], str]
ExceptionHandler = Callable[[Request, ExceptionT], Response]
| diff --git a/tests/app/test_before_send.py b/tests/app/test_before_send.py
--- a/tests/app/test_before_send.py
+++ b/tests/app/test_before_send.py
@@ -1,4 +1,7 @@
-from typing import TYPE_CHECKING, Dict
+# ruff: noqa: UP006
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
from starlite import get
from starlite.datastructures import MutableScopeHeaders
@@ -6,8 +9,10 @@
from starlite.testing import create_test_client
if TYPE_CHECKING:
+ from typing import Dict
+
from starlite.datastructures import State
- from starlite.types import Message
+ from starlite.types import Message, Scope
def test_before_send() -> None:
@@ -15,7 +20,7 @@ def test_before_send() -> None:
def handler() -> Dict[str, str]:
return {"key": "value"}
- async def before_send_hook_handler(message: "Message", state: "State") -> None:
+ async def before_send_hook_handler(message: Message, state: State, scope: Scope) -> None:
if message["type"] == "http.response.start":
headers = MutableScopeHeaders(message)
headers.add("My Header", state.message)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-21T00:23:06 |
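The problem statement asks for static files that live in virtual filesystems such as zipped packages. A minimal sketch, assuming a hypothetical package `my_package` ships a `static/` directory, of how the stdlib `importlib.resources` API reads such data without a real directory on disk:

```python
from importlib.resources import files


def read_packaged_file(package: str, *parts: str) -> bytes:
    """Read a file bundled inside a package, even from a zipped install."""
    resource = files(package)
    for part in parts:
        resource = resource / part  # Traversable supports the / operator
    return resource.read_bytes()


html = read_packaged_file("my_package", "static", "index.html")  # hypothetical names
```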
litestar-org/litestar | 1,355 | litestar-org__litestar-1355 | [
"4321",
"1234"
] | 47e6e137e58d18109e3f6c1b10bc1cc56c217c08 | diff --git a/starlite/contrib/sqlalchemy_1/plugin.py b/starlite/contrib/sqlalchemy_1/plugin.py
--- a/starlite/contrib/sqlalchemy_1/plugin.py
+++ b/starlite/contrib/sqlalchemy_1/plugin.py
@@ -223,6 +223,8 @@ def providers_map(self) -> Dict[Type[TypeEngine], Callable[[Union[TypeEngine, Ty
sqlalchemy_type.TupleType: self.handle_tuple_type, # pyright: ignore
sqlalchemy_type.Unicode: self.handle_string_type,
sqlalchemy_type.UnicodeText: self.handle_string_type,
+ sqlalchemy_type.Uuid: lambda x: UUID,
+ sqlalchemy_type.UUID: lambda x: UUID,
sqlalchemy_type.VARBINARY: self.handle_string_type,
sqlalchemy_type.VARCHAR: self.handle_string_type,
# mssql
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-21T01:56:20 |
|
litestar-org/litestar | 1,359 | litestar-org__litestar-1359 | [
"4321",
"1234"
] | d217e6a6d6994aea132ec396f7bbbc2623b066b9 | diff --git a/starlite/contrib/sqlalchemy/repository.py b/starlite/contrib/sqlalchemy/repository.py
--- a/starlite/contrib/sqlalchemy/repository.py
+++ b/starlite/contrib/sqlalchemy/repository.py
@@ -4,7 +4,7 @@
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Generic, Literal, Tuple, TypeVar, cast
-from sqlalchemy import delete, over, select, text, update
+from sqlalchemy import Select, delete, over, select, text, update
from sqlalchemy import func as sql_func
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
@@ -20,7 +20,6 @@
from collections import abc
from datetime import datetime
- from sqlalchemy import Select
from sqlalchemy.engine import Result
from sqlalchemy.ext.asyncio import AsyncSession
@@ -76,7 +75,7 @@ def __init__(
"""
super().__init__(**kwargs)
self.session = session
- self.statement = base_select or select(self.model_type)
+ self.statement = base_select if base_select is not None else select(self.model_type)
async def add(self, data: ModelT) -> ModelT:
"""Add `data` to the collection.
@@ -194,7 +193,8 @@ async def get(self, item_id: Any, **kwargs: Any) -> ModelT:
NotFoundError: If no instance found identified by `item_id`.
"""
with wrap_sqlalchemy_exception():
- statement = self._filter_select_by_kwargs(statement=self.statement, **{self.id_attribute: item_id})
+ statement = self._base_select(**kwargs)
+ statement = self._filter_select_by_kwargs(statement=statement, **{self.id_attribute: item_id})
instance = (await self._execute(statement)).scalar_one_or_none()
instance = self.check_not_found(instance)
self.session.expunge(instance)
@@ -213,7 +213,8 @@ async def get_one(self, **kwargs: Any) -> ModelT:
NotFoundError: If no instance found identified by `item_id`.
"""
with wrap_sqlalchemy_exception():
- statement = self._filter_select_by_kwargs(statement=self.statement, **kwargs)
+ statement = self._base_select(**kwargs)
+ statement = self._filter_select_by_kwargs(statement=statement, **kwargs)
instance = (await self._execute(statement)).scalar_one_or_none()
instance = self.check_not_found(instance)
self.session.expunge(instance)
@@ -229,7 +230,8 @@ async def get_one_or_none(self, **kwargs: Any) -> ModelT | None:
The retrieved instance or None
"""
with wrap_sqlalchemy_exception():
- statement = self._filter_select_by_kwargs(statement=self.statement, **kwargs)
+ statement = self._base_select(**kwargs)
+ statement = self._filter_select_by_kwargs(statement=statement, **kwargs)
instance = (await self._execute(statement)).scalar_one_or_none()
if instance:
self.session.expunge(instance)
@@ -259,7 +261,8 @@ async def count(self, *filters: FilterTypes, **kwargs: Any) -> int:
Returns:
Count of records returned by query, ignoring pagination.
"""
- statement = self.statement.with_only_columns(
+ statement = self._base_select(**kwargs)
+ statement = statement.with_only_columns(
sql_func.count(
self.model_type.id, # type:ignore[attr-defined]
),
@@ -345,7 +348,8 @@ async def list_and_count(
Returns:
Count of records returned by query, ignoring pagination.
"""
- statement = self.statement.add_columns(
+ statement = self._base_select(**kwargs)
+ statement = statement.add_columns(
over(
sql_func.count(
self.model_type.id, # type:ignore[attr-defined]
@@ -375,7 +379,8 @@ async def list(self, *filters: FilterTypes, **kwargs: Any) -> list[ModelT]:
Returns:
The list of instances, after filtering applied.
"""
- statement = self._apply_filters(*filters, statement=self.statement)
+ statement = self._base_select(**kwargs)
+ statement = self._apply_filters(*filters, statement=statement)
statement = self._filter_select_by_kwargs(statement, **kwargs)
with wrap_sqlalchemy_exception():
@@ -511,3 +516,6 @@ def _filter_select_by_kwargs(self, statement: SelectT, **kwargs: Any) -> SelectT
for key, val in kwargs.items():
statement = statement.where(getattr(self.model_type, key) == val)
return statement
+
+ def _base_select(self, **kwargs: Any) -> Select[tuple[ModelT]]:
+ return kwargs.pop("base_select", self.statement)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-21T15:24:55 |
|
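The change from `base_select or select(...)` to `base_select if base_select is not None else select(...)` matters because SQLAlchemy clause objects may not support plain truthiness, so `or` can blow up where an identity check is safe. A sketch with a stand-in class:

```python
class Statement:
    """Stand-in for an object whose truth value is undefined,
    as SQLAlchemy clause elements can be."""

    def __bool__(self) -> bool:
        raise TypeError("Boolean value of this clause is not defined")


base = Statement()
default = Statement()

safe = base if base is not None else default  # identity check only: fine

try:
    risky = base or default  # "or" evaluates bool(base) first
except TypeError as exc:
    print(exc)  # Boolean value of this clause is not defined
```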
litestar-org/litestar | 1,363 | litestar-org__litestar-1363 | [
"4321",
"1234"
] | 325623c388e38ae3183dbaf5ff5b1ddbf25824a7 | diff --git a/starlite/openapi/controller.py b/starlite/openapi/controller.py
--- a/starlite/openapi/controller.py
+++ b/starlite/openapi/controller.py
@@ -332,7 +332,7 @@ def render_swagger_ui(self, request: Request) -> str:
<div id='swagger-container'/>
<script type="text/javascript">
const ui = SwaggerUIBundle({{
- spec: JSON.parse({self._dumped_modified_schema}),
+ spec: {self._dumped_modified_schema},
dom_id: '#swagger-container',
deepLinking: true,
showExtensions: true,
@@ -442,7 +442,7 @@ def render_redoc(self, request: Request) -> str: # pragma: no cover
<div id='redoc-container'/>
<script type="text/javascript">
Redoc.init(
- JSON.parse({self._dumped_schema}),
+ {self._dumped_schema},
undefined,
document.getElementById('redoc-container')
)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-22T01:09:24 |
|
litestar-org/litestar | 1,364 | litestar-org__litestar-1364 | [
"4321",
"1234"
] | 5076ec5e4a71aa949f18214973d9b1b6918ca130 | diff --git a/starlite/contrib/sqlalchemy/repository.py b/starlite/contrib/sqlalchemy/repository.py
--- a/starlite/contrib/sqlalchemy/repository.py
+++ b/starlite/contrib/sqlalchemy/repository.py
@@ -193,7 +193,7 @@ async def get(self, item_id: Any, **kwargs: Any) -> ModelT:
NotFoundError: If no instance found identified by `item_id`.
"""
with wrap_sqlalchemy_exception():
- statement = self._base_select(**kwargs)
+ statement = kwargs.pop("base_select", self.statement)
statement = self._filter_select_by_kwargs(statement=statement, **{self.id_attribute: item_id})
instance = (await self._execute(statement)).scalar_one_or_none()
instance = self.check_not_found(instance)
@@ -213,7 +213,7 @@ async def get_one(self, **kwargs: Any) -> ModelT:
NotFoundError: If no instance found identified by `item_id`.
"""
with wrap_sqlalchemy_exception():
- statement = self._base_select(**kwargs)
+ statement = kwargs.pop("base_select", self.statement)
statement = self._filter_select_by_kwargs(statement=statement, **kwargs)
instance = (await self._execute(statement)).scalar_one_or_none()
instance = self.check_not_found(instance)
@@ -230,12 +230,12 @@ async def get_one_or_none(self, **kwargs: Any) -> ModelT | None:
The retrieved instance or None
"""
with wrap_sqlalchemy_exception():
- statement = self._base_select(**kwargs)
+ statement = kwargs.pop("base_select", self.statement)
statement = self._filter_select_by_kwargs(statement=statement, **kwargs)
instance = (await self._execute(statement)).scalar_one_or_none()
if instance:
self.session.expunge(instance)
- return instance
+ return instance # type: ignore
async def get_or_create(self, **kwargs: Any) -> tuple[ModelT, bool]:
"""Get instance identified by ``kwargs`` or create if it doesn't exist.
@@ -261,7 +261,7 @@ async def count(self, *filters: FilterTypes, **kwargs: Any) -> int:
Returns:
Count of records returned by query, ignoring pagination.
"""
- statement = self._base_select(**kwargs)
+ statement = kwargs.pop("base_select", self.statement)
statement = statement.with_only_columns(
sql_func.count(
self.model_type.id, # type:ignore[attr-defined]
@@ -348,7 +348,7 @@ async def list_and_count(
Returns:
Count of records returned by query, ignoring pagination.
"""
- statement = self._base_select(**kwargs)
+ statement = kwargs.pop("base_select", self.statement)
statement = statement.add_columns(
over(
sql_func.count(
@@ -379,7 +379,7 @@ async def list(self, *filters: FilterTypes, **kwargs: Any) -> list[ModelT]:
Returns:
The list of instances, after filtering applied.
"""
- statement = self._base_select(**kwargs)
+ statement = kwargs.pop("base_select", self.statement)
statement = self._apply_filters(*filters, statement=statement)
statement = self._filter_select_by_kwargs(statement, **kwargs)
@@ -516,6 +516,3 @@ def _filter_select_by_kwargs(self, statement: SelectT, **kwargs: Any) -> SelectT
for key, val in kwargs.items():
statement = statement.where(getattr(self.model_type, key) == val)
return statement
-
- def _base_select(self, **kwargs: Any) -> Select[tuple[ModelT]]:
- return kwargs.pop("base_select", self.statement)
| diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py
@@ -8,7 +8,7 @@
from uuid import UUID, uuid4
import pytest
-from sqlalchemy import NullPool, insert
+from sqlalchemy import NullPool, insert, select
from sqlalchemy.ext.asyncio import (
AsyncEngine,
AsyncSession,
@@ -147,6 +147,20 @@ async def test_repo_count_method(author_repo: AuthorRepository) -> None:
assert await author_repo.count() == 2
+async def test_repo_base_select_override(author_repo: AuthorRepository) -> None:
+ """Test SQLALchemy base select override with sqlite.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ all_count = await author_repo.count()
+ filtered_count = await author_repo.count(
+ base_select=select(Author).where(Author.id == UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2"))
+ )
+ assert all_count == 2
+ assert filtered_count == 1
+
+
async def test_repo_list_and_count_method(raw_authors: list[dict[str, Any]], author_repo: AuthorRepository) -> None:
"""Test SQLALchemy list with count in sqlite.
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-22T03:26:11 |
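The revert inlines `kwargs.pop("base_select", self.statement)` at each call site. The pop has to happen before `_filter_select_by_kwargs` runs, because that helper treats every remaining kwarg as a model column to filter on. A toy sketch of the ordering (names and the SQL strings are illustrative):

```python
from typing import Any


def filter_by_kwargs(statement: str, **kwargs: Any) -> str:
    """Every kwarg becomes a WHERE clause, like _filter_select_by_kwargs."""
    for key, val in kwargs.items():
        statement += f" WHERE {key} = {val!r}"
    return statement


def list_rows(**kwargs: Any) -> str:
    # Pop the control option *first* so it is never mistaken for a column.
    statement = kwargs.pop("base_select", "SELECT * FROM author")
    return filter_by_kwargs(statement, **kwargs)


print(list_rows(base_select="SELECT id FROM author", name="Agatha"))
# SELECT id FROM author WHERE name = 'Agatha'
```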
litestar-org/litestar | 1,366 | litestar-org__litestar-1366 | [
"4321",
"1234"
] | 8925a6d31452bb527dad9d714e243fd626263a0a | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,13 @@
import importlib.metadata
import os
+from functools import partial
+from typing import Any
+
+from sphinx.addnodes import document
+from sphinx.application import Sphinx
+
+__all__ = ["setup", "update_html_context"]
+
project = "Starlite"
copyright = "2023, Starlite-API"
@@ -18,7 +26,6 @@
"sphinxcontrib.mermaid",
]
-templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
@@ -120,53 +127,48 @@
"ref.python", # TODO: remove when https://github.com/sphinx-doc/sphinx/issues/4961 is fixed
]
-html_theme = "pydata_sphinx_theme"
+html_theme = "starlite_sphinx_theme"
html_static_path = ["_static"]
-html_css_files = ["style.css"]
html_js_files = ["versioning.js"]
-html_favicon = "images/favicon.ico"
-html_logo = "images/logo.svg"
+html_css_files = ["style.css"]
html_show_sourcelink = False
-html_sidebars = {"about/*": []}
html_title = "Starlite Framework"
-html_additional_pages = {"index": "landing-page.html"}
-
html_theme_options = {
- "use_edit_page_button": False,
- "show_toc_level": 4,
- "navbar_align": "left",
- "icon_links": [
- {
- "name": "GitHub",
- "url": "https://github.com/starlite-api/starlite",
- "icon": "fa-brands fa-github",
- "type": "fontawesome",
- },
- {
- "name": "Discord",
- "url": "https://discord.gg/X3FJqy8d2j",
- "icon": "fa-brands fa-discord",
- "type": "fontawesome",
- },
- ],
- "navbar_end": ["navbar-icon-links"],
- "navbar_persistent": ["search-button", "theme-switcher"],
-}
-
-
-html_context = {
- "navbar_items": {
- "Documentation": "lib/index",
+ "use_page_nav": False,
+ "github_repo_name": "starlite",
+ "logo": {
+ "link": "https://starliteproject.dev",
+ },
+ "extra_navbar_items": {
+ "Documentation": "index",
"Community": {
- "Contribution guide": "community/contribution-guide",
+ "Contribution Guide": "contribution-guide",
"Code of Conduct": "https://github.com/starlite-api/.github/blob/main/CODE_OF_CONDUCT.md",
},
"About": {
- "Organization": "about/organization",
- "Releases": "about/starlite-releases",
+ "Organization": "https://starliteproject.dev/about/organization",
+ "Releases": "https://starliteproject.dev/about/starlite-releases",
},
- "Release notes": "release-notes/index",
- }
+ "Release notes": {
+ "2.0 migration guide": "release-notes/migration_guide_2.html",
+ "2.x Changelog": "https://docs.starliteproject.dev/2/release-notes/changelog.html",
+ "1.x Changelog": "https://docs.starliteproject.dev/1/changelog.html",
+ },
+ },
}
+
+
+def update_html_context(
+ app: Sphinx, pagename: str, templatename: str, context: dict[str, Any], doctree: document
+) -> None:
+ context["generate_toctree_html"] = partial(context["generate_toctree_html"], startdepth=0)
+
+
+def setup(app: Sphinx) -> dict[str, bool]:
+ app.setup_extension("starlite_sphinx_theme")
+ app.setup_extension("pydata_sphinx_theme")
+ app.connect("html-page-context", update_html_context)
+
+ return {"parallel_read_safe": True, "parallel_write_safe": True}
diff --git a/tools/build_docs.py b/tools/build_docs.py
new file mode 100644
--- /dev/null
+++ b/tools/build_docs.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+import argparse
+import importlib.metadata
+import json
+import os
+import shutil
+import subprocess
+from contextlib import contextmanager
+from pathlib import Path
+from typing import TypedDict
+
+REDIRECT_TEMPLATE = """
+<!DOCTYPE HTML>
+<html lang="en-US">
+ <head>
+ <title>Page Redirection</title>
+ <meta charset="UTF-8">
+ <meta http-equiv="refresh" content="0; url={target}">
+ <script type="text/javascript">window.location.href = "{target}"</script>
+ </head>
+ <body>
+ You are being redirected. If this does not work, click <a href='{target}'>this link</a>
+ </body>
+</html>
+"""
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--version", required=False)
+parser.add_argument("output")
+
+
+class VersionSpec(TypedDict):
+ versions: list[str]
+ latest: str
+
+
+@contextmanager
+def checkout(branch: str) -> None:
+ subprocess.run(["git", "checkout", branch], check=True)
+ yield
+ subprocess.run(["git", "checkout", "-"], check=True)
+
+
+def load_version_spec() -> VersionSpec:
+ versions_file = Path("docs/_static/versions.json")
+ if versions_file.exists():
+ return json.loads(versions_file.read_text())
+ return {"versions": [], "latest": ""}
+
+
+def build(output_dir: str, version: str | None) -> None:
+ if version is None:
+ version = importlib.metadata.version("starlite").rsplit(".")[0]
+ else:
+ os.environ["_STARLITE_DOCS_BUILD_VERSION"] = version
+
+ subprocess.run(["make", "docs"], check=True)
+
+ output_dir = Path(output_dir)
+ output_dir.mkdir()
+ output_dir.joinpath(".nojekyll").touch(exist_ok=True)
+
+ version_spec = load_version_spec()
+ is_latest = version == version_spec["latest"]
+
+ docs_src_path = Path("docs/_build/html")
+
+ output_dir.joinpath("index.html").write_text(REDIRECT_TEMPLATE.format(target="latest"))
+
+ if is_latest:
+ shutil.copytree(docs_src_path, output_dir / "latest", dirs_exist_ok=True)
+ shutil.copytree(docs_src_path, output_dir / version, dirs_exist_ok=True)
+
+ # copy existing versions into our output dir to preserve them when cleaning the branch
+ with checkout("gh-pages"):
+ for other_version in [*version_spec["versions"], "latest"]:
+ other_version_path = Path(other_version)
+ other_version_target_path = output_dir / other_version
+ if other_version_path.exists() and not other_version_target_path.exists():
+ shutil.copytree(other_version_path, other_version_target_path)
+
+
+def main() -> None:
+ args = parser.parse_args()
+ build(output_dir=args.output, version=args.version)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/publish_docs.py b/tools/publish_docs.py
deleted file mode 100644
--- a/tools/publish_docs.py
+++ /dev/null
@@ -1,105 +0,0 @@
-from __future__ import annotations
-
-import argparse
-import importlib.metadata
-import json
-import os
-import shutil
-import subprocess
-from contextlib import contextmanager
-from pathlib import Path
-from typing import TypedDict
-
-parser = argparse.ArgumentParser()
-parser.add_argument("--version", required=False)
-parser.add_argument("--push", action="store_true")
-
-
-class VersionSpec(TypedDict):
- versions: list[str]
- latest: str
- docs_latest: str
-
-
-@contextmanager
-def checkout(branch: str) -> None:
- subprocess.run(["git", "checkout", branch], check=True)
- yield
- subprocess.run(["git", "checkout", "-"], check=True)
-
-
-def add_to_versions_file(version: str) -> VersionSpec:
- versions_file = Path("versions.json")
- version_spec: VersionSpec
- if versions_file.exists():
- version_spec = json.loads(versions_file.read_text())
- else:
- version_spec = {"versions": [], "latest": "", "docs_latest": ""}
-
- if version not in version_spec["versions"]:
- version_spec["versions"].append(version)
-
- versions_file.write_text(json.dumps(version_spec))
-
- return version_spec
-
-
-def make_version(version: str | None, push: bool) -> None:
- if version is None:
- version = importlib.metadata.version("starlite").rsplit(".")[0]
- else:
- os.environ["_STARLITE_DOCS_BUILD_VERSION"] = version
-
- git_add = [".nojekyll", "versions.json", version]
- subprocess.run(["make", "docs"], check=True)
-
- with checkout("gh-pages"):
- Path(".nojekyll").touch(exist_ok=True)
-
- version_spec = add_to_versions_file(version)
- rebuild_page = version_spec["docs_latest"] == version
- is_latest = version == version_spec["latest"]
-
- docs_src_path = Path("docs/_build/html")
-
- inventory_file = docs_src_path / "objects.inv"
- shutil.copytree(docs_src_path / "lib", version, dirs_exist_ok=True)
- shutil.copy2(inventory_file, version)
- git_add.append(f"{version}/objects.inv")
-
- if rebuild_page:
- for path in docs_src_path.iterdir():
- git_add.append(path.name)
- if path.is_dir():
- if path == docs_src_path / "lib" and not is_latest:
- continue
- shutil.copytree(path, path.name, dirs_exist_ok=True)
- else:
- shutil.copy2(path, ".")
-
- if is_latest:
- shutil.copytree(docs_src_path / "lib", "lib", dirs_exist_ok=True)
- shutil.copy2(inventory_file, "lib")
- git_add.append("lib/objects.inv")
-
- shutil.rmtree("docs/_build")
-
- for file in git_add:
- subprocess.run(["git", "add", file], check=True)
-
- subprocess.run(
- ["git", "commit", "-m", f"Automatic docs build for version {version!r}", "--no-verify"],
- check=True,
- )
-
- if push:
- subprocess.run(["git", "push"], check=True)
-
-
-def main() -> None:
- args = parser.parse_args()
- make_version(version=args.version, push=args.push)
-
-
-if __name__ == "__main__":
- main()
| diff --git a/docs/lib/reference/testing.rst b/docs/reference/testing.rst
similarity index 100%
rename from docs/lib/reference/testing.rst
rename to docs/reference/testing.rst
diff --git a/docs/lib/usage/testing.rst b/docs/usage/testing.rst
similarity index 98%
rename from docs/lib/usage/testing.rst
rename to docs/usage/testing.rst
--- a/docs/lib/usage/testing.rst
+++ b/docs/usage/testing.rst
@@ -132,7 +132,7 @@ We would then be able to rewrite our test like so:
Using sessions
++++++++++++++
-If you are using :ref:`session middleware <lib/usage/middleware/builtin-middleware:session middleware>` for session persistence
+If you are using :ref:`session middleware <usage/middleware/builtin-middleware:session middleware>` for session persistence
across requests, then you might want to inject or inspect session data outside a request. For this,
:class:`TestClient <.testing.TestClient>` provides two methods:
@@ -246,7 +246,7 @@ Another helper is the :class:`RequestFactory <starlite.testing.RequestFactory>`
you need to test logic that expects to receive a request object.
For example, lets say we wanted to unit test a *guard* function in isolation, to which end we'll reuse the examples
-from the :doc:`route guards </lib/usage/security/guards>` documentation:
+from the :doc:`route guards </usage/security/guards>` documentation:
.. code-block:: python
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-22T11:28:29 |
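One detail worth noting in the `checkout` helper above: the restore step runs only if the body completes, so an exception mid-build would leave the repo sitting on `gh-pages`. A slightly more defensive sketch using `try/finally` (an assumption about desired behavior, not what the patch does):

```python
from __future__ import annotations

import subprocess
from contextlib import contextmanager
from typing import Iterator


@contextmanager
def checkout(branch: str) -> Iterator[None]:
    """Switch to a branch and switch back even if the body raises."""
    subprocess.run(["git", "checkout", branch], check=True)
    try:
        yield
    finally:
        subprocess.run(["git", "checkout", "-"], check=True)
```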
litestar-org/litestar | 1,367 | litestar-org__litestar-1367 | [
"4321",
"1234"
] | d2dff5848e54d9e4bc61a7e2060dbb007dc61dca | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,15 @@
+from __future__ import annotations
+
import importlib.metadata
import os
+from functools import partial
+from typing import Any, TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+ from sphinx.addnodes import document
+ from sphinx.application import Sphinx
+
project = "Starlite"
copyright = "2023, Starlite-API"
@@ -54,53 +64,44 @@
suppress_warnings = ["autosectionlabel.*"]
-html_theme = "pydata_sphinx_theme"
+html_theme = "starlite_sphinx_theme"
html_static_path = ["_static"]
-html_css_files = ["style.css"]
html_js_files = ["versioning.js"]
-html_favicon = "images/favicon.ico"
-html_logo = "images/logo.svg"
+html_css_files = ["style.css"]
html_show_sourcelink = False
-html_sidebars = {"about/*": []}
html_title = "Starlite Framework"
-html_additional_pages = {"index": "landing-page.html"}
-
html_theme_options = {
- "use_edit_page_button": False,
- "show_toc_level": 4,
- "navbar_align": "left",
- "icon_links": [
- {
- "name": "GitHub",
- "url": "https://github.com/starlite-api/starlite",
- "icon": "fa-brands fa-github",
- "type": "fontawesome",
- },
- {
- "name": "Discord",
- "url": "https://discord.gg/X3FJqy8d2j",
- "icon": "fa-brands fa-discord",
- "type": "fontawesome",
- },
- ],
- "navbar_end": ["navbar-icon-links"],
- "navbar_persistent": ["search-button", "theme-switcher"],
-}
-
-
-html_context = {
- "navbar_items": {
- "Documentation": "lib/index",
+ "use_page_nav": False,
+ "github_repo_name": "starlite",
+ "logo": {
+ "link": "https://starliteproject.dev",
+ },
+ "extra_navbar_items": {
+ "Documentation": "index",
"Community": {
- "Contribution guide": "community/contribution-guide",
+ "Contribution Guide": "contribution-guide",
"Code of Conduct": "https://github.com/starlite-api/.github/blob/main/CODE_OF_CONDUCT.md",
},
"About": {
- "Organization": "about/organization",
- "Releases": "about/starlite-releases",
+ "Organization": "https://starliteproject.dev/about/organization",
+ "Releases": "https://starliteproject.dev/about/starlite-releases",
},
- "Release notes": "release-notes/index",
- }
+ "Release notes": {
+ "1.x Changelog": "https://docs.starliteproject.dev/1/changelog.html",
+ },
+ },
}
+
+
+def update_html_context(
+ app: Sphinx, pagename: str, templatename: str, context: dict[str, Any], doctree: document
+) -> None:
+ context["generate_toctree_html"] = partial(context["generate_toctree_html"], startdepth=0)
+
+
+def setup(app: Sphinx) -> dict[str, bool]:
+ app.connect("html-page-context", update_html_context, priority=1000)
+
+ return {"parallel_read_safe": True, "parallel_write_safe": True}
diff --git a/tools/build_docs.py b/tools/build_docs.py
new file mode 100644
--- /dev/null
+++ b/tools/build_docs.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+import argparse
+import importlib.metadata
+import json
+import os
+import shutil
+import subprocess
+from contextlib import contextmanager
+from pathlib import Path
+from typing import TypedDict
+
+REDIRECT_TEMPLATE = """
+<!DOCTYPE HTML>
+<html lang="en-US">
+ <head>
+ <title>Page Redirection</title>
+ <meta charset="UTF-8">
+ <meta http-equiv="refresh" content="0; url={target}">
+ <script type="text/javascript">window.location.href = "{target}"</script>
+ </head>
+ <body>
+ You are being redirected. If this does not work, click <a href='{target}'>this link</a>
+ </body>
+</html>
+"""
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--version", required=False)
+parser.add_argument("output")
+
+
+class VersionSpec(TypedDict):
+ versions: list[str]
+ latest: str
+
+
+@contextmanager
+def checkout(branch: str) -> None:
+ subprocess.run(["git", "checkout", branch], check=True)
+ yield
+ subprocess.run(["git", "checkout", "-"], check=True)
+
+
+def load_version_spec() -> VersionSpec:
+ versions_file = Path("docs/_static/versions.json")
+ if versions_file.exists():
+ return json.loads(versions_file.read_text())
+ return {"versions": [], "latest": ""}
+
+
+def build(output_dir: str, version: str | None) -> None:
+ if version is None:
+ version = importlib.metadata.version("starlite").rsplit(".")[0]
+ else:
+ os.environ["_STARLITE_DOCS_BUILD_VERSION"] = version
+
+ subprocess.run(["make", "docs"], check=True)
+
+ output_dir = Path(output_dir)
+ output_dir.mkdir()
+ output_dir.joinpath(".nojekyll").touch(exist_ok=True)
+
+ version_spec = load_version_spec()
+ is_latest = version == version_spec["latest"]
+
+ docs_src_path = Path("docs/_build/html")
+
+ output_dir.joinpath("index.html").write_text(REDIRECT_TEMPLATE.format(target="latest"))
+
+ if is_latest:
+ shutil.copytree(docs_src_path, output_dir / "latest", dirs_exist_ok=True)
+ shutil.copytree(docs_src_path, output_dir / version, dirs_exist_ok=True)
+
+ # copy existing versions into our output dir to preserve them when cleaning the branch
+ with checkout("gh-pages"):
+ for other_version in [*version_spec["versions"], "latest"]:
+ other_version_path = Path(other_version)
+ other_version_target_path = output_dir / other_version
+ if other_version_path.exists() and not other_version_target_path.exists():
+ shutil.copytree(other_version_path, other_version_target_path)
+
+
+def main() -> None:
+ args = parser.parse_args()
+ build(output_dir=args.output, version=args.version)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/publish_docs.py b/tools/publish_docs.py
deleted file mode 100644
--- a/tools/publish_docs.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import argparse
-import importlib.metadata
-import json
-import os
-import shutil
-import subprocess
-from contextlib import contextmanager
-from pathlib import Path
-from typing import TypedDict
-
-parser = argparse.ArgumentParser()
-parser.add_argument("--version", required=False)
-parser.add_argument("--push", action="store_true")
-
-
-class VersionSpec(TypedDict):
- versions: list[str]
- latest: str
- docs_latest: str
-
-
-@contextmanager
-def checkout(branch: str) -> None:
- subprocess.run(["git", "checkout", branch], check=True)
- yield
- subprocess.run(["git", "checkout", "-"], check=True)
-
-
-def add_to_versions_file(version: str) -> VersionSpec:
- versions_file = Path("versions.json")
- version_spec: VersionSpec
- if versions_file.exists():
- version_spec = json.loads(versions_file.read_text())
- else:
- version_spec = {"versions": [], "latest": "", "docs_latest": ""}
-
- if version not in version_spec["versions"]:
- version_spec["versions"].append(version)
-
- versions_file.write_text(json.dumps(version_spec))
-
- return version_spec
-
-
-def make_version(version: str | None, push: bool) -> None:
- if version is None:
- version = importlib.metadata.version("starlite").rsplit(".")[0]
- else:
- os.environ["_STARLITE_DOCS_BUILD_VERSION"] = version
-
- git_add = [".nojekyll", "versions.json", version]
- subprocess.run(["make", "docs"], check=True)
-
- with checkout("gh-pages"):
- Path(".nojekyll").touch(exist_ok=True)
-
- version_spec = add_to_versions_file(version)
- rebuild_page = version_spec["docs_latest"] == version
- is_latest = version == version_spec["latest"]
-
- docs_src_path = Path("docs/_build/html")
-
- inventory_file = docs_src_path / "objects.inv"
- shutil.copytree(docs_src_path / "lib", version, dirs_exist_ok=True)
- shutil.copy2(inventory_file, version)
- git_add.append(f"{version}/objects.inv")
-
- if rebuild_page:
- for path in docs_src_path.iterdir():
- git_add.append(path.name)
- if path.is_dir():
- if path == docs_src_path / "lib" and not is_latest:
- continue
- shutil.copytree(path, path.name, dirs_exist_ok=True)
- else:
- shutil.copy2(path, ".")
-
- if is_latest:
- shutil.copytree(docs_src_path / "lib", "lib", dirs_exist_ok=True)
- shutil.copy2(inventory_file, "lib")
- git_add.append("lib/objects.inv")
-
- shutil.rmtree("docs/_build")
-
- for file in git_add:
- subprocess.run(["git", "add", file], check=True)
-
- subprocess.run(
- ["git", "commit", "-m", f"Automatic docs build for version {version!r}", "--no-verify"],
- check=True,
- )
-
- if push:
- subprocess.run(["git", "push"], check=True)
-
-
-def main() -> None:
- args = parser.parse_args()
- make_version(version=args.version, push=args.push)
-
-
-if __name__ == "__main__":
- main()
| diff --git a/docs/lib/reference/testing.rst b/docs/reference/testing.rst
similarity index 100%
rename from docs/lib/reference/testing.rst
rename to docs/reference/testing.rst
diff --git a/docs/lib/usage/testing.rst b/docs/usage/testing.rst
similarity index 98%
rename from docs/lib/usage/testing.rst
rename to docs/usage/testing.rst
--- a/docs/lib/usage/testing.rst
+++ b/docs/usage/testing.rst
@@ -148,7 +148,7 @@ We would then be able to rewrite our test like so:
Using sessions
++++++++++++++
-If you are using :ref:`session middleware <lib/usage/middleware/builtin-middleware:session middleware>` for session persistence
+If you are using :ref:`session middleware <usage/middleware/builtin-middleware:session middleware>` for session persistence
across requests, then you might want to inject or inspect session data outside a request. For this,
:class:`TestClient <.testing.TestClient>` provides two methods:
@@ -261,7 +261,7 @@ Another helper is the :class:`RequestFactory <starlite.testing.RequestFactory>`
you need to test logic that expects to receive a request object.
For example, lets say we wanted to unit test a *guard* function in isolation, to which end we'll reuse the examples
-from the :doc:`route guards </lib/usage/security/guards>` documentation:
+from the :doc:`route guards </usage/security/guards>` documentation:
.. code-block:: python
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-22T11:30:15 |
litestar-org/litestar | 1,370 | litestar-org__litestar-1370 | [
"1369"
] | 4a68df8c6f3def4f4eb4c5b35b0968841cda6334 | diff --git a/starlite/constants.py b/starlite/constants.py
--- a/starlite/constants.py
+++ b/starlite/constants.py
@@ -1,6 +1,5 @@
from inspect import Signature
-from msgspec.inspect import UNSET
from pydantic.fields import Undefined
from starlite.types import Empty
@@ -15,5 +14,5 @@
SCOPE_STATE_DEPENDENCY_CACHE = "dependency_cache"
SCOPE_STATE_NAMESPACE = "__starlite__"
SCOPE_STATE_RESPONSE_COMPRESSED = "response_compressed"
-UNDEFINED_SENTINELS = {Undefined, Signature.empty, UNSET, Empty, Ellipsis}
+UNDEFINED_SENTINELS = {Undefined, Signature.empty, Empty, Ellipsis}
SKIP_VALIDATION_NAMES = {"request", "socket", "scope", "receive", "send"}
| Bug: msgspec version needs updating
### Description
Starlite currently needs msgspec >= 0.11.0 from here https://github.com/starlite-api/starlite/blob/8a49aff754d5b409397183eb395cac5a44193d72/pyproject.toml#L52.
Starlite uses msgspec.inspect:
https://github.com/starlite-api/starlite/blob/41089a2d5dff696a25c78a58322d3a8a08ebcac3/starlite/constants.py#L3
msgspec.inspect wasn't introduced in msgspec until 0.12.0:
https://github.com/jcrist/msgspec/releases/tag/0.12.0
### URL to code causing the issue
_No response_
### MCVE
_No response_
### Steps to reproduce
```bash
1. In a new environment, pip install starlite msgspec==0.11.0
2. Try to import starlite
```
### Screenshots
_No response_
### Logs
_No response_
### Starlite Version
1.51.7
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| 2023-03-22T15:07:27 |
||
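The patch resolves the version conflict by dropping the `msgspec.inspect` import entirely. An alternative (not what the patch does) would be a guarded import so both msgspec 0.11 and 0.12+ work; a sketch:

```python
from inspect import Signature

from pydantic.fields import Undefined

UNDEFINED_SENTINELS = {Undefined, Signature.empty, Ellipsis}

try:
    # msgspec.inspect (and its UNSET sentinel) first appeared in msgspec 0.12.0
    from msgspec.inspect import UNSET
except ImportError:  # msgspec < 0.12.0
    pass
else:
    UNDEFINED_SENTINELS.add(UNSET)
```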
litestar-org/litestar | 1,376 | litestar-org__litestar-1376 | [
"4321",
"1234"
] | 18337975649c160dce4671fdd72ce18dad0517b2 | diff --git a/starlite/app.py b/starlite/app.py
--- a/starlite/app.py
+++ b/starlite/app.py
@@ -801,7 +801,7 @@ def update_openapi_schema(self) -> None:
)
operation_ids.append(operation_id)
- async def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None:
+ def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None:
"""Emit an event to all attached listeners.
Args:
@@ -812,4 +812,4 @@ async def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None:
Returns:
None
"""
- await self.event_emitter.emit(event_id, *args, **kwargs)
+ self.event_emitter.emit(event_id, *args, **kwargs)
diff --git a/starlite/events/emitter.py b/starlite/events/emitter.py
--- a/starlite/events/emitter.py
+++ b/starlite/events/emitter.py
@@ -36,7 +36,7 @@ def __init__(self, listeners: Sequence[EventListener]):
self.listeners[event_id].add(listener)
@abstractmethod
- async def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None: # pragma: no cover
+ def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None: # pragma: no cover
"""Emit an event to all attached listeners.
Args:
@@ -126,7 +126,7 @@ async def on_shutdown(self) -> None:
self._worker_task = None
self._queue = None
- async def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None:
+ def emit(self, event_id: str, *args: Any, **kwargs: Any) -> None:
"""Emit an event to all attached listeners.
Args:
| diff --git a/tests/events/test_listener.py b/tests/events/test_listener.py
--- a/tests/events/test_listener.py
+++ b/tests/events/test_listener.py
@@ -1,4 +1,4 @@
-from asyncio import sleep
+from time import sleep
from typing import Any
from unittest.mock import MagicMock
@@ -10,7 +10,7 @@
from starlite.events.listener import EventListener, listener
from starlite.exceptions import ImproperlyConfiguredException
from starlite.status_codes import HTTP_200_OK
-from starlite.testing import create_async_test_client, create_test_client
+from starlite.testing import create_test_client
@pytest.fixture()
@@ -37,17 +37,17 @@ async def listener_fn(*args: Any, **kwargs: Any) -> None:
@pytest.mark.parametrize("listener", [lazy_fixture("sync_listener"), lazy_fixture("async_listener")])
-async def test_event_listener(mock: MagicMock, listener: EventListener) -> None:
+def test_event_listener(mock: MagicMock, listener: EventListener) -> None:
test_value = {"key": "123"}
@get("/")
- async def route_handler(request: Request[Any, Any, Any]) -> None:
- await request.app.emit("test_event", test_value)
+ def route_handler(request: Request[Any, Any, Any]) -> None:
+ request.app.emit("test_event", test_value)
with create_test_client(route_handlers=[route_handler], listeners=[listener]) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- await sleep(0.01)
+ sleep(0.01)
mock.assert_called_with(test_value)
@@ -56,50 +56,46 @@ async def test_shutdown_awaits_pending(async_listener: EventListener, mock: Magi
await emitter.on_startup()
for _ in range(100):
- await emitter.emit("test_event")
+ emitter.emit("test_event")
await emitter.on_shutdown()
assert mock.call_count == 100
-async def test_multiple_event_listeners(
- async_listener: EventListener, sync_listener: EventListener, mock: MagicMock
-) -> None:
+def test_multiple_event_listeners(async_listener: EventListener, sync_listener: EventListener, mock: MagicMock) -> None:
@get("/")
- async def route_handler(request: Request[Any, Any, Any]) -> None:
- await request.app.emit("test_event")
-
- async with create_async_test_client(
- route_handlers=[route_handler], listeners=[async_listener, sync_listener]
- ) as client:
- response = await client.get("/")
- await sleep(0.01)
+ def route_handler(request: Request[Any, Any, Any]) -> None:
+ request.app.emit("test_event")
+
+ with create_test_client(route_handlers=[route_handler], listeners=[async_listener, sync_listener]) as client:
+ response = client.get("/")
+ sleep(0.01)
assert response.status_code == HTTP_200_OK
assert mock.call_count == 2
-async def test_multiple_event_ids(mock: MagicMock) -> None:
+def test_multiple_event_ids(mock: MagicMock) -> None:
@listener("test_event_1", "test_event_2")
def event_handler() -> None:
mock()
@get("/{event_id:int}")
- async def route_handler(request: Request[Any, Any, Any], event_id: int) -> None:
- await request.app.emit(f"test_event_{event_id}")
+ def route_handler(request: Request[Any, Any, Any], event_id: int) -> None:
+ request.app.emit(f"test_event_{event_id}")
- async with create_async_test_client(route_handlers=[route_handler], listeners=[event_handler]) as client:
- response = await client.get("/1")
- await sleep(0.01)
+ with create_test_client(route_handlers=[route_handler], listeners=[event_handler]) as client:
+ response = client.get("/1")
+ sleep(0.01)
assert response.status_code == HTTP_200_OK
assert mock.call_count == 1
- response = await client.get("/2")
- await sleep(0.01)
+ response = client.get("/2")
+ sleep(0.01)
assert response.status_code == HTTP_200_OK
assert mock.call_count == 2
-def test_raises_when_decorator_called_without_callable() -> None:
+async def test_raises_when_decorator_called_without_callable() -> None:
with pytest.raises(ImproperlyConfiguredException):
listener("test_even")(True) # type: ignore
@@ -108,18 +104,18 @@ async def test_raises_when_not_initialized() -> None:
app = Starlite([])
with pytest.raises(ImproperlyConfiguredException):
- await app.emit("x")
+ app.emit("x")
async def test_raises_for_wrong_async_backend(async_listener: EventListener) -> None:
- async with create_async_test_client([], listeners=[async_listener], backend="trio") as client:
+ with create_test_client([], listeners=[async_listener], backend="trio") as client:
assert not client.app.event_emitter._queue
assert not client.app.event_emitter._worker_task
with pytest.raises(ImproperlyConfiguredException):
- await client.app.emit("test_event")
+ client.app.emit("test_event")
async def test_raises_when_not_listener_are_registered_for_an_event_id(async_listener: EventListener) -> None:
- async with create_async_test_client(route_handlers=[], listeners=[async_listener]) as client:
+ with create_test_client(route_handlers=[], listeners=[async_listener]) as client:
with pytest.raises(ImproperlyConfiguredException):
- await client.app.emit("x")
+ client.app.emit("x")
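A usage sketch of the now-synchronous emitter API, mirroring the test changes above: handler code no longer awaits `emit`, while listener callables may still be async and run on the background worker:
```python
from typing import Any

from starlite import Request, get
from starlite.events.listener import listener


@listener("test_event")
async def on_test_event(**kwargs: Any) -> None:
    ...  # listeners may still be async; they execute off the request path


@get("/")
def route_handler(request: Request[Any, Any, Any]) -> None:
    request.app.emit("test_event", keyword="value")  # no await needed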
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-24T11:24:33 |
litestar-org/litestar | 1,377 | litestar-org__litestar-1377 | [
"4321",
"1234"
] | 79a6d870dcd5dc6d627df885d8cb56ca0681b57b | diff --git a/starlite/events/emitter.py b/starlite/events/emitter.py
--- a/starlite/events/emitter.py
+++ b/starlite/events/emitter.py
@@ -93,7 +93,7 @@ async def _worker(self) -> None:
"""
while self._queue:
fn, args, kwargs = await self._queue.get()
- await fn(*args, *kwargs)
+ await fn(*args, **kwargs)
self._queue.task_done()
async def on_startup(self) -> None:
| diff --git a/tests/events/test_listener.py b/tests/events/test_listener.py
--- a/tests/events/test_listener.py
+++ b/tests/events/test_listener.py
@@ -38,17 +38,15 @@ async def listener_fn(*args: Any, **kwargs: Any) -> None:
@pytest.mark.parametrize("listener", [lazy_fixture("sync_listener"), lazy_fixture("async_listener")])
def test_event_listener(mock: MagicMock, listener: EventListener) -> None:
- test_value = {"key": "123"}
-
@get("/")
def route_handler(request: Request[Any, Any, Any]) -> None:
- request.app.emit("test_event", test_value)
+ request.app.emit("test_event", "positional", keyword="keyword-value")
with create_test_client(route_handlers=[route_handler], listeners=[listener]) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
sleep(0.01)
- mock.assert_called_with(test_value)
+ mock.assert_called_with("positional", keyword="keyword-value")
async def test_shutdown_awaits_pending(async_listener: EventListener, mock: MagicMock) -> None:
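For context, the one-character bug fixed above: a single star unpacks a dict's keys as positional arguments, so keyword arguments passed to `emit` never reached the listener as keywords:
```python
def fn(*args, **kwargs):
    return args, kwargs


opts = {"keyword": "value"}
fn("positional", *opts)   # args contain the *key* 'keyword'; kwargs are empty
fn("positional", **opts)  # args == ('positional',), kwargs == opts - intended
```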
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-24T12:32:12 |
litestar-org/litestar | 1,389 | litestar-org__litestar-1389 | [
"1388",
"4321"
] | 36f337a574e17d9ac4c88f7d6869101ec3a64b2f | diff --git a/starlite/plugins.py b/starlite/plugins.py
--- a/starlite/plugins.py
+++ b/starlite/plugins.py
@@ -1,3 +1,4 @@
+from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
Any,
@@ -19,6 +20,7 @@
from typing_extensions import TypeGuard, get_args
from starlite.types.protocols import DataclassProtocol
+from starlite.utils.predicates import is_class_and_subclass
__all__ = (
"InitPluginProtocol",
@@ -209,8 +211,8 @@ def get_plugin_for_value(
if plugins:
if value and isinstance(value, (list, tuple)):
value = value[0]
- if get_args(value):
- value = get_args(value)[0]
+ if is_class_and_subclass(value, Iterable) and (args := get_args(value)): # type:ignore[type-abstract]
+ value = args[0]
for plugin in plugins:
if plugin.is_plugin_supported_type(value):
return plugin
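An illustration of why the unconditional `get_args` unwrap was wrong: unwrapping is desirable for container annotations, but a parametrized non-container generic such as `Request` also has type args, so the annotation was replaced by its first type parameter (see the traceback in the problem statement below):
```python
from typing import Any, List

from typing_extensions import get_args

from starlite import Request

get_args(List[int])               # (int,) - unwrapping a container is desired
get_args(Request[Any, Any, Any])  # (Any, Any, Any) - but Request is not a
                                  # container, so taking args[0] swapped the
                                  # Request annotation for its user type
```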
| diff --git a/tests/test_plugins.py b/tests/test_plugins.py
--- a/tests/test_plugins.py
+++ b/tests/test_plugins.py
@@ -1,13 +1,17 @@
-from typing import TYPE_CHECKING, Any, Dict, Type
+from typing import TYPE_CHECKING, Any, Dict, List, Type
+from unittest.mock import MagicMock
import pytest
from pydantic import BaseModel
+from typing_extensions import get_origin
from starlite import MediaType, Starlite, get
+from starlite.connection import Request
from starlite.plugins import (
InitPluginProtocol,
PluginMapping,
SerializationPluginProtocol,
+ get_plugin_for_value,
)
from starlite.testing import create_test_client
@@ -89,3 +93,14 @@ def on_app_init(self, app: "Starlite") -> None:
assert tag in client.app.tags
assert client.app.state.called
+
+
[email protected](("value", "tested_type"), [(List[int], int), (Request[Any, Any, Any], Request)])
+def test_get_plugin_for_value(value: Any, tested_type: Any) -> None:
+ mock_plugin = MagicMock(spec=SerializationPluginProtocol)
+ mock_plugin.is_plugin_supported_type.return_value = False
+ get_plugin_for_value(value, [mock_plugin])
+ assert mock_plugin.is_plugin_supported_type.called_once()
+ call = mock_plugin.is_plugin_supported_type.mock_calls[0]
+ assert len(call.args) == 1
+ assert get_origin(call.args[0]) or call.args[0] is tested_type
| Bug: SQLAlchemy 1 Plugin - 'Request' object has no attribute 'dict'
### Description
Changing dependency function from
```py
async def provide_user(request: Request[User, Token, Any]) -> User:
```
to
```py
async def provide_user(request: Request) -> User:
```
alleviates the error.
### URL to code causing the issue
_No response_
### MCVE
```python
"""Minimal Starlite application."""
from typing import Any
from sqlalchemy import Column, Integer
from sqlalchemy.orm import declarative_base
from starlite import Request, Starlite, get
from starlite.contrib.sqlalchemy_1.plugin import SQLAlchemyPlugin
from starlite.testing import TestClient
Base = declarative_base()
class User(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
@get("/")
def hello_world(request: Request[User, Any, Any]) -> dict[str, Any]:
"""Route Handler that outputs hello world."""
return {"hello": "world"}
app = Starlite(route_handlers=[hello_world], debug=True, plugins=[SQLAlchemyPlugin()])
if __name__ == "__main__":
with TestClient(app=app) as client:
client.get("/")
```
### Steps to reproduce
_No response_
### Screenshots
_No response_
### Logs
```bash
Traceback (most recent call last):
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/middleware/exceptions/middleware.py", line 149, in __call__
await self.app(scope, receive, send)
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/routes/http.py", line 77, in handle
response = await self._get_response_for_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/routes/http.py", line 129, in _get_response_for_request
response = await self._call_handler_function(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/routes/http.py", line 158, in _call_handler_function
response_data, cleanup_group = await self._get_response_data(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/routes/http.py", line 195, in _get_response_data
cleanup_group = await parameter_model.resolve_dependencies(request, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/_kwargs/kwargs_model.py", line 383, in resolve_dependencies
await resolve_dependency(next(iter(batch)), connection, kwargs, cleanup_group)
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/_kwargs/dependencies.py", line 63, in resolve_dependency
signature_model.parse_values_from_connection_kwargs(connection=connection, **kwargs)
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/_signature/models.py", line 268, in parse_values_from_connection_kwargs
return signature.to_dict()
^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/_signature/models.py", line 290, in to_dict
return {key: self._resolve_field_value(key) for key in self.__fields__}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/_signature/models.py", line 290, in <dictcomp>
return {key: self._resolve_field_value(key) for key in self.__fields__}
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/_signature/models.py", line 281, in _resolve_field_value
return mapping.get_model_instance_for_value(value) if mapping else value
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/plugins.py", line 241, in get_model_instance_for_value
return self.plugin.from_data_container_instance(self.model_class, value)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/oracle/Code/Starlite/starlite-full-stack-example/.venv/lib/python3.11/site-packages/starlite/contrib/sqlalchemy_1/plugin.py", line 423, in from_data_container_instance
return model_class(**data_container_instance.dict())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'Request' object has no attribute 'dict'
```
```
### Starlite Version
main
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| 2023-03-26T02:52:23 |
|
litestar-org/litestar | 1,391 | litestar-org__litestar-1391 | [
"1368",
"4321"
] | 36f337a574e17d9ac4c88f7d6869101ec3a64b2f | diff --git a/starlite/contrib/sqlalchemy_1/plugin.py b/starlite/contrib/sqlalchemy_1/plugin.py
--- a/starlite/contrib/sqlalchemy_1/plugin.py
+++ b/starlite/contrib/sqlalchemy_1/plugin.py
@@ -85,9 +85,9 @@ def on_app_init(self, app: "Starlite") -> None:
if self._config is not None:
app.dependencies[self._config.dependency_key] = Provide(self._config.create_db_session_dependency)
app.before_send.append(self._config.before_send_handler) # type: ignore[arg-type]
+ app.on_startup.append(self._config.update_app_state)
app.on_shutdown.append(self._config.on_shutdown)
self._config.config_sql_alchemy_logging(app.logging_config)
- self._config.update_app_state(state=app.state)
@staticmethod
def is_plugin_supported_type(value: Any) -> "TypeGuard[DeclarativeMeta]":
| diff --git a/tests/contrib/sqlalchemy_1/sql_alchemy_plugin/test_sql_alchemy_config.py b/tests/contrib/sqlalchemy_1/sql_alchemy_plugin/test_sql_alchemy_config.py
--- a/tests/contrib/sqlalchemy_1/sql_alchemy_plugin/test_sql_alchemy_config.py
+++ b/tests/contrib/sqlalchemy_1/sql_alchemy_plugin/test_sql_alchemy_config.py
@@ -24,17 +24,17 @@
@pytest.mark.parametrize("connection_string", ["sqlite+aiosqlite://", "sqlite://"])
def test_sets_engine_and_session_maker(connection_string: str) -> None:
config = SQLAlchemyConfig(connection_string=connection_string, use_async_engine="+aiosqlite" in connection_string)
- app = Starlite(plugins=[SQLAlchemyPlugin(config=config)])
- assert app.state.get(config.engine_app_state_key)
- assert app.state.get(config.session_maker_app_state_key)
+ with create_test_client([], plugins=[SQLAlchemyPlugin(config=config)]) as client:
+ assert client.app.state.get(config.engine_app_state_key)
+ assert client.app.state.get(config.session_maker_app_state_key)
@pytest.mark.parametrize("connection_string", ["sqlite+aiosqlite://", "sqlite://"])
def test_dependency_creates_session(connection_string: str) -> None:
config = SQLAlchemyConfig(connection_string=connection_string, use_async_engine="+aiosqlite" in connection_string)
- app = Starlite(plugins=[SQLAlchemyPlugin(config=config)])
- request = RequestFactory().get()
- session = config.create_db_session_dependency(state=app.state, scope=request.scope)
+ with create_test_client([], plugins=[SQLAlchemyPlugin(config=config)]) as client:
+ request = RequestFactory().get()
+ session = config.create_db_session_dependency(state=client.app.state, scope=request.scope)
assert session
assert request.scope[SESSION_SCOPE_KEY] # type: ignore
| Bug: SQLAlchemy 1 plugin mutates app state destructively
### Description
When using the SQLAlchemy 1 plugin, repeatedly running through the application lifecycle (as done when testing an application not provided by a factory function) causes a `KeyError` on the second pass.
This is caused by the plugin's `on_shutdown` handler deleting the `engine_app_state_key` from the application's state on application shutdown, but only *adding* it on application init.
This can be solved by setting up the application state within `on_startup`.
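A sketch of that fix, matching the patch above (names taken from the plugin's config object):
```python
# register state setup as a startup hook so that what on_shutdown removes is
# re-created on the next startup, keeping the lifecycle symmetric
app.on_startup.append(config.update_app_state)
app.on_shutdown.append(config.on_shutdown)
```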
### URL to code causing the issue
_No response_
### MCVE
```python
from starlite.contrib.sqlalchemy_1.config import SQLAlchemyConfig
from starlite.contrib.sqlalchemy_1.plugin import SQLAlchemyPlugin
from starlite import Starlite
from starlite.testing import TestClient
app = Starlite(
plugins=[
SQLAlchemyPlugin(
config=SQLAlchemyConfig(
connection_string="sqlite+pysqlite:///:memory:",
use_async_engine=False,
)
)
]
)
with TestClient(app) as client:
pass
with TestClient(app) as client:
pass
```
### Steps to reproduce
_No response_
### Screenshots
```bash
""
```
### Logs
_No response_
### Starlite Version
This affects both 1.51 and the main branch.
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| Can confirm.
We have this patch in `starlite-pg-redis-docker`:
```py
@pytest.fixture(scope="session", autouse=True)
def _patch_sqlalchemy_plugin() -> "abc.Iterator":
monkeypatch = pytest.MonkeyPatch()
monkeypatch.setattr(
sqlalchemy_plugin.SQLAlchemyConfig, # type:ignore[attr-defined]
"on_shutdown",
MagicMock(),
)
yield
monkeypatch.undo()
```
Adding an `on_startup` handler feels like the correct way to handle. Good idea! | 2023-03-26T03:26:28 |
litestar-org/litestar | 1,397 | litestar-org__litestar-1397 | [
"4321",
"1234"
] | 6166361d78f15c2963a7555e767d0f3d30caa3b2 | diff --git a/starlite/app.py b/starlite/app.py
--- a/starlite/app.py
+++ b/starlite/app.py
@@ -16,6 +16,7 @@
from starlite.config.app import AppConfig
from starlite.config.response_cache import ResponseCacheConfig
from starlite.connection import Request, WebSocket
+from starlite.constants import OPENAPI_NOT_INITIALIZED
from starlite.datastructures.state import State
from starlite.events.emitter import BaseEventEmitterBackend, SimpleEventEmitter
from starlite.exceptions import (
@@ -458,17 +459,24 @@ async def __call__(
await self.asgi_handler(scope, receive, self._wrap_send(send=send, scope=scope)) # type: ignore[arg-type]
@property
- def openapi_schema(self) -> OpenAPI | None:
+ def openapi_schema(self) -> OpenAPI:
"""Access the OpenAPI schema of the application.
Returns:
The :class:`OpenAPI`
<pydantic_openapi_schema.open_api.OpenAPI> instance of the
- application's.
+ application.
+
+ Raises:
+ ImproperlyConfiguredException: If the application ``openapi_config`` attribute is ``None``.
"""
- if self.openapi_config and not self._openapi_schema:
+ if not self.openapi_config:
+ raise ImproperlyConfiguredException(OPENAPI_NOT_INITIALIZED)
+
+ if not self._openapi_schema:
self._openapi_schema = self.openapi_config.to_openapi_schema()
self.update_openapi_schema()
+
return self._openapi_schema
@classmethod
diff --git a/starlite/constants.py b/starlite/constants.py
--- a/starlite/constants.py
+++ b/starlite/constants.py
@@ -16,3 +16,4 @@
SCOPE_STATE_RESPONSE_COMPRESSED = "response_compressed"
UNDEFINED_SENTINELS = {Undefined, Signature.empty, Empty, Ellipsis}
SKIP_VALIDATION_NAMES = {"request", "socket", "scope", "receive", "send"}
+OPENAPI_NOT_INITIALIZED = "Starlite has not been instantiated with OpenAPIConfig"
diff --git a/starlite/openapi/controller.py b/starlite/openapi/controller.py
--- a/starlite/openapi/controller.py
+++ b/starlite/openapi/controller.py
@@ -6,6 +6,7 @@
from yaml import dump as dump_yaml
+from starlite.constants import OPENAPI_NOT_INITIALIZED
from starlite.controller import Controller
from starlite.enums import MediaType, OpenAPIMediaType
from starlite.exceptions import ImproperlyConfiguredException
@@ -21,8 +22,6 @@
from starlite.connection.request import Request
from starlite.openapi.spec.open_api import OpenAPI
-MSG_OPENAPI_NOT_INITIALIZED = "Starlite has not been instantiated with OpenAPIConfig"
-
class OpenAPISchemaResponse(Response):
"""Response class for OpenAPI Schemas."""
@@ -98,12 +97,7 @@ def get_schema_from_request(request: Request) -> "OpenAPI":
Returns:
An :class:`OpenAPI <starlite.openapi.spec.open_api.OpenAPI>` instance.
-
- Raises:
- ImproperlyConfiguredException: If the application ``openapi_config`` attribute is ``None``.
"""
- if not request.app.openapi_schema: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
return request.app.openapi_schema
def should_serve_endpoint(self, request: "Request") -> bool:
@@ -173,9 +167,6 @@ def retrieve_schema_yaml(self, request: Request) -> Response:
Returns:
A Response instance with the YAML object rendered into a string.
"""
- if not request.app.openapi_config: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
-
if self.should_serve_endpoint(request):
return OpenAPISchemaResponse(
content=self.get_schema_from_request(request), media_type=OpenAPIMediaType.OPENAPI_YAML
@@ -193,9 +184,6 @@ def retrieve_schema_json(self, request: Request) -> Response:
Returns:
A Response instance with the JSON object rendered into a string.
"""
- if not request.app.openapi_config: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
-
if self.should_serve_endpoint(request):
return OpenAPISchemaResponse(
content=self.get_schema_from_request(request), media_type=OpenAPIMediaType.OPENAPI_JSON
@@ -221,7 +209,7 @@ def root(self, request: Request) -> Response:
"""
config = request.app.openapi_config
if not config: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
+ raise ImproperlyConfiguredException(OPENAPI_NOT_INITIALIZED)
render_method = self.render_methods_map[config.root_schema_site]
@@ -245,9 +233,6 @@ def swagger_ui(self, request: Request) -> Response:
Returns:
A response with a rendered swagger documentation site
"""
- if not request.app.openapi_config: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
-
if self.should_serve_endpoint(request):
return Response(content=self.render_swagger_ui(request), media_type=MediaType.HTML)
return Response(
@@ -267,9 +252,6 @@ def stoplight_elements(self, request: Request) -> Response:
Returns:
A response with a rendered stoplight elements documentation site
"""
- if not request.app.openapi_config: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
-
if self.should_serve_endpoint(request):
return Response(content=self.render_stoplight_elements(request), media_type=MediaType.HTML)
return Response(content=self.render_404_page(), status_code=HTTP_404_NOT_FOUND, media_type=MediaType.HTML)
@@ -285,9 +267,6 @@ def redoc(self, request: Request) -> Response: # pragma: no cover
Returns:
A response with a rendered redoc documentation site
"""
- if not request.app.openapi_config: # pragma: no cover
- raise ImproperlyConfiguredException(MSG_OPENAPI_NOT_INITIALIZED)
-
if self.should_serve_endpoint(request):
return Response(content=self.render_redoc(request), media_type=MediaType.HTML)
return Response(content=self.render_404_page(), status_code=HTTP_404_NOT_FOUND, media_type=MediaType.HTML)
| diff --git a/tests/app/test_app.py b/tests/app/test_app.py
new file mode 100644
--- /dev/null
+++ b/tests/app/test_app.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+import pytest
+
+from starlite import Starlite
+from starlite.exceptions import ImproperlyConfiguredException
+
+
+def test_access_openapi_schema_raises_if_not_configured() -> None:
+ """Test that accessing the openapi schema raises if not configured."""
+ app = Starlite(openapi_config=None)
+ with pytest.raises(ImproperlyConfiguredException):
+ app.openapi_schema
diff --git a/tests/contrib/jwt/test_auth.py b/tests/contrib/jwt/test_auth.py
--- a/tests/contrib/jwt/test_auth.py
+++ b/tests/contrib/jwt/test_auth.py
@@ -362,7 +362,7 @@ def login_custom_handler() -> Response["User"]:
assert jwt_auth.security_requirement == {"BearerToken": []}
app = Starlite(on_app_init=[jwt_auth.on_app_init])
- assert app.openapi_schema.to_schema() == { # type: ignore
+ assert app.openapi_schema.to_schema() == {
"openapi": "3.1.0",
"info": {"title": "Starlite API", "version": "1.0.0"},
"servers": [{"url": "/"}],
diff --git a/tests/openapi/test_config.py b/tests/openapi/test_config.py
--- a/tests/openapi/test_config.py
+++ b/tests/openapi/test_config.py
@@ -83,7 +83,7 @@ def handler_2() -> None:
openapi_config=OpenAPIConfig(title="my title", version="1.0.0", operation_id_creator=operation_id_creator),
)
- assert app.openapi_schema.to_schema()["paths"] == { # type: ignore[union-attr]
+ assert app.openapi_schema.to_schema()["paths"] == {
"/1": {
"get": {
"deprecated": False,
diff --git a/tests/openapi/test_request_body.py b/tests/openapi/test_request_body.py
--- a/tests/openapi/test_request_body.py
+++ b/tests/openapi/test_request_body.py
@@ -49,7 +49,7 @@ async def handle_file_list_upload(
return None
app = Starlite(route_handlers=[handle_form_upload, handle_file_upload, handle_file_list_upload])
- schema_dict = app.openapi_schema.to_schema() # type: ignore[union-attr]
+ schema_dict = app.openapi_schema.to_schema()
paths = schema_dict["paths"]
components = schema_dict["components"]
assert paths["/file-upload"]["post"]["requestBody"]["content"]["multipart/form-data"]["schema"] == {
diff --git a/tests/openapi/test_tags.py b/tests/openapi/test_tags.py
--- a/tests/openapi/test_tags.py
+++ b/tests/openapi/test_tags.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Type, cast
+from typing import TYPE_CHECKING, Any, Type
import pytest
@@ -43,7 +43,7 @@ def app(handler: HTTPRouteHandler, controller: Type[Controller], router: Router)
@pytest.fixture()
def openapi_schema(app: Starlite) -> "OpenAPI":
- return cast("OpenAPI", app.openapi_schema)
+ return app.openapi_schema
def test_openapi_schema_handler_tags(openapi_schema: "OpenAPI") -> None:
diff --git a/tests/security/test_session_auth.py b/tests/security/test_session_auth.py
--- a/tests/security/test_session_auth.py
+++ b/tests/security/test_session_auth.py
@@ -80,7 +80,7 @@ def test_session_auth_openapi(session_backend_config_memory: "ServerSideSessionC
session_backend_config=session_backend_config_memory,
)
app = Starlite(on_app_init=[session_auth.on_app_init])
- assert app.openapi_schema.to_schema() == { # type: ignore
+ assert app.openapi_schema.to_schema() == {
"openapi": "3.1.0",
"info": {"title": "Starlite API", "version": "1.0.0"},
"servers": [{"url": "/"}],
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-27T23:57:42 |
litestar-org/litestar | 1,400 | litestar-org__litestar-1400 | [
"1372",
"4321"
] | ea386bc550ab0cd392283a9c718af3dec5a81e31 | diff --git a/starlite/_openapi/schema_generation/schema.py b/starlite/_openapi/schema_generation/schema.py
--- a/starlite/_openapi/schema_generation/schema.py
+++ b/starlite/_openapi/schema_generation/schema.py
@@ -27,11 +27,11 @@
Set,
Tuple,
cast,
- get_args,
)
from uuid import UUID
from _decimal import Decimal
+from typing_extensions import get_args, get_type_hints
from starlite._openapi.schema_generation.constrained_fields import (
create_constrained_field_schema,
@@ -530,16 +530,17 @@ def create_schema_for_pydantic_model(
Returns:
A schema instance.
"""
+ field_type_hints = get_type_hints(field_type, include_extras=False)
return Schema(
required=[field.alias or field.name for field in field_type.__fields__.values() if field.required],
properties={
(f.alias or f.name): create_schema(
- field=SignatureField.create(field_type=v, name=f.alias or f.name),
+ field=SignatureField.create(field_type=field_type_hints[f.name], name=f.alias or f.name),
generate_examples=generate_examples,
plugins=plugins,
schemas=schemas,
)
- for f, v in zip(field_type.__fields__.values(), field_type.__annotations__.values())
+ for f in field_type.__fields__.values()
},
type=OpenAPIType.OBJECT,
title=_get_type_schema_name(field_type),
@@ -575,7 +576,7 @@ def create_schema_for_dataclass(
plugins=plugins,
schemas=schemas,
)
- for k, v in field_type.__annotations__.items()
+ for k, v in get_type_hints(field_type, include_extras=False).items()
},
type=OpenAPIType.OBJECT,
title=_get_type_schema_name(field_type),
@@ -609,7 +610,7 @@ def create_schema_for_typed_dict(
plugins=plugins,
schemas=schemas,
)
- for k, v in field_type.__annotations__.items()
+ for k, v in get_type_hints(field_type, include_extras=False).items()
},
type=OpenAPIType.OBJECT,
title=_get_type_schema_name(field_type),
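A standalone illustration of why `get_type_hints` replaces direct `__annotations__` access here: under `from __future__ import annotations` (PEP 563), `__annotations__` holds unresolved strings, which is why the error only appeared with the future import present:
```python
from __future__ import annotations

from dataclasses import dataclass
from typing import get_type_hints


@dataclass
class Foo:
    foo: int


print(Foo.__annotations__)  # {'foo': 'int'} - a raw string under PEP 563
print(get_type_hints(Foo))  # {'foo': <class 'int'>} - resolved to a real type
```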
| diff --git a/tests/openapi/test_schema.py b/tests/openapi/test_schema.py
--- a/tests/openapi/test_schema.py
+++ b/tests/openapi/test_schema.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
from enum import Enum
-from typing import Dict, Literal
+from typing import TYPE_CHECKING, Dict, Literal
import pytest
from pydantic import BaseModel
@@ -11,6 +11,9 @@
_process_schema_result,
create_schema,
create_schema_for_annotation,
+ create_schema_for_dataclass,
+ create_schema_for_pydantic_model,
+ create_schema_for_typed_dict,
)
from starlite._signature.models import PydanticSignatureModel, SignatureField
from starlite.app import DEFAULT_OPENAPI_CONFIG
@@ -24,6 +27,10 @@
from starlite.testing import create_test_client
from tests import Person, Pet
+if TYPE_CHECKING:
+ from types import ModuleType
+ from typing import Callable
+
def test_process_schema_result() -> None:
test_str = "abc"
@@ -177,3 +184,63 @@ def test_title_validation() -> None:
plugins=[],
schemas=schemas,
)
+
+
[email protected]("with_future_annotations", [True, False])
+def test_create_schema_for_pydantic_model_with_annotated_model_attribute(
+ with_future_annotations: bool, create_module: "Callable[[str], ModuleType]"
+) -> None:
+ """Test that a model with an annotated attribute is correctly handled."""
+ module = create_module(
+ f"""
+{'from __future__ import annotations' if with_future_annotations else ''}
+from typing_extensions import Annotated
+from pydantic import BaseModel
+
+class Foo(BaseModel):
+ foo: Annotated[int, "Foo description"]
+"""
+ )
+ schema = create_schema_for_pydantic_model(module.Foo, generate_examples=False, plugins=[], schemas={})
+ assert schema.properties and "foo" in schema.properties
+
+
[email protected]("with_future_annotations", [True, False])
+def test_create_schema_for_dataclass_with_annotated_model_attribute(
+ with_future_annotations: bool, create_module: "Callable[[str], ModuleType]"
+) -> None:
+ """Test that a model with an annotated attribute is correctly handled."""
+ module = create_module(
+ f"""
+{'from __future__ import annotations' if with_future_annotations else ''}
+from typing_extensions import Annotated
+from dataclasses import dataclass
+
+@dataclass
+class Foo:
+ foo: Annotated[int, "Foo description"]
+"""
+ )
+ schema = create_schema_for_dataclass(module.Foo, generate_examples=False, plugins=[], schemas={})
+ assert schema.properties and "foo" in schema.properties
+
+
[email protected]("with_future_annotations", [True, False])
+def test_create_schema_for_typedict_with_annotated_required_and_not_required_model_attributes(
+ with_future_annotations: bool, create_module: "Callable[[str], ModuleType]"
+) -> None:
+ """Test that a model with an annotated attribute is correctly handled."""
+ module = create_module(
+ f"""
+{'from __future__ import annotations' if with_future_annotations else ''}
+from typing_extensions import Annotated, Required, NotRequired
+from typing import TypedDict
+
+class Foo(TypedDict):
+ foo: Annotated[int, "Foo description"]
+ bar: Annotated[Required[int], "Bar description"]
+ baz: Annotated[NotRequired[int], "Baz description"]
+"""
+ )
+ schema = create_schema_for_typed_dict(module.Foo, generate_examples=False, plugins=[], schemas={})
+ assert schema.properties and all(key in schema.properties for key in ("foo", "bar", "baz"))
| Bug: Openapi schema generation crashes for models with `Annotated` type attribute
### Summary
I have a pydantic model that uses a list of [discriminated unions](https://docs.pydantic.dev/usage/types/#nested-discriminated-unions). Because of the list I need to annotate the union with the discriminator. But then the schema generation throws an error that it can't map the type.
### Basic Example
```python
class Base(BaseModel):
pass
class A(Base):
type: Literal["a"] = "a"
class B(Base):
type: Literal["b"] = "b"
MyUnion = Annotated[Union[A, B], Field(discriminator="type")]
class Test(BaseModel):
entries: List[MyUnion] = []
```
If you now use ``Test`` as a parameter in a handler function and open, for example, the Swagger documentation, you get an error like:
> 500: Parameter '' with type 'typing.Annotated[typing.Union[A, B], FieldInfo(default=PydanticUndefined, discriminator='type', extra={})]' could not be mapped to an Open API type. This can occur if a user-defined generic type is resolved as a parameter. If '' should not be documented as a parameter, annotate it using the `Dependency` function, e.g., `: ... = Dependency(...)`.
### Drawbacks and Impact
_No response_
### Unresolved questions
_No response_
Hey, I tried to reproduce; can you look at this and tell me what to change to get it to fail, please?
```py
"""Minimal Starlite application."""
from __future__ import annotations
from typing import Annotated, Any, List, Literal, Union
from pydantic import BaseModel, Field
from starlite import Starlite, get
class Base(BaseModel):
"""A base model."""
class Foo(Base):
"""A Foo model."""
type: Literal["foo"] = "foo"
class Bar(Base):
"""A Bar model."""
type: Literal["bar"] = "bar"
AnnotatedUnion = Annotated[Union[Foo, Bar], Field(discriminator="type")]
class Test(BaseModel):
"""A Test model."""
entries: List[AnnotatedUnion] = []
@get("/")
def hello_world(test: Test) -> dict[str, Any]:
"""Route Handler that outputs hello world."""
return {"hello": "world"}
app = Starlite(route_handlers=[hello_world])
```
On starlite 1.51, python 3.11, I get:

from `/schema/swagger`
When I comment out ``from __future__ import annotations``, I get the error.
I'm running ``Python 3.10.9`` on top of the ``main`` (commit 23b84b67dd20d2836caf8e3b54797b2d15f42e7e) branch.
It seems that import is still needed in 3.10. Is starlite intentionally using the new behaviour?
Grepping through the code it is used alot, but I can't find any mention of it in the documentation.
The future import is very easy to miss. Is it possible to fix this when the user code does not do the import?
Oh - you're on main, OK that changes things. There were some pretty big changes to openapi internals for v2 which are in main and not in 1.51.
I have reproduced, thanks for the extra info.
> The future import is very easy to miss. Is it possible to fix this when the user code does not do the import?
It should work either way - I just use it without thinking.
I got the same problem, which is weird because I'm 95% sure that this used to work with my project (and I _hate_ setting hard constraints on my dependency versions, so I probably updated starlite to a newer version without noticing).
Note: I'm on starlite 1.51.7
I tested @peterschutt 's example, and it *does* work when you're using the `Test` class as an input type. However, if I happen to use the `AnnotatedUnion` as an input type (which I'm doing), I get the same error:
```python
"""Minimal Starlite application."""
from __future__ import annotations
from typing import Annotated, Any, Literal
import uvicorn
from pydantic import BaseModel, Field
from starlite import Starlite, get
class Base(BaseModel):
"""A base model."""
class Foo(Base):
"""A Foo model."""
type: Literal["foo"] = "foo"
class Bar(Base):
"""A Bar model."""
type: Literal["bar"] = "bar"
AnnotatedUnion = Annotated[Foo | Bar, Field(discriminator="type")]
@get("/")
def hello_world(test: AnnotatedUnion) -> dict[str, Any]:
"""Route Handler that outputs hello world."""
return {"hello": "world"}
app = Starlite(route_handlers=[hello_world])
if __name__ == '__main__':
uvicorn.run(app)
```
```
starlite.exceptions.http_exceptions.ImproperlyConfiguredException: 500: Parameter 'test' with type 'typing.Annotated[__main__.Foo | __main__.Bar, FieldInfo(default=PydanticUndefined, discriminator='type', extra={})]' could not be mapped to an Open API type. This can occur if a user-defined generic type is resolved as a parameter. If 'test' should not be documented as a parameter, annotate it using the `Dependency` function, e.g., `test: ... = Dependency(...)`.
```
Any idea on how to fix this as well (until some update comes along)?
EDIT: OK, I tested it on v2.0.0alpha2, and using the `AnnotatedUnion` as a type annotation for `test` does work on this version.
litestar-org/litestar | 1,401 | litestar-org__litestar-1401 | [
"4321",
"1234"
] | d184f4e184df53f8ac1bd8386028dbd72edae716 | diff --git a/starlite/_signature/utils.py b/starlite/_signature/utils.py
--- a/starlite/_signature/utils.py
+++ b/starlite/_signature/utils.py
@@ -75,4 +75,4 @@ def get_fn_type_hints(fn: Any, namespace: dict[str, Any] | None = None) -> dict[
**vars(sys.modules[fn_to_inspect.__module__]),
**(namespace or {}),
}
- return get_type_hints(fn_to_inspect, globalns=namespace)
+ return get_type_hints(fn_to_inspect, globalns=namespace, include_extras=True)
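What the one-line change does, in isolation: without `include_extras=True`, `get_type_hints` strips `Annotated` metadata, so annotations such as discriminator info were lost before signature modeling:
```python
from typing import get_type_hints

from typing_extensions import Annotated


def fn(a: Annotated[int, "a"]) -> None:
    ...


get_type_hints(fn)["a"]                       # int - metadata stripped
get_type_hints(fn, include_extras=True)["a"]  # Annotated[int, 'a'] - preserved
```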
| diff --git a/tests/signature/test_parsing.py b/tests/signature/test_parsing.py
--- a/tests/signature/test_parsing.py
+++ b/tests/signature/test_parsing.py
@@ -4,10 +4,11 @@
import pytest
from pydantic import BaseModel
+from typing_extensions import Annotated
from starlite import get
from starlite._signature import create_signature_model
-from starlite._signature.parsing import ParsedSignatureParameter
+from starlite._signature.parsing import ParsedSignatureParameter, parse_fn_signature
from starlite.di import Provide
from starlite.exceptions import ImproperlyConfiguredException, ValidationException
from starlite.params import Dependency, Parameter
@@ -228,3 +229,20 @@ def func(a: "int") -> None:
obj = object()
parsed_param = ParsedSignatureParameter.from_parameter("func", "a", signature.parameters["a"], {"a": obj})
assert parsed_param.annotation is obj
+
+
[email protected]("with_future_annotations", [True, False])
+def test_parsed_parameters_with_annotated_types(
+ with_future_annotations: bool, create_module: "Callable[[str], ModuleType]"
+) -> None:
+ module = create_module(
+ f"""
+{'from __future__ import annotations' if with_future_annotations else ''}
+from typing_extensions import Annotated
+
+def fn(a: Annotated[int, "a"]) -> None:
+ pass
+"""
+ )
+ (param, *_), _, __, ___ = parse_fn_signature(module.fn, [], set(), {})
+ assert param.annotation == Annotated[int, "a"]
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-29T06:44:57 |
litestar-org/litestar | 1,404 | litestar-org__litestar-1404 | [
"4321",
"1234"
] | e5095b6c8fe05f6852d6d372cdaabdcae10647db | diff --git a/docs/examples/stores/registry_configure_integrations.py b/docs/examples/stores/registry_configure_integrations.py
--- a/docs/examples/stores/registry_configure_integrations.py
+++ b/docs/examples/stores/registry_configure_integrations.py
@@ -8,7 +8,7 @@
app = Starlite(
stores={
"sessions": RedisStore.with_client(),
- "request_cache": FileStore(Path("request-cache")),
+ "response_cache": FileStore(Path("response-cache")),
},
middleware=[ServerSideSessionConfig().middleware],
)
diff --git a/docs/examples/stores/registry_default_factory_namespacing.py b/docs/examples/stores/registry_default_factory_namespacing.py
--- a/docs/examples/stores/registry_default_factory_namespacing.py
+++ b/docs/examples/stores/registry_default_factory_namespacing.py
@@ -9,7 +9,7 @@
@get(cache=True)
def cached_handler() -> str:
- # this will use app.stores.get("request_cache")
+ # this will use app.stores.get("response_cache")
return "Hello, world!"
diff --git a/starlite/config/response_cache.py b/starlite/config/response_cache.py
--- a/starlite/config/response_cache.py
+++ b/starlite/config/response_cache.py
@@ -40,7 +40,7 @@ class ResponseCacheConfig:
"""Default cache expiration in seconds."""
key_builder: CacheKeyBuilder = field(default=default_cache_key_builder)
""":class:`CacheKeyBuilder <.types.CacheKeyBuilder>`. Defaults to :func:`default_cache_key_builder`."""
- store: str = "request_cache"
+ store: str = "response_cache"
"""Name of the :class:`Store <.stores.base.Store>` to use."""
def get_store_from_app(self, app: Starlite) -> Store:
| diff --git a/docs/examples/tests/test_stores.py b/docs/examples/tests/test_stores.py
--- a/docs/examples/tests/test_stores.py
+++ b/docs/examples/tests/test_stores.py
@@ -97,7 +97,7 @@ async def test_configure_integrations(mock_redis: MagicMock) -> None:
assert isinstance(session_store, RedisStore)
assert isinstance(cache_store, FileStore)
- assert cache_store.path == Path("request-cache")
+ assert cache_store.path == Path("response-cache")
async def test_registry_default_factory() -> None:
diff --git a/tests/test_response_caching.py b/tests/test_response_caching.py
--- a/tests/test_response_caching.py
+++ b/tests/test_response_caching.py
@@ -101,7 +101,7 @@ async def handler() -> str:
with TestClient(app) as client:
client.get("/cached")
- store = app.stores.get("request_cache")
+ store = app.stores.get("response_cache")
assert await store.exists("/cached:::cached")
@@ -131,7 +131,7 @@ async def test_with_stores(store: Store, mock: MagicMock) -> None:
def handler() -> str:
return mock() # type: ignore[no-any-return]
- app = Starlite([handler], stores={"request_cache": store})
+ app = Starlite([handler], stores={"response_cache": store})
with TestClient(app=app) as client:
response_one = client.get("/")
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-29T13:08:13 |
litestar-org/litestar | 1,406 | litestar-org__litestar-1406 | [
"1375"
] | 1fdfca6229f2cff14f49a8e2a1fa0f809ff71bd5 | diff --git a/starlite/exceptions/websocket_exceptions.py b/starlite/exceptions/websocket_exceptions.py
--- a/starlite/exceptions/websocket_exceptions.py
+++ b/starlite/exceptions/websocket_exceptions.py
@@ -10,9 +10,8 @@ class WebSocketException(StarliteException):
"""Exception class for websocket related events."""
code: int
- """Exception code.
-
- Should be a number in the 4000+ range.
+ """Exception code. For custom exceptions, this should be a number in the 4000+ range. Other codes can be found in
+ ``starlite.status_code`` with the ``WS_`` prefix.
"""
def __init__(self, *args: Any, detail: str, code: int = 4500) -> None:
diff --git a/starlite/status_codes.py b/starlite/status_codes.py
--- a/starlite/status_codes.py
+++ b/starlite/status_codes.py
@@ -1,118 +1,242 @@
-"""Includes code adapted from https://github.com/encode/starlette/blob/master/starlette/status.py.
-
-Copyright © 2018, [Encode OSS Ltd](https://www.encode.io/).
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
-* Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""
-
from typing import Literal
# HTTP Status Codes
HTTP_100_CONTINUE: Literal[100] = 100
+"""HTTP status code 'Continue'"""
+
HTTP_101_SWITCHING_PROTOCOLS: Literal[101] = 101
+"""HTTP status code 'Switching Protocols'"""
+
HTTP_102_PROCESSING: Literal[102] = 102
+"""HTTP status code 'Processing'"""
+
HTTP_103_EARLY_HINTS: Literal[103] = 103
+"""HTTP status code 'Early Hints'"""
+
HTTP_200_OK: Literal[200] = 200
+"""HTTP status code 'OK'"""
+
HTTP_201_CREATED: Literal[201] = 201
+"""HTTP status code 'Created'"""
+
HTTP_202_ACCEPTED: Literal[202] = 202
+"""HTTP status code 'Accepted'"""
+
HTTP_203_NON_AUTHORITATIVE_INFORMATION: Literal[203] = 203
+"""HTTP status code 'Non Authoritative Information'"""
+
HTTP_204_NO_CONTENT: Literal[204] = 204
+"""HTTP status code 'No Content'"""
+
HTTP_205_RESET_CONTENT: Literal[205] = 205
+"""HTTP status code 'Reset Content'"""
+
HTTP_206_PARTIAL_CONTENT: Literal[206] = 206
+"""HTTP status code 'Partial Content'"""
+
HTTP_207_MULTI_STATUS: Literal[207] = 207
+"""HTTP status code 'Multi Status'"""
+
HTTP_208_ALREADY_REPORTED: Literal[208] = 208
+"""HTTP status code 'Already Reported'"""
+
HTTP_226_IM_USED: Literal[226] = 226
+"""HTTP status code 'I'm Used'"""
+
HTTP_300_MULTIPLE_CHOICES: Literal[300] = 300
+"""HTTP status code 'Multiple Choices'"""
+
HTTP_301_MOVED_PERMANENTLY: Literal[301] = 301
+"""HTTP status code 'Moved Permanently'"""
+
HTTP_302_FOUND: Literal[302] = 302
+"""HTTP status code 'Found'"""
+
HTTP_303_SEE_OTHER: Literal[303] = 303
+"""HTTP status code 'See Other'"""
+
HTTP_304_NOT_MODIFIED: Literal[304] = 304
+"""HTTP status code 'Not Modified'"""
+
HTTP_305_USE_PROXY: Literal[305] = 305
+"""HTTP status code 'Use Proxy'"""
+
HTTP_306_RESERVED: Literal[306] = 306
+"""HTTP status code 'Reserved'"""
+
HTTP_307_TEMPORARY_REDIRECT: Literal[307] = 307
+"""HTTP status code 'Temporary Redirect'"""
+
HTTP_308_PERMANENT_REDIRECT: Literal[308] = 308
+"""HTTP status code 'Permanent Redirect'"""
+
HTTP_400_BAD_REQUEST: Literal[400] = 400
+"""HTTP status code 'Bad Request'"""
+
HTTP_401_UNAUTHORIZED: Literal[401] = 401
+"""HTTP status code 'Unauthorized'"""
+
HTTP_402_PAYMENT_REQUIRED: Literal[402] = 402
+"""HTTP status code 'Payment Required'"""
+
HTTP_403_FORBIDDEN: Literal[403] = 403
+"""HTTP status code 'Forbidden'"""
+
HTTP_404_NOT_FOUND: Literal[404] = 404
+"""HTTP status code 'Not Found'"""
+
HTTP_405_METHOD_NOT_ALLOWED: Literal[405] = 405
+"""HTTP status code 'Method Not Allowed'"""
+
HTTP_406_NOT_ACCEPTABLE: Literal[406] = 406
+"""HTTP status code 'Not Acceptable'"""
+
HTTP_407_PROXY_AUTHENTICATION_REQUIRED: Literal[407] = 407
+"""HTTP status code 'Proxy Authentication Required'"""
+
HTTP_408_REQUEST_TIMEOUT: Literal[408] = 408
+"""HTTP status code 'Request Timeout'"""
+
HTTP_409_CONFLICT: Literal[409] = 409
+"""HTTP status code 'Conflict'"""
+
HTTP_410_GONE: Literal[410] = 410
+"""HTTP status code 'Gone'"""
+
HTTP_411_LENGTH_REQUIRED: Literal[411] = 411
+"""HTTP status code 'Length Required'"""
+
HTTP_412_PRECONDITION_FAILED: Literal[412] = 412
+"""HTTP status code 'Precondition Failed'"""
+
HTTP_413_REQUEST_ENTITY_TOO_LARGE: Literal[413] = 413
+"""HTTP status code 'Request Entity Too Large'"""
+
HTTP_414_REQUEST_URI_TOO_LONG: Literal[414] = 414
+"""HTTP status code 'Request URI Too Long'"""
+
HTTP_415_UNSUPPORTED_MEDIA_TYPE: Literal[415] = 415
+"""HTTP status code 'Unsupported Media Type'"""
+
HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE: Literal[416] = 416
+"""HTTP status code 'Requested Range Not Satisfiable'"""
+
HTTP_417_EXPECTATION_FAILED: Literal[417] = 417
+"""HTTP status code 'Expectation Failed'"""
+
HTTP_418_IM_A_TEAPOT: Literal[418] = 418
+"""HTTP status code 'I'm A Teapot'"""
+
HTTP_421_MISDIRECTED_REQUEST: Literal[421] = 421
+"""HTTP status code 'Misdirected Request'"""
+
HTTP_422_UNPROCESSABLE_ENTITY: Literal[422] = 422
+"""HTTP status code 'Unprocessable Entity'"""
+
HTTP_423_LOCKED: Literal[423] = 423
+"""HTTP status code 'Locked'"""
+
HTTP_424_FAILED_DEPENDENCY: Literal[424] = 424
+"""HTTP status code 'Failed Dependency'"""
+
HTTP_425_TOO_EARLY: Literal[425] = 425
+"""HTTP status code 'Too Early'"""
+
HTTP_426_UPGRADE_REQUIRED: Literal[426] = 426
+"""HTTP status code 'Upgrade Required'"""
+
HTTP_428_PRECONDITION_REQUIRED: Literal[428] = 428
+"""HTTP status code 'Precondition Required'"""
+
HTTP_429_TOO_MANY_REQUESTS: Literal[429] = 429
+"""HTTP status code 'Too Many Requests'"""
+
HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE: Literal[431] = 431
+"""HTTP status code 'Request Header Fields Too Large'"""
+
HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS: Literal[451] = 451
+"""HTTP status code 'Unavailable For Legal Reasons'"""
+
HTTP_500_INTERNAL_SERVER_ERROR: Literal[500] = 500
+"""HTTP status code 'Internal Server Error'"""
+
HTTP_501_NOT_IMPLEMENTED: Literal[501] = 501
+"""HTTP status code 'Not Implemented'"""
+
HTTP_502_BAD_GATEWAY: Literal[502] = 502
+"""HTTP status code 'Bad Gateway'"""
+
HTTP_503_SERVICE_UNAVAILABLE: Literal[503] = 503
+"""HTTP status code 'Service Unavailable'"""
+
HTTP_504_GATEWAY_TIMEOUT: Literal[504] = 504
+"""HTTP status code 'Gateway Timeout'"""
+
HTTP_505_HTTP_VERSION_NOT_SUPPORTED: Literal[505] = 505
+"""HTTP status code 'Http Version Not Supported'"""
+
HTTP_506_VARIANT_ALSO_NEGOTIATES: Literal[506] = 506
+"""HTTP status code 'Variant Also Negotiates'"""
+
HTTP_507_INSUFFICIENT_STORAGE: Literal[507] = 507
+"""HTTP status code 'Insufficient Storage'"""
+
HTTP_508_LOOP_DETECTED: Literal[508] = 508
+"""HTTP status code 'Loop Detected'"""
+
HTTP_510_NOT_EXTENDED: Literal[510] = 510
+"""HTTP status code 'Not Extended'"""
+
HTTP_511_NETWORK_AUTHENTICATION_REQUIRED: Literal[511] = 511
+"""HTTP status code 'Network Authentication Required'"""
+
# Websocket Codes
WS_1000_NORMAL_CLOSURE: Literal[1000] = 1000
+"""WebSocket status code 'Normal Closure'"""
+
WS_1001_GOING_AWAY: Literal[1001] = 1001
+"""WebSocket status code 'Going Away'"""
+
WS_1002_PROTOCOL_ERROR: Literal[1002] = 1002
+"""WebSocket status code 'Protocol Error'"""
+
WS_1003_UNSUPPORTED_DATA: Literal[1003] = 1003
+"""WebSocket status code 'Unsupported Data'"""
+
WS_1005_NO_STATUS_RECEIVED: Literal[1005] = 1005
+"""WebSocket status code 'No Status Received'"""
+
WS_1006_ABNORMAL_CLOSURE: Literal[1006] = 1006
+"""WebSocket status code 'Abnormal Closure'"""
+
WS_1007_INVALID_FRAME_PAYLOAD_DATA: Literal[1007] = 1007
+"""WebSocket status code 'Invalid Frame Payload Data'"""
+
WS_1008_POLICY_VIOLATION: Literal[1008] = 1008
+"""WebSocket status code 'Policy Violation'"""
+
WS_1009_MESSAGE_TOO_BIG: Literal[1009] = 1009
+"""WebSocket status code 'Message Too Big'"""
+
WS_1010_MANDATORY_EXT: Literal[1010] = 1010
+"""WebSocket status code 'Mandatory Ext.'"""
+
WS_1011_INTERNAL_ERROR: Literal[1011] = 1011
+"""WebSocket status code 'Internal Error'"""
+
WS_1012_SERVICE_RESTART: Literal[1012] = 1012
+"""WebSocket status code 'Service Restart'"""
+
WS_1013_TRY_AGAIN_LATER: Literal[1013] = 1013
+"""WebSocket status code 'Try Again Later'"""
+
WS_1014_BAD_GATEWAY: Literal[1014] = 1014
+"""WebSocket status code 'Bad Gateway'"""
+
WS_1015_TLS_HANDSHAKE: Literal[1015] = 1015
+"""WebSocket status code 'TLS Handshake'"""
__all__ = (
| Docs: WebSocketException code value guidelines unclear
### Summary
https://github.com/starlite-api/starlite/blob/18337975649c160dce4671fdd72ce18dad0517b2/starlite/exceptions/websocket_exceptions.py#LL9-L27C25
```py
class WebSocketException(StarliteException):
"""Exception class for websocket related events."""
code: int
"""Exception code.
Should be a number in the 4000+ range.
"""
def __init__(self, *args: Any, detail: str, code: int = 4500) -> None:
"""Initialize ``WebSocketException``.
Args:
*args: Any exception args.
detail: Exception details.
code: Exception code. Should be a number in the >= 1000.
"""
super().__init__(*args, detail=detail)
self.code = code
```
The comment below `code: int` says `Should be a number in the 4000+ range.`
The doc-string for `__init__` says `code: Exception code. Should be a number in the >= 1000.`
I am not entirely sure which guideline is correct. Hopefully someone more familiar with websocket exception codes can provide some insight below.
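With the docstring clarified by the patch above, usage splits into two cases; a minimal sketch, assuming `WebSocketException` is exported from `starlite.exceptions`:
```python
from starlite.exceptions import WebSocketException
from starlite.status_codes import WS_1011_INTERNAL_ERROR

# application-defined close codes should live in the 4000+ range
WebSocketException(detail="domain error", code=4001)

# predefined codes come from starlite.status_codes with the WS_ prefix
WebSocketException(detail="server error", code=WS_1011_INTERNAL_ERROR)
```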
| 2023-03-29T14:20:41 |
||
litestar-org/litestar | 1,407 | litestar-org__litestar-1407 | [
"4321",
"1234"
] | a075c4bf5d84a4557fbfa2c0071222d7d8a2eeb1 | diff --git a/starlite/plugins/sql_alchemy/plugin.py b/starlite/plugins/sql_alchemy/plugin.py
--- a/starlite/plugins/sql_alchemy/plugin.py
+++ b/starlite/plugins/sql_alchemy/plugin.py
@@ -426,7 +426,7 @@ def to_dict(self, model_instance: "DeclarativeMeta") -> Dict[str, Any]:
pydantic_model = self._model_namespace_map.get(model_class.__qualname__) or self.to_pydantic_model_class(
model_class=model_class
)
- return pydantic_model.from_orm(model_instance).dict() # type:ignore[pydantic-unexpected]
+ return pydantic_model.from_orm(model_instance).dict()
def from_dict(self, model_class: "Type[DeclarativeMeta]", **kwargs: Any) -> DeclarativeMeta:
"""Given a dictionary of kwargs, return an instance of the given model_class.
diff --git a/starlite/routes/http.py b/starlite/routes/http.py
--- a/starlite/routes/http.py
+++ b/starlite/routes/http.py
@@ -251,7 +251,9 @@ async def _set_cached_response(
await cache.set(
key=cache_key,
value=pickle.dumps(response, pickle.HIGHEST_PROTOCOL),
- expiration=route_handler.cache if isinstance(route_handler.cache, int) else None,
+ expiration=route_handler.cache
+ if isinstance(route_handler.cache, int) and route_handler.cache is not True
+ else None,
)
def create_options_handler(self, path: str) -> "HTTPRouteHandler":
| diff --git a/tests/caching/test_response_caching.py b/tests/caching/test_response_caching.py
--- a/tests/caching/test_response_caching.py
+++ b/tests/caching/test_response_caching.py
@@ -2,16 +2,68 @@
from time import sleep
import pytest
+from _pytest.fixtures import FixtureRequest
+from fakeredis.aioredis import FakeRedis
from freezegun import freeze_time
+from pytest_mock import MockerFixture
from starlite import CacheConfig, Request, get
+from starlite.cache.base import CacheBackendProtocol
+from starlite.cache.memcached_cache_backend import (
+ MemcachedCacheBackend,
+ MemcachedCacheBackendConfig,
+)
+from starlite.cache.redis_cache_backend import (
+ RedisCacheBackend,
+ RedisCacheBackendConfig,
+)
+from starlite.cache.simple_cache_backend import SimpleCacheBackend
from starlite.testing import create_test_client
+from ..mocks import FakeAsyncMemcached
from . import after_request_handler, slow_handler
[email protected]()
+def fake_redis(mocker: MockerFixture) -> FakeRedis:
+ redis = FakeRedis()
+ mocker.patch("starlite.cache.redis_cache_backend.Redis")
+ mocker.patch("starlite.cache.redis_cache_backend.RedisCacheBackend._redis", redis)
+
+ return redis
+
+
[email protected]
+def redis_backend(fake_redis: FakeRedis) -> RedisCacheBackend:
+ return RedisCacheBackend(config=RedisCacheBackendConfig(url="redis://something"))
+
+
[email protected]()
+def fake_memcached(mocker: MockerFixture) -> FakeAsyncMemcached:
+ memcached = FakeAsyncMemcached()
+ mocker.patch("starlite.cache.memcached_cache_backend.Client")
+ mocker.patch("starlite.cache.memcached_cache_backend.MemcachedCacheBackend._memcached_client", memcached)
+
+ return memcached
+
+
[email protected]()
+def memcached_backend(fake_memcached: FakeAsyncMemcached) -> MemcachedCacheBackend:
+ return MemcachedCacheBackend(config=MemcachedCacheBackendConfig(host="localhost"))
+
+
[email protected]()
+def simple_cache_backend() -> SimpleCacheBackend:
+ return SimpleCacheBackend()
+
+
[email protected](params=["redis_backend", "memcached_backend", "simple_cache_backend"])
+def cache_backend(request: FixtureRequest) -> CacheBackendProtocol:
+ return request.getfixturevalue(request.param) # type: ignore[no-any-return]
+
+
@pytest.mark.parametrize("sync_to_thread", (True, False))
-def test_default_cache_response(sync_to_thread: bool) -> None:
+def test_default_cache_response(sync_to_thread: bool, cache_backend: CacheBackendProtocol) -> None:
with create_test_client(
route_handlers=[
get(
@@ -24,6 +76,7 @@ def test_default_cache_response(sync_to_thread: bool) -> None:
)(slow_handler)
],
after_request=after_request_handler,
+ cache_config=CacheConfig(backend=cache_backend),
) as client:
first_response = client.get("/cached")
assert first_response.status_code == 200
@@ -38,10 +91,12 @@ def test_default_cache_response(sync_to_thread: bool) -> None:
assert first_response.json() == second_response.json()
-def test_handler_expiration() -> None:
+def test_handler_expiration(cache_backend: CacheBackendProtocol) -> None:
now = datetime.now()
with freeze_time(now) as frozen_datetime, create_test_client(
- route_handlers=[get("/cached-local", cache=10)(slow_handler)], after_request=after_request_handler
+ route_handlers=[get("/cached-local", cache=10)(slow_handler)],
+ after_request=after_request_handler,
+ cache_config=CacheConfig(backend=cache_backend),
) as client:
first_response = client.get("/cached-local")
frozen_datetime.tick(delta=timedelta(seconds=5))
@@ -52,7 +107,7 @@ def test_handler_expiration() -> None:
assert first_response.headers["unique-identifier"] != third_response.headers["unique-identifier"]
-def test_default_expiration() -> None:
+def test_default_expiration(cache_backend: CacheBackendProtocol) -> None:
with create_test_client(
route_handlers=[get("/cached-default", cache=True)(slow_handler)],
after_request=after_request_handler,
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
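To make the use case concrete, here is a minimal sketch of the kind of file system this would enable. The ``info``/``open`` method names follow the fsspec-style interface and are illustrative rather than the exact protocol:
```python
from importlib.resources import files
from typing import Any

class PackageFileSystem:
    """Illustrative file system serving package data via importlib.resources.

    ``files()`` returns a ``Traversable`` that also works when the package is
    shipped zipped, so no real directory for it ever exists on disk.
    """

    def __init__(self, package: str) -> None:
        self._root = files(package)

    def info(self, path: str, **kwargs: Any) -> dict[str, Any]:  # fsspec-style, assumed
        resource = self._root.joinpath(path)
        return {"name": path, "type": "file" if resource.is_file() else "directory"}

    def open(self, path: str, mode: str = "rb", **kwargs: Any) -> Any:  # fsspec-style, assumed
        return self._root.joinpath(path).open(mode)
```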
| I agree. Do you want to submit a PR? | 2023-03-29T15:11:54 |
litestar-org/litestar | 1,410 | litestar-org__litestar-1410 | [
"1409",
"4321"
] | 0eddf7f8184abfb96354fb390aac295bd9ac50f6 | diff --git a/starlite/_openapi/responses.py b/starlite/_openapi/responses.py
--- a/starlite/_openapi/responses.py
+++ b/starlite/_openapi/responses.py
@@ -4,6 +4,7 @@
from copy import copy
from dataclasses import asdict
from http import HTTPStatus
+from inspect import Signature
from operator import attrgetter
from typing import TYPE_CHECKING, Any, Iterator
@@ -11,6 +12,7 @@
from starlite._openapi.schema_generation import create_schema
from starlite._signature.models import SignatureField
+from starlite._signature.utils import get_signature_model
from starlite.enums import MediaType
from starlite.exceptions import HTTPException, ValidationException
from starlite.openapi.spec import OpenAPIResponse
@@ -20,6 +22,7 @@
from starlite.openapi.spec.schema import Schema
from starlite.response import Response as StarliteResponse
from starlite.response_containers import File, Redirect, Stream, Template
+from starlite.types.builtin_types import NoneType
from starlite.utils import get_enum_string_value, get_name, is_class_and_subclass
if TYPE_CHECKING:
@@ -67,7 +70,7 @@ def create_success_response(
schemas: dict[str, "Schema"],
) -> OpenAPIResponse:
"""Create the schema for a success response."""
- signature = route_handler.signature
+ return_annotation = get_signature_model(route_handler).return_annotation
default_descriptions: dict[Any, str] = {
Stream: "Stream Response",
Redirect: "Redirect Response",
@@ -75,17 +78,16 @@ def create_success_response(
}
description = (
route_handler.response_description
- or default_descriptions.get(signature.return_annotation)
+ or default_descriptions.get(return_annotation)
or HTTPStatus(route_handler.status_code).description
)
- if signature.return_annotation not in {signature.empty, None, Redirect, File, Stream}:
- return_annotation = signature.return_annotation
- if signature.return_annotation is Template:
+ if return_annotation not in {Signature.empty, None, NoneType, Redirect, File, Stream}:
+ if return_annotation is Template:
return_annotation = str
route_handler.media_type = get_enum_string_value(MediaType.HTML)
- elif is_class_and_subclass(get_origin(signature.return_annotation), StarliteResponse):
- return_annotation = get_args(signature.return_annotation)[0] or Any
+ elif is_class_and_subclass(get_origin(return_annotation), StarliteResponse):
+ return_annotation = get_args(return_annotation)[0] or Any
result = create_schema(
field=SignatureField.create(field_type=return_annotation),
@@ -108,7 +110,7 @@ def create_success_response(
description=description,
)
- elif signature.return_annotation is Redirect:
+ elif return_annotation is Redirect:
response = OpenAPIResponse(
content=None,
description=description,
@@ -119,7 +121,7 @@ def create_success_response(
},
)
- elif signature.return_annotation in (File, Stream):
+ elif return_annotation in (File, Stream):
response = OpenAPIResponse(
content={
route_handler.media_type: OpenAPIMediaType(
diff --git a/starlite/_signature/parsing.py b/starlite/_signature/parsing.py
--- a/starlite/_signature/parsing.py
+++ b/starlite/_signature/parsing.py
@@ -183,7 +183,7 @@ def parse_fn_signature(
parsed_params.append(parameter)
- return parsed_params, signature.return_annotation, field_plugin_mappings, dependency_names
+ return parsed_params, fn_type_hints.get("return", Signature.empty), field_plugin_mappings, dependency_names
def create_signature_model(
| diff --git a/tests/openapi/test_responses.py b/tests/openapi/test_responses.py
--- a/tests/openapi/test_responses.py
+++ b/tests/openapi/test_responses.py
@@ -1,13 +1,14 @@
from dataclasses import dataclass
from http import HTTPStatus
from pathlib import Path
-from typing import Dict
+from types import ModuleType
+from typing import Callable, Dict
import pytest
from pydantic import BaseModel
from typing_extensions import TypedDict
-from starlite import MediaType, Response, Starlite, get
+from starlite import Controller, MediaType, Response, Starlite, get
from starlite._openapi.responses import (
create_additional_responses,
create_error_responses,
@@ -20,6 +21,7 @@
PermissionDeniedException,
ValidationException,
)
+from starlite.handlers import HTTPRouteHandler
from starlite.openapi.datastructures import ResponseSpec
from starlite.openapi.spec import OpenAPIHeader, OpenAPIMediaType, Reference, Schema
from starlite.openapi.spec.enums import OpenAPIType
@@ -36,6 +38,11 @@
from tests.openapi.utils import PersonController, PetController, PetException
+def get_registered_route_handler(handler: "HTTPRouteHandler | type[Controller]", name: str) -> HTTPRouteHandler:
+ app = Starlite(route_handlers=[handler])
+ return app.asgi_router.route_handler_index[name] # type: ignore[return-value]
+
+
def test_create_responses() -> None:
for route in Starlite(route_handlers=[PersonController]).routes:
assert isinstance(route, HTTPRoute)
@@ -52,8 +59,9 @@ def test_create_responses() -> None:
assert str(route_handler.status_code) in responses
assert str(HTTP_400_BAD_REQUEST) in responses
+ handler = get_registered_route_handler(PetController, "tests.openapi.utils.PetController.get_pets_or_owners")
responses = create_responses(
- route_handler=PetController.get_pets_or_owners,
+ route_handler=handler,
raises_validation_error=False,
generate_examples=True,
plugins=[],
@@ -137,10 +145,12 @@ def test_create_success_response_with_headers() -> None:
response_description="test",
content_encoding="base64",
content_media_type="image/png",
+ name="test",
)
def handler() -> list:
return []
+ handler = get_registered_route_handler(handler, "test")
response = create_success_response(handler, True, plugins=[], schemas={})
assert response.description == "test"
@@ -166,10 +176,12 @@ def test_create_success_response_with_cookies() -> None:
Cookie(key="first-cookie", httponly=True, samesite="strict", description="the first cookie", secure=True),
Cookie(key="second-cookie", max_age=500, description="the second cookie"),
],
+ name="test",
)
def handler() -> list:
return []
+ handler = get_registered_route_handler(handler, "test")
response = create_success_response(handler, True, plugins=[], schemas={})
assert isinstance(response.headers, dict)
@@ -191,10 +203,11 @@ def handler() -> list:
def test_create_success_response_with_response_class() -> None:
- @get(path="/test")
+ @get(path="/test", name="test")
def handler() -> Response[Person]:
return Response(content=PersonFactory.build())
+ handler = get_registered_route_handler(handler, "test")
schemas: Dict[str, Schema] = {}
response = create_success_response(handler, True, plugins=[], schemas=schemas)
@@ -208,20 +221,23 @@ def handler() -> Response[Person]:
def test_create_success_response_with_stream() -> None:
- @get(path="/test")
+ @get(path="/test", name="test")
def handler() -> Stream:
return Stream(iterator=iter([]))
+ handler = get_registered_route_handler(handler, "test")
response = create_success_response(handler, True, plugins=[], schemas={})
assert response.description == "Stream Response"
def test_create_success_response_redirect() -> None:
- @get(path="/test", status_code=HTTP_307_TEMPORARY_REDIRECT)
+ @get(path="/test", status_code=HTTP_307_TEMPORARY_REDIRECT, name="test")
def redirect_handler() -> Redirect:
return Redirect(path="/target")
- response = create_success_response(redirect_handler, True, plugins=[], schemas={})
+ handler = get_registered_route_handler(redirect_handler, "test")
+
+ response = create_success_response(handler, True, plugins=[], schemas={})
assert response.description == "Redirect Response"
assert response.headers
location = response.headers["location"]
@@ -232,11 +248,13 @@ def redirect_handler() -> Redirect:
def test_create_success_response_file_data() -> None:
- @get(path="/test")
+ @get(path="/test", name="test")
def file_handler() -> File:
return File(path=Path("test_responses.py"))
- response = create_success_response(file_handler, True, plugins=[], schemas={})
+ handler = get_registered_route_handler(file_handler, "test")
+
+ response = create_success_response(handler, True, plugins=[], schemas={})
assert response.description == "File Download"
assert response.headers
@@ -257,11 +275,13 @@ def file_handler() -> File:
def test_create_success_response_template() -> None:
- @get(path="/template")
+ @get(path="/template", name="test")
def template_handler() -> Template:
return Template(name="none")
- response = create_success_response(template_handler, True, plugins=[], schemas={})
+ handler = get_registered_route_handler(template_handler, "test")
+
+ response = create_success_response(handler, True, plugins=[], schemas={})
assert response.description == "Request fulfilled, document follows"
assert response.content
assert response.content[MediaType.HTML.value]
@@ -329,10 +349,11 @@ def test_additional_responses_overlap_with_other_responses() -> None:
class OkResponse(BaseModel):
message: str
- @get(responses={200: ResponseSpec(data_container=OkResponse, description="Overwritten response")})
+ @get(responses={200: ResponseSpec(data_container=OkResponse, description="Overwritten response")}, name="test")
def handler() -> Person:
return PersonFactory.build()
+ handler = get_registered_route_handler(handler, "test")
responses = create_responses(handler, raises_validation_error=True, generate_examples=False, plugins=[], schemas={})
assert responses is not None
@@ -347,10 +368,13 @@ class ErrorResponse(BaseModel):
@get(
raises=[ValidationException],
responses={400: ResponseSpec(data_container=ErrorResponse, description="Overwritten response")},
+ name="test",
)
def handler() -> Person:
raise ValidationException()
+ handler = get_registered_route_handler(handler, "test")
+
responses = create_responses(handler, raises_validation_error=True, generate_examples=False, plugins=[], schemas={})
assert responses is not None
@@ -362,10 +386,12 @@ def test_create_response_for_response_subclass() -> None:
class CustomResponse(Response[T]):
pass
- @get(path="/test")
+ @get(path="/test", name="test")
def handler() -> CustomResponse[Person]:
return CustomResponse(content=PersonFactory.build())
+ handler = get_registered_route_handler(handler, "test")
+
schemas: Dict[str, Schema] = {}
response = create_success_response(handler, True, plugins=[], schemas=schemas)
assert response.content
@@ -374,3 +400,19 @@ def handler() -> CustomResponse[Person]:
assert isinstance(reference, Reference)
schema = schemas[reference.value]
assert schema.title == "Person"
+
+
+def test_success_response_with_future_annotations(create_module: Callable[[str], ModuleType]) -> None:
+ module = create_module(
+ """
+from __future__ import annotations
+from starlite import get
+
+@get(path="/test", name="test")
+def handler() -> int:
+ ...
+"""
+ )
+ handler = get_registered_route_handler(module.handler, "test")
+ response = create_success_response(handler, True, plugins=[], schemas={})
+ assert next(iter(response.content.values())).schema.type == OpenAPIType.INTEGER # type: ignore[union-attr]
| Bug: Openapi schema generation using stringized return annotation
### Description
If a handler has a string return annotation, either written literally as ` -> "Model":` or produced by `from __future__ import annotations`, the responses generated for OpenAPI make no attempt to de-stringize it, leading to a no-content response being generated.

### URL to code causing the issue
_No response_
### MCVE
```python
"""Minimal Starlite application."""
from __future__ import annotations
from starlite import Starlite, get
@get("/")
def hello_world() -> dict[str, str]:
"""Route Handler that outputs hello world."""
return {"hello": "world"}
app = Starlite(route_handlers=[hello_world], debug=True)
```
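For context, this happens because `inspect.Signature` leaves PEP 563 string annotations unresolved, while `typing.get_type_hints` evaluates them; the patch above switches the response generation to the latter. A quick illustration (Python 3.9+):
```python
from __future__ import annotations
import inspect
import typing

def handler() -> dict[str, str]:
    ...

print(inspect.signature(handler).return_annotation)  # "dict[str, str]" -- a plain string
print(typing.get_type_hints(handler)["return"])      # dict[str, str] -- the actual type
```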
### Steps to reproduce
```bash
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
```
### Screenshots
```bash
""
```
### Logs
_No response_
### Starlite Version
main
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| 2023-03-30T03:34:17 |
|
litestar-org/litestar | 1,412 | litestar-org__litestar-1412 | [
"1371"
] | 2bbdfcc1c9625aa5411d30c5bf5c34980383be84 | diff --git a/starlite/handlers/http_handlers/base.py b/starlite/handlers/http_handlers/base.py
--- a/starlite/handlers/http_handlers/base.py
+++ b/starlite/handlers/http_handlers/base.py
@@ -8,14 +8,12 @@
from starlite._layers.utils import narrow_response_cookies, narrow_response_headers
from starlite._signature.utils import get_signature_model
-from starlite.constants import REDIRECT_STATUS_CODES
from starlite.datastructures.cookie import Cookie
from starlite.datastructures.response_header import ResponseHeader
from starlite.enums import HttpMethod, MediaType
from starlite.exceptions import (
HTTPException,
ImproperlyConfiguredException,
- ValidationException,
)
from starlite.handlers.base import BaseRouteHandler
from starlite.handlers.http_handlers._utils import (
@@ -500,15 +498,6 @@ def _validate_handler_function(self) -> None:
"If the function should return a value, change the route handler status code to an appropriate value.",
)
- if (
- is_class_and_subclass(self.signature.return_annotation, Redirect)
- and self.status_code not in REDIRECT_STATUS_CODES
- ):
- raise ValidationException(
- f"Redirect responses should have one of "
- f"the following status codes: {', '.join([str(s) for s in REDIRECT_STATUS_CODES])}"
- )
-
if (
is_class_and_subclass(self.signature.return_annotation, File)
or is_class_and_subclass(self.signature.return_annotation, FileResponse)
diff --git a/starlite/response_containers.py b/starlite/response_containers.py
--- a/starlite/response_containers.py
+++ b/starlite/response_containers.py
@@ -215,6 +215,8 @@ class Redirect(ResponseContainer[RedirectResponse]):
"""If defined, overrides the media type configured in the route decorator."""
encoding: str = field(default="utf-8")
"""The encoding to be used for the response headers."""
+ status_code: Literal[301, 302, 303, 307, 308] | None = None
+ """Redirect status code"""
def to_response( # type: ignore[override]
self,
@@ -242,7 +244,7 @@ def to_response( # type: ignore[override]
background=self.background,
encoding=self.encoding,
headers=headers,
- status_code=status_code,
+ status_code=self.status_code or status_code,
url=self.path,
)
| diff --git a/tests/handlers/http/test_validations.py b/tests/handlers/http/test_validations.py
--- a/tests/handlers/http/test_validations.py
+++ b/tests/handlers/http/test_validations.py
@@ -34,19 +34,13 @@ def test_route_handler_validation_http_method() -> None:
route(http_method=[HttpMethod.GET, "poft"], status_code=HTTP_200_OK) # type: ignore
-async def test_function_validation(anyio_backend: str) -> None:
+async def test_function_validation() -> None:
with pytest.raises(ImproperlyConfiguredException):
@get(path="/")
def method_with_no_annotation(): # type: ignore
pass
- with pytest.raises(ValidationException):
-
- @get(path="/", status_code=HTTP_200_OK)
- def redirect_method_without_proper_status() -> Redirect:
- return Redirect(path="/redirected")
-
with pytest.raises(ImproperlyConfiguredException):
@delete(path="/")
diff --git a/tests/test_response_containers.py b/tests/test_response_containers.py
--- a/tests/test_response_containers.py
+++ b/tests/test_response_containers.py
@@ -2,7 +2,7 @@
from inspect import iscoroutine
from os import stat
from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
import pytest
from fsspec.implementations.local import LocalFileSystem
@@ -11,7 +11,7 @@
from starlite.datastructures import ETag
from starlite.exceptions import ImproperlyConfiguredException
from starlite.file_system import BaseLocalFileSystem
-from starlite.response_containers import File
+from starlite.response_containers import File, Redirect
from starlite.status_codes import HTTP_200_OK
from starlite.testing import RequestFactory, create_test_client
@@ -155,3 +155,34 @@ def open(self) -> None:
path=path,
file_system=ImplementedFS(),
)
+
+
[email protected](
+ "status_code,expected_status_code",
+ [
+ (301, 301),
+ (302, 302),
+ (303, 303),
+ (307, 307),
+ (308, 308),
+ ],
+)
+def test_redirect_dynamic_status_code(status_code: Optional[int], expected_status_code: int) -> None:
+ @get("/")
+ def handler() -> Redirect:
+ return Redirect(path="/something-else", status_code=status_code) # type: ignore[arg-type]
+
+ with create_test_client([handler]) as client:
+ res = client.get("/", follow_redirects=False)
+ assert res.status_code == expected_status_code
+
+
[email protected]("handler_status_code", [301, 307, None])
+def test_redirect(handler_status_code: Optional[int]) -> None:
+ @get("/", status_code=handler_status_code)
+ def handler() -> Redirect:
+ return Redirect(path="/something-else", status_code=301)
+
+ with create_test_client([handler]) as client:
+ res = client.get("/", follow_redirects=False)
+ assert res.status_code == 301
| Combining redirects and templated responses
## Context
I'm working on a project with non-API/non-RESTful routes where, depending on the result of backend processing, either one of these two types of responses should be returned:
1. A templated HTML response ([`Template`]); or
2. A redirect to another (external) URL ([`Redirect`])
I'm struggling with how to implement this, and I'm not sure whether I'm running into a documentation issue where it's just not clear, or an actual API design limitation of Starlite's response handling in general.
## Environment info
- `Linux l-057 6.1.18-200.fc37.x86_64 #1 SMP PREEMPT_DYNAMIC Sat Mar 11 16:09:14 UTC 2023 x86_64 x86_64 x86_64 GNU/Linux`
- `Python 3.11.2`
- `starlite 1.51.7` ([full requirements.txt](https://gist.github.com/zoni/c3b0c2afc8f40c801f18ecc5b9f00d7b/8b1ec72b3895dddb0cd795c025ae90ac0e73cbbd#file-requirements-txt))
## Partial solutions
I've gotten quite close to a working solution that meets both goals, but it doesn't _quite_ get me where I want.
For clarity, I'll iterate over all possible approaches I've attempted so far.
### Template response
Point (1) can be achieved with simple [`Template`] response, which looks as follows:
https://gist.github.com/zoni/c3b0c2afc8f40c801f18ecc5b9f00d7b/000c36e412b093fc8ced1ab4e98d57d9f2aea119
### Redirect response
Point (2) can be achieved with simple [`Redirect`] response, which looks as follows:
https://gist.github.com/zoni/c3b0c2afc8f40c801f18ecc5b9f00d7b/c16f9c20f173eceea2be13181ff8fbb9b7649fc0
### Combining Template + Redirect
A naive attempt was to return a union of `Template` and `Redirect`:
https://gist.github.com/zoni/c3b0c2afc8f40c801f18ecc5b9f00d7b/44e0e0e6e8ffe474b8ff75c1610720800fe5537d
This however fails with a `ValueError` on OpenAPI schema generation:
<details>
<summary>Traceback</summary>
<pre>
Traceback (most recent call last):
File "/usr/lib64/python3.11/multiprocessing/process.py", line 314, in _bootstrap
self.run()
File "/usr/lib64/python3.11/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/uvicorn/_subprocess.py", line 76, in subprocess_started
target(sockets=sockets)
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/uvicorn/server.py", line 59, in run
return asyncio.run(self.serve(sockets=sockets))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib64/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/usr/lib64/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib64/python3.11/asyncio/base_events.py", line 653, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/uvicorn/server.py", line 66, in serve
config.load()
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/uvicorn/config.py", line 471, in load
self.loaded_app = import_from_string(self.app)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/uvicorn/importer.py", line 21, in import_from_string
module = importlib.import_module(module_str)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib64/python3.11/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen importlib._bootstrap>", line 1206, in _gcd_import
File "<frozen importlib._bootstrap>", line 1178, in _find_and_load
File "<frozen importlib._bootstrap>", line 1149, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 690, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 940, in exec_module
File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
File "/home/work/starlite_issue/app.py", line 16, in <module>
app = Starlite(
^^^^^^^^^
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/starlite/app.py", line 390, in __init__
self.update_openapi_schema()
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/starlite/app.py", line 732, in update_openapi_schema
self.openapi_schema = construct_open_api_with_schema_class(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/work/.cache/pypoetry/virtualenvs/starlite-issue-5tyW83TP-py3.11/lib/python3.11/site-packages/pydantic_openapi_schema/utils/utils.py", line 53, in construct_open_api_with_schema_class
schema_definitions = schema(schema_classes, ref_prefix=REF_PREFIX, by_alias=by_alias)["definitions"]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "pydantic/schema.py", line 146, in pydantic.schema.schema
File "pydantic/schema.py", line 581, in pydantic.schema.model_process_schema
File "pydantic/schema.py", line 622, in pydantic.schema.model_type_schema
File "pydantic/schema.py", line 255, in pydantic.schema.field_schema
File "pydantic/schema.py", line 527, in pydantic.schema.field_type_schema
File "pydantic/schema.py", line 850, in pydantic.schema.field_singleton_schema
File "pydantic/schema.py", line 745, in pydantic.schema.field_singleton_sub_fields_schema
File "pydantic/schema.py", line 527, in pydantic.schema.field_type_schema
File "pydantic/schema.py", line 949, in pydantic.schema.field_singleton_schema
ValueError: Value not declarable with JSON Schema, field: name='background_BackgroundTask' type=BackgroundTask required=True
</pre>
</details>
I don't actually care about OpenAPI for these endpoints, so setting `include_in_schema=False` would be fine for me as well:
https://gist.github.com/zoni/c3b0c2afc8f40c801f18ecc5b9f00d7b/5710d90b2a6de48e3b251e8820f9d0d0abbc8552
This removes the exception, but doesn't get us the intended behavior however. Instead, it results in a JSON rendering of the [`Template`] and [`Redirect`] objects:
<details>
<summary>Result of the template response</summary>
<pre>
[17:00:13] work in ~
➜ http localhost:5000/world
HTTP/1.1 307 Temporary Redirect
content-length: 132
content-type: application/json
date: Wed, 22 Mar 2023 16:00:16 GMT
server: uvicorn
{
"background": null,
"context": {
"name": "world"
},
"cookies": [],
"encoding": "utf-8",
"headers": {},
"media_type": null,
"name": "hello.html.j2"
}
</pre>
</details>
<details>
<summary>Result of the Redirect response</summary>
<pre>
[17:00:16] work in ~
➜ http localhost:5000/foo
HTTP/1.1 307 Temporary Redirect
content-length: 114
content-type: application/json
date: Wed, 22 Mar 2023 16:00:19 GMT
server: uvicorn
{
"background": null,
"cookies": [],
"encoding": "utf-8",
"headers": {},
"media_type": null,
"path": "http://example.com/foo"
}
</pre>
</details>
### Using only `Response`
The closest to achieving this goal that I've come up with is to use a `Response`, but then I don't have access to a Template engine to return a ***templated*** response anymore:
https://gist.github.com/zoni/c3b0c2afc8f40c801f18ecc5b9f00d7b/48d7b837f33a4b522657027d78bd006399b4f04e
Lack of templating aside, we can see this lets us mix a redirect with an arbitrary content response:
<details>
<summary>text/html content response</summary>
<pre>
[17:09:26] work in ~
➜ http localhost:5000/world
HTTP/1.1 200 OK
content-length: 11
content-type: text/html; charset=utf-8
date: Wed, 22 Mar 2023 16:09:30 GMT
server: uvicorn
Hello world
</pre>
</details>
<details>
<summary>redirect response (no content body)</summary>
<pre>
[17:09:30] work in ~
➜ http localhost:5000/foo
HTTP/1.1 307 Temporary Redirect
Transfer-Encoding: chunked
content-type: text/plain; charset=utf-8
date: Wed, 22 Mar 2023 16:09:38 GMT
location: http://example.com/foo
server: uvicorn
</pre>
</details>
(Also worth noting this requires setting `content="", media_type="text/plain"` on `Response()`, otherwise it defaults to JSON with a `""` body as well, so it's a bit less ergonomic)
## Desired solution
Ideally, I'd like to be able to combine [`Template`] + [`Redirect`]. That is, a function signature like `def hello(name: str = "world") -> Template | Redirect:`
(Or for older Python versions, `def hello(name: str = "world") -> typing.Union[Template, Redirect]:`)
I'm also quite happy just returning `Response`, but then I would like a way to get access to the templating engine myself, so that it's possible to do something along the lines of (pseudo-code):
```python
@get("/{name:str}", include_in_schema=False)
def hello(templater: TemplateEngine, name: str = "world") -> Response:
response_content = templater.get_template("hello.html.j2").render(context={"name": name})
return Response(
content=response_content,
media_type="text/html",
)
```
This _might_ already be possible and I just don't know how to get the TemplateEngine injected. If so, I'd love to be pointed at how this could be achieved, and I'll happily make a PR to at least document this better.
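If the application object exposes the configured engine, something along these lines might already work; note that the `template_engine` attribute name is my assumption and not confirmed API:
```python
from starlite import Request, Response, get

@get("/{name:str}", include_in_schema=False)
def hello(request: Request, name: str = "world") -> Response:
    # ``request.app.template_engine`` is assumed here, not verified
    template = request.app.template_engine.get_template("hello.html.j2")
    return Response(
        content=template.render(name=name),
        media_type="text/html",
    )
```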
## Miscellaneous notes
While I'm talking only about Template and Redirect here as that is the use-case I'm trying to solve, I imagine the same issues likely exist around mixing in [`StreamingResponse`] and [`FileResponse`].
[`Template`]: https://docs.starliteproject.dev/latest/reference/datastructures/response_containers.html#starlite.datastructures.response_containers.Template
[`Redirect`]: https://docs.starliteproject.dev/latest/reference/datastructures/response_containers.html#starlite.datastructures.response_containers.Redirect
[`StreamingResponse`]: https://docs.starliteproject.dev/latest/reference/response.html#starlite.response.StreamingResponse
[`FileResponse`]: https://docs.starliteproject.dev/latest/reference/response.html#starlite.response.FileResponse
| Thanks a lot for the very detailed write up!
So, this is actually possible, but not very ergonomic.
Truth be told, templates haven't received a lot of attention but could certainly use some. Unfortunately I can't make any promises as to when that might happen.
Anyway, here's (one) possible way of doing this:
```python
from pathlib import Path
from starlite import (
Redirect,
Starlite,
Template,
TemplateConfig,
Request,
get,
Response,
ResponseContainer,
)
from starlite.contrib.jinja import JinjaTemplateEngine
from starlite.status_codes import HTTP_307_TEMPORARY_REDIRECT
@get("/{name:str}", include_in_schema=False)
def hello(name: str, request: Request) -> ResponseContainer | Response:
if name == "foo":
return Redirect(path=f"http://example.com/{name}").to_response(
headers={},
status_code=HTTP_307_TEMPORARY_REDIRECT,
media_type="text/plain",
request=request,
app=request.app,
)
return Template(name="hello.html.j2", context={"name": name})
app = Starlite(
route_handlers=[hello],
template_config=TemplateConfig(
directory=Path("templates"),
engine=JinjaTemplateEngine,
),
)
```
or alternatively:
```python
@get("/{name:str}", include_in_schema=False)
def hello(name: str) -> ResponseContainer | Response:
if name == "foo":
return Response(
content=b"",
headers={"location": f"http://example.com/{name}"},
status_code=HTTP_307_TEMPORARY_REDIRECT,
)
return Template(name="hello.html.j2", context={"name": name})
```
Both aren't that ergonomic, and there should definitely be a better way.
The actual issue, though, lies within the `Redirect` response container. Because it does not accept a `status_code`, you cannot simply return it. If it did, you could do:
```python
@get("/{name:str}", include_in_schema=False)
def hello(name: str) -> ResponseContainer:
if name == "foo":
return Redirect(path=f"http://example.com/{name}", status_code=HTTP_307_TEMPORARY_REDIRECT)
return Template(name="hello.html.j2", context={"name": name})
```
which is quite a lot nicer.
I'll open an issue to support this, although it will most likely end up in 2.0.
<hr>
From a documentation perspective, we should definitely have a section about returning an optional redirect like that. | 2023-03-30T11:00:51 |
litestar-org/litestar | 1,414 | litestar-org__litestar-1414 | [
"1365"
] | a5e7a8c1a08a94ba6950b12305be0be4409c07d2 | diff --git a/starlite/config/response_cache.py b/starlite/config/response_cache.py
--- a/starlite/config/response_cache.py
+++ b/starlite/config/response_cache.py
@@ -4,7 +4,7 @@
from typing import TYPE_CHECKING, Any
from urllib.parse import urlencode
-__all__ = ("ResponseCacheConfig", "default_cache_key_builder")
+__all__ = ("ResponseCacheConfig", "default_cache_key_builder", "CACHE_FOREVER")
if TYPE_CHECKING:
@@ -14,6 +14,12 @@
from starlite.types import CacheKeyBuilder
+class CACHE_FOREVER: # noqa: N801
+ """Sentinel value indicating that a cached response should be stored without an expiration, explicitly skipping the
+ default expiration
+ """
+
+
def default_cache_key_builder(request: Request[Any, Any, Any]) -> str:
"""Given a request object, returns a cache key by combining the path with the sorted query params.
@@ -36,8 +42,8 @@ class ResponseCacheConfig:
``response_cache_config`` key.
"""
- default_expiration: int = field(default=60)
- """Default cache expiration in seconds."""
+ default_expiration: int | None = 60
+ """Default cache expiration in seconds used when a route handler is configured with ``cache=True``."""
key_builder: CacheKeyBuilder = field(default=default_cache_key_builder)
""":class:`CacheKeyBuilder <.types.CacheKeyBuilder>`. Defaults to :func:`default_cache_key_builder`."""
store: str = "response_cache"
diff --git a/starlite/handlers/http_handlers/base.py b/starlite/handlers/http_handlers/base.py
--- a/starlite/handlers/http_handlers/base.py
+++ b/starlite/handlers/http_handlers/base.py
@@ -52,6 +52,7 @@
from starlite.app import Starlite
from starlite.background_tasks import BackgroundTask, BackgroundTasks
+ from starlite.config.response_cache import CACHE_FOREVER
from starlite.connection import Request
from starlite.datastructures import CacheControlHeader, ETag
from starlite.datastructures.headers import Header
@@ -61,6 +62,7 @@
from starlite.plugins import SerializationPluginProtocol
from starlite.types import MaybePartial # noqa: F401
+
__all__ = ("HTTPRouteHandler", "route")
@@ -120,7 +122,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: Mapping[str, Provide] | None = None,
diff --git a/starlite/handlers/http_handlers/decorators.py b/starlite/handlers/http_handlers/decorators.py
--- a/starlite/handlers/http_handlers/decorators.py
+++ b/starlite/handlers/http_handlers/decorators.py
@@ -14,6 +14,7 @@
from typing import Any, Mapping
from starlite.background_tasks import BackgroundTask, BackgroundTasks
+ from starlite.config.response_cache import CACHE_FOREVER
from starlite.datastructures import CacheControlHeader, ETag
from starlite.di import Provide
from starlite.openapi.datastructures import ResponseSpec
@@ -32,6 +33,7 @@
TypeEncodersMap,
)
+
__all__ = ("get", "head", "post", "put", "patch", "delete")
MSG_SEMANTIC_ROUTE_HANDLER_WITH_HTTP = "semantic route handlers cannot define http_method"
@@ -51,7 +53,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: dict[str, Provide] | None = None,
@@ -204,7 +206,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: dict[str, Provide] | None = None,
@@ -358,7 +360,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: dict[str, Provide] | None = None,
@@ -530,7 +532,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: dict[str, Provide] | None = None,
@@ -683,7 +685,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: dict[str, Provide] | None = None,
@@ -836,7 +838,7 @@ def __init__(
after_response: AfterResponseHookHandler | None = None,
background: BackgroundTask | BackgroundTasks | None = None,
before_request: BeforeRequestHookHandler | None = None,
- cache: bool | int = False,
+ cache: bool | int | type[CACHE_FOREVER] = False,
cache_control: CacheControlHeader | None = None,
cache_key_builder: CacheKeyBuilder | None = None,
dependencies: dict[str, Provide] | None = None,
| diff --git a/tests/test_response_caching.py b/tests/test_response_caching.py
--- a/tests/test_response_caching.py
+++ b/tests/test_response_caching.py
@@ -1,14 +1,15 @@
import random
from datetime import timedelta
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
from unittest.mock import MagicMock
from uuid import uuid4
import pytest
from starlite import Request, Starlite, get
-from starlite.config.response_cache import ResponseCacheConfig
+from starlite.config.response_cache import CACHE_FOREVER, ResponseCacheConfig
from starlite.stores.base import Store
+from starlite.stores.memory import MemoryStore
from starlite.testing import TestClient, create_test_client
if TYPE_CHECKING:
@@ -88,6 +89,42 @@ async def handler() -> str:
assert mock.call_count == 2
[email protected]("expiration,expected_expiration", [(True, None), (10, 10)])
+def test_default_expiration_none(
+ memory_store: MemoryStore, expiration: int, expected_expiration: Optional[int]
+) -> None:
+ @get("/cached", cache=expiration)
+ def handler() -> None:
+ return None
+
+ app = Starlite(
+ [handler],
+ stores={"response_cache": memory_store},
+ response_cache_config=ResponseCacheConfig(default_expiration=None),
+ )
+
+ with TestClient(app) as client:
+ client.get("/cached")
+
+ if expected_expiration is None:
+ assert memory_store._store["/cached"].expires_at is None
+ else:
+ assert memory_store._store["/cached"].expires_at
+
+
+def test_cache_forever(memory_store: MemoryStore) -> None:
+ @get("/cached", cache=CACHE_FOREVER)
+ async def handler() -> None:
+ return None
+
+ app = Starlite([handler], stores={"response_cache": memory_store})
+
+ with TestClient(app) as client:
+ client.get("/cached")
+
+ assert memory_store._store["/cached"].expires_at is None
+
+
@pytest.mark.parametrize("sync_to_thread", (True, False))
async def test_custom_cache_key(sync_to_thread: bool, anyio_backend: str, mock: MagicMock) -> None:
def custom_cache_key_builder(request: Request) -> str:
| Enhancement: Add (or document) method for caching a route indefinitely
### Summary
Currently, the low-level cache access permits setting a value which will be cached indefinitely, by passing `expiration=None` to the cache's `set()` method. However, the `cache` argument to route decorators does not accept `None` (and raises an exception if `cache=None` is passed), and I can find no other mechanism for specifying that the response from a route should be cached indefinitely.
### Basic Example
It would be possible to write:
```python
from starlite import get
@get("/", cache=<sentinel value indicating no expiration>)
async def my_indefinitely_cached_handler() -> str:
...
```
and have the response be cached with no explicit expiration.
### Drawbacks and Impact
I'm not sure that there would be any negative impact beyond enabling a use case like this; no existing cache functionality would be removed or changed in a backwards incompatible way, as this is purely adding to the cache functionality.
For positive impact, it would be possible to have endpoints that effectively serve out of cache *forever*.
For a concrete example, I currently have an application which wants to do this due to having a data set that changes rarely and is expensive to query on the fly; I've instrumented its data-update logic to both insert the new data set into the database and also calculate and clear the affected set of cache key(s), after which I'd like the endpoints which serve the data to just query the DB *once* and then cache "forever" (or until the next time a data update occurs and clears some keys out of the cache).
At the moment I'm simulating this as best I can by doing the caching in my DB query layer. If I could reliably work out how generate the correct Starlite `Response` objects I could just pickle them and do a low-level cache `set()` with the appropriate key and with `expiration=None`, but ideally I would just let Starlite itself generate the responses and cache them with indefinite expiration.
### Unresolved questions
The main question I can see is what the correct value for the `cache` argument would be. With the low-level cache `set()` method (at least on Redis, which is the cache backend I'm using), a key can be set not to expire by passing `expiration=None`, but passing `cache=None` in a route decorator feels like it should have the semantics of not caching at all, rather than of caching indefinitely. So probably some kind of special sentinel value would be needed which could be passed in -- perhaps something like:
```python
from starlite import get
from starlite.cache import NO_EXPIRATION
@get("/", cache=NO_EXPIRATION)
async def my_indefinitely_cached_handler() -> str:
...
```
I believe internally Redis implements a non-expiring key as having an expiration "timestamp" of `-1`, so perhaps either that value, or a named constant holding that value, would work.
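For what it's worth, the way redis-py exposes this: a key set without `ex`/`px` simply never expires, and the `TTL` command then reports `-1` (so `-1` is a query result, not a stored timestamp):
```python
import redis

r = redis.Redis()
r.set("cached-response", b"...")       # no ex/px argument -> the key never expires
assert r.ttl("cached-response") == -1  # -1 means "key exists, no expiry set"
```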
| So, we actually discussed this previously, but to be honest, I can't recall what the verdict was. @Goldziher?
In any case, there should be a way. I remember raising the point that, from a semantic perspective, `cache=True` should cache indefinitely, but that would then make the default expiration require some sort of sentinel value.
Personally I don't particularly care for `-1`, as it's not very explicit. We could however add a sentinel value like `NO_EXPIRATION` which would achieve the same effect.
Alternatively, and as a workaround, you could just pass something like `timedelta(days=365).total_seconds()` to cache for a year (multiply as your heart desires).
Sentinel sounds good
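For reference, the patch above settles on exactly such a sentinel, `CACHE_FOREVER`; usage mirrors the test it adds:
```python
from starlite import get
from starlite.config.response_cache import CACHE_FOREVER

@get("/cached", cache=CACHE_FOREVER)  # response is stored with no expiration
async def handler() -> str:
    ...
```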
Related issue: #1301 | 2023-03-30T12:00:08 |
litestar-org/litestar | 1,418 | litestar-org__litestar-1418 | [
"1234",
"4321"
] | 5b4ba567251c30932de726d00f96e5cf546498a9 | diff --git a/starlite/contrib/sqlalchemy_1/plugin.py b/starlite/contrib/sqlalchemy_1/plugin.py
--- a/starlite/contrib/sqlalchemy_1/plugin.py
+++ b/starlite/contrib/sqlalchemy_1/plugin.py
@@ -223,8 +223,6 @@ def providers_map(self) -> Dict[Type[TypeEngine], Callable[[Union[TypeEngine, Ty
sqlalchemy_type.TupleType: self.handle_tuple_type, # pyright: ignore
sqlalchemy_type.Unicode: self.handle_string_type,
sqlalchemy_type.UnicodeText: self.handle_string_type,
- sqlalchemy_type.Uuid: lambda x: UUID,
- sqlalchemy_type.UUID: lambda x: UUID,
sqlalchemy_type.VARBINARY: self.handle_string_type,
sqlalchemy_type.VARCHAR: self.handle_string_type,
# mssql
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-03-30T22:01:25 |
|
litestar-org/litestar | 1,425 | litestar-org__litestar-1425 | [
"4321",
"1234"
] | 93396ca0d475f7a8b46aaa74606aa3d10c8269fc | diff --git a/starlite/types/__init__.py b/starlite/types/__init__.py
--- a/starlite/types/__init__.py
+++ b/starlite/types/__init__.py
@@ -83,6 +83,7 @@
RouteHandlerType,
)
from .protocols import DataclassProtocol, Logger
+from .serialization import StarliteEncodableType
__all__ = (
"ASGIApp",
@@ -153,6 +154,7 @@
"Scopes",
"Send",
"Serializer",
+ "StarliteEncodableType",
"SyncOrAsyncUnion",
"TypeEncodersMap",
"TypedDictClass",
diff --git a/starlite/types/serialization.py b/starlite/types/serialization.py
new file mode 100644
--- /dev/null
+++ b/starlite/types/serialization.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from collections import deque
+ from collections.abc import Collection
+ from datetime import date, datetime, time
+ from decimal import Decimal
+ from enum import Enum, IntEnum
+ from ipaddress import (
+ IPv4Address,
+ IPv4Interface,
+ IPv4Network,
+ IPv6Address,
+ IPv6Interface,
+ IPv6Network,
+ )
+ from pathlib import Path, PurePath
+ from re import Pattern
+ from uuid import UUID
+
+ from msgspec import Raw, Struct
+ from msgspec.msgpack import Ext
+ from pydantic import (
+ BaseModel,
+ ByteSize,
+ ConstrainedBytes,
+ ConstrainedDate,
+ NameEmail,
+ SecretField,
+ StrictBool,
+ )
+ from pydantic.color import Color
+ from typing_extensions import TypeAlias
+
+ from starlite.types import DataclassProtocol
+
+__all__ = ("StarliteEncodableType",)
+
+EncodableBuiltinType: TypeAlias = "None | bool | int | float | str | bytes | bytearray"
+EncodableBuiltinCollectionType: TypeAlias = "list | tuple | set | frozenset | dict | Collection"
+EncodableStdLibType: TypeAlias = (
+ "date | datetime | deque | time | UUID | Decimal | Enum | IntEnum | DataclassProtocol | Path | PurePath | Pattern"
+)
+EncodableStdLibIPType: TypeAlias = (
+ "IPv4Address | IPv4Interface | IPv4Network | IPv6Address | IPv6Interface | IPv6Network"
+)
+EncodableMsgSpecType: TypeAlias = "Ext | Raw | Struct"
+EncodablePydanticType: TypeAlias = (
+ "BaseModel | ByteSize | ConstrainedBytes | ConstrainedDate | NameEmail | SecretField | StrictBool | Color"
+)
+
+StarliteEncodableType: TypeAlias = "EncodableBuiltinType | EncodableBuiltinCollectionType | EncodableStdLibType | EncodableStdLibIPType | EncodableMsgSpecType | EncodablePydanticType"
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-01T02:50:39 |
|
litestar-org/litestar | 1,426 | litestar-org__litestar-1426 | [
"4321",
"1234"
] | 93396ca0d475f7a8b46aaa74606aa3d10c8269fc | diff --git a/starlite/contrib/sqlalchemy/init_plugin/config/common.py b/starlite/contrib/sqlalchemy/init_plugin/config/common.py
--- a/starlite/contrib/sqlalchemy/init_plugin/config/common.py
+++ b/starlite/contrib/sqlalchemy/init_plugin/config/common.py
@@ -239,3 +239,11 @@ def create_app_state_items(self) -> dict[str, Any]:
self.engine_app_state_key: self.create_engine(),
self.session_maker_app_state_key: self.create_session_maker(),
}
+
+ def update_app_state(self, state: State) -> None:
+ """Set the app state with engine and session.
+
+ Args:
+ state: The ``Starlite.state`` instance.
+ """
+ state.update(self.create_app_state_items())
diff --git a/starlite/contrib/sqlalchemy/init_plugin/plugin.py b/starlite/contrib/sqlalchemy/init_plugin/plugin.py
--- a/starlite/contrib/sqlalchemy/init_plugin/plugin.py
+++ b/starlite/contrib/sqlalchemy/init_plugin/plugin.py
@@ -39,7 +39,7 @@ def on_app_init(self, app_config: AppConfig) -> AppConfig:
}
)
app_config.before_send.append(self._config.before_send_handler)
+ app_config.on_startup.append(self._config.update_app_state)
app_config.on_shutdown.append(self._config.on_shutdown)
- app_config.state.update(self._config.create_app_state_items())
app_config.signature_namespace.update(self._config.signature_namespace)
return app_config
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-01T05:51:12 |
|
litestar-org/litestar | 1,428 | litestar-org__litestar-1428 | [
"4321",
"1234"
] | 593f536ac683dc163e581905f5f49d2beadb28c7 | diff --git a/starlite/handlers/base.py b/starlite/handlers/base.py
--- a/starlite/handlers/base.py
+++ b/starlite/handlers/base.py
@@ -22,7 +22,7 @@
from starlite.di import Provide
from starlite.params import ParameterKwarg
from starlite.router import Router
- from starlite.types import AnyCallable, AsyncAnyCallable, ExceptionHandler
+ from starlite.types import AsyncAnyCallable, ExceptionHandler
from starlite.types.composite_types import MaybePartial
T = TypeVar("T", bound="BaseRouteHandler")
@@ -34,10 +34,10 @@ class BaseRouteHandler(Generic[T]):
Serves as a subclass for all route handlers
"""
- fn: Ref[MaybePartial[AnyCallable]]
signature: Signature
__slots__ = (
+ "_fn",
"_resolved_dependencies",
"_resolved_guards",
"_resolved_layered_parameters",
@@ -45,7 +45,6 @@ class BaseRouteHandler(Generic[T]):
"_resolved_type_encoders",
"dependencies",
"exception_handlers",
- "fn",
"guards",
"middleware",
"name",
@@ -114,11 +113,25 @@ def __init__(
def __call__(self, fn: AsyncAnyCallable) -> Self:
"""Replace a function with itself."""
- self.fn = Ref["MaybePartial[AsyncAnyCallable]"](fn)
+ self._fn = Ref["MaybePartial[AsyncAnyCallable]"](fn)
self.signature = Signature.from_callable(fn)
self._validate_handler_function()
return self
+ @property
+ def fn(self) -> Ref[MaybePartial[AsyncAnyCallable]]:
+ """Get the handler function.
+
+ Raises:
+ ImproperlyConfiguredException: if handler fn is not set.
+
+ Returns:
+ Handler function
+ """
+ if not hasattr(self, "_fn"):
+ raise ImproperlyConfiguredException("Handler has not decorated a function")
+ return self._fn
+
@property
def handler_name(self) -> str:
"""Get the name of the handler function.
@@ -129,9 +142,6 @@ def handler_name(self) -> str:
Returns:
Name of the handler function
"""
- fn = getattr(self, "fn", None)
- if not fn:
- raise ImproperlyConfiguredException("cannot access handler name before setting the handler function")
return get_name(unwrap_partial(self.fn.value))
@property
@@ -279,8 +289,6 @@ def _validate_dependency_is_unique(dependencies: dict[str, Provide], key: str, p
def _validate_handler_function(self) -> None:
"""Validate the route handler function once set by inspecting its return annotations."""
- if not getattr(self, "fn", None):
- raise ImproperlyConfiguredException("Cannot call _validate_handler_function without first setting self.fn")
def __str__(self) -> str:
"""Return a unique identifier for the route handler.
@@ -288,6 +296,7 @@ def __str__(self) -> str:
Returns:
A string
"""
+ target: type[AsyncAnyCallable] | AsyncAnyCallable
target = unwrap_partial(self.fn.value)
if not hasattr(target, "__qualname__"):
target = type(target)
| diff --git a/tests/handlers/base/test_validations.py b/tests/handlers/base/test_validations.py
--- a/tests/handlers/base/test_validations.py
+++ b/tests/handlers/base/test_validations.py
@@ -8,4 +8,4 @@ def test_raise_no_fn_validation() -> None:
handler = BaseRouteHandler[BaseRouteHandler](path="/")
with pytest.raises(ImproperlyConfiguredException):
- handler._validate_handler_function()
+ handler.fn
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-01T08:02:54 |
litestar-org/litestar | 1,460 | litestar-org__litestar-1460 | [
"4321",
"1234"
] | 663e3dc49f4092a97019e8f5e01626df926487ad | diff --git a/litestar/app.py b/litestar/app.py
--- a/litestar/app.py
+++ b/litestar/app.py
@@ -348,7 +348,7 @@ def __init__(
opt=dict(opt or {}),
parameters=parameters or {},
plugins=list(plugins or []),
- preferred_validation_backend=preferred_validation_backend or "attrs",
+ preferred_validation_backend=preferred_validation_backend or "pydantic",
request_class=request_class,
response_cache_config=response_cache_config or ResponseCacheConfig(),
response_class=response_class,
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-07T17:36:03 |
|
litestar-org/litestar | 1,474 | litestar-org__litestar-1474 | [
"4321",
"1234"
] | 2ab2a988c704b18f0f6387d8d4f262e1e217185e | diff --git a/litestar/constants.py b/litestar/constants.py
--- a/litestar/constants.py
+++ b/litestar/constants.py
@@ -2,6 +2,7 @@
from pydantic.fields import Undefined
+from litestar.enums import MediaType
from litestar.types import Empty
DEFAULT_ALLOWED_CORS_HEADERS = {"Accept", "Accept-Language", "Content-Language", "Content-Type"}
@@ -12,6 +13,7 @@
ONE_MEGABYTE = 1024 * 1024
OPENAPI_NOT_INITIALIZED = "Litestar has not been instantiated with OpenAPIConfig"
REDIRECT_STATUS_CODES = {301, 302, 303, 307, 308}
+REDIRECT_ALLOWED_MEDIA_TYPES = {MediaType.TEXT, MediaType.HTML}
RESERVED_KWARGS = {"state", "headers", "cookies", "request", "socket", "data", "query", "scope", "body"}
SCOPE_STATE_DEPENDENCY_CACHE = "dependency_cache"
SCOPE_STATE_NAMESPACE = "__litestar__"
diff --git a/litestar/response/redirect.py b/litestar/response/redirect.py
--- a/litestar/response/redirect.py
+++ b/litestar/response/redirect.py
@@ -3,7 +3,7 @@
from typing import TYPE_CHECKING, Any, Literal
from urllib.parse import quote
-from litestar.constants import REDIRECT_STATUS_CODES
+from litestar.constants import REDIRECT_ALLOWED_MEDIA_TYPES, REDIRECT_STATUS_CODES
from litestar.enums import MediaType
from litestar.exceptions import ImproperlyConfiguredException
from litestar.response.base import Response
@@ -29,6 +29,7 @@ def __init__(
headers: dict[str, Any] | None = None,
cookies: ResponseCookies | None = None,
encoding: str = "utf-8",
+ media_type: str | MediaType = MediaType.TEXT,
) -> None:
"""Initialize the response.
@@ -41,9 +42,11 @@ def __init__(
cookies: A list of :class:`Cookie <.datastructures.Cookie>` instances to be set under the response
``Set-Cookie`` header.
encoding: The encoding to be used for the response headers.
+ media_type: A value for the response ``Content-Type`` header.
+
Raises:
- ImproperlyConfiguredException: If status code is not a redirect status code.
+ ImproperlyConfiguredException: Either if status code is not a redirect status code or media type is not supported.
"""
if status_code not in REDIRECT_STATUS_CODES:
raise ImproperlyConfiguredException(
@@ -51,12 +54,18 @@ def __init__(
f"Redirect responses should have one of "
f"the following status codes: {', '.join([str(s) for s in REDIRECT_STATUS_CODES])}"
)
+ if media_type not in REDIRECT_ALLOWED_MEDIA_TYPES:
+ raise ImproperlyConfiguredException(
+ f"{media_type} media type is not supported yet. "
+ f"Media type should be one of "
+ f"the following values: {', '.join([str(s) for s in REDIRECT_ALLOWED_MEDIA_TYPES])}"
+ )
super().__init__(
background=background,
content=b"",
cookies=cookies,
headers={**(headers or {}), "location": quote(url, safe="/#%[]=:;$&()+,!?*@'~")},
- media_type=MediaType.TEXT,
+ media_type=media_type,
status_code=status_code,
encoding=encoding,
)
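A minimal sketch of the effect of the new parameter, per the patch and its tests:
```python
from litestar.exceptions import ImproperlyConfiguredException
from litestar.response import RedirectResponse

RedirectResponse("/", media_type="text/html")  # allowed; Content-Type becomes text/html
RedirectResponse("/", media_type="application/json")  # raises ImproperlyConfiguredException
```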
| diff --git a/tests/response/test_redirect_response.py b/tests/response/test_redirect_response.py
--- a/tests/response/test_redirect_response.py
+++ b/tests/response/test_redirect_response.py
@@ -10,7 +10,7 @@
from litestar import Response
from litestar.exceptions import ImproperlyConfiguredException
from litestar.response import RedirectResponse
-from litestar.status_codes import HTTP_200_OK
+from litestar.status_codes import HTTP_200_OK, HTTP_307_TEMPORARY_REDIRECT
from litestar.testing import TestClient
if TYPE_CHECKING:
@@ -62,3 +62,22 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_redirect_response_status_validation() -> None:
with pytest.raises(ImproperlyConfiguredException):
RedirectResponse("/", status_code=HTTP_200_OK) # type: ignore
+
+
+def test_redirect_response_html_media_type() -> None:
+ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ if scope["path"] == "/":
+ response = Response("hello")
+ else:
+ response = RedirectResponse("/", media_type="text/html")
+ await response(scope, receive, send)
+
+ client: TestClient = TestClient(app)
+ response = client.request("GET", "/redirect", follow_redirects=False)
+ assert str(response.url) == "http://testserver.local/redirect"
+ assert "text/html" in str(response.headers["Content-Type"])
+
+
+def test_redirect_response_media_type_validation() -> None:
+ with pytest.raises(ImproperlyConfiguredException):
+ RedirectResponse("/", status_code=HTTP_307_TEMPORARY_REDIRECT, media_type="application/json")
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-10T15:36:41 |
litestar-org/litestar | 1,482 | litestar-org__litestar-1482 | [
"4321",
"1234"
] | 05b05ba0bf8c9e19d09f6bf34852f013648aacef | diff --git a/litestar/app.py b/litestar/app.py
--- a/litestar/app.py
+++ b/litestar/app.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import logging
from datetime import date, datetime, time, timedelta
from functools import partial
from itertools import chain
@@ -132,6 +133,7 @@ class Litestar(Router):
"""
__slots__ = (
+ "_debug",
"_openapi_schema",
"after_exception",
"after_shutdown",
@@ -145,7 +147,6 @@ class Litestar(Router):
"compression_config",
"cors_config",
"csrf_config",
- "debug",
"event_emitter",
"get_logger",
"logger",
@@ -373,6 +374,7 @@ def __init__(
config = handler(config)
self._openapi_schema: OpenAPI | None = None
+ self._debug: bool = True
self.get_logger: GetLogger = get_logger_placeholder
self.logger: Logger | None = None
self.routes: list[HTTPRoute | ASGIRoute | WebSocketRoute] = []
@@ -389,7 +391,6 @@ def __init__(
self.compression_config = config.compression_config
self.cors_config = config.cors_config
self.csrf_config = config.csrf_config
- self.debug = config.debug
self.event_emitter = config.event_emitter_backend(listeners=config.listeners)
self.logging_config = config.logging_config
self.multipart_form_part_limit = config.multipart_form_part_limit
@@ -405,6 +406,7 @@ def __init__(
self.static_files_config = config.static_files_config
self.template_engine = config.template_config.engine_instance if config.template_config else None
self.websocket_class = config.websocket_class or WebSocket
+ self.debug = config.debug
super().__init__(
after_request=config.after_request,
@@ -435,9 +437,6 @@ def __init__(
for route_handler in config.route_handlers:
self.register(route_handler)
- if self.debug and isinstance(self.logging_config, LoggingConfig):
- self.logging_config.loggers["litestar"]["level"] = "DEBUG"
-
if self.logging_config:
self.get_logger = self.logging_config.configure()
self.logger = self.get_logger("litestar")
@@ -450,7 +449,21 @@ def __init__(
self.asgi_handler = self._create_asgi_handler()
- self.stores = config.stores if isinstance(config.stores, StoreRegistry) else StoreRegistry(config.stores)
+ self.stores: StoreRegistry = (
+ config.stores if isinstance(config.stores, StoreRegistry) else StoreRegistry(config.stores)
+ )
+
+ @property
+ def debug(self) -> bool:
+ return self._debug
+
+ @debug.setter
+ def debug(self, value: bool) -> None:
+ if self.logger:
+ self.logger.setLevel(logging.DEBUG if value else logging.INFO)
+ if isinstance(self.logging_config, LoggingConfig):
+ self.logging_config.loggers["litestar"]["level"] = "DEBUG" if value else "INFO"
+ self._debug = value
async def __call__(
self,
diff --git a/litestar/types/protocols.py b/litestar/types/protocols.py
--- a/litestar/types/protocols.py
+++ b/litestar/types/protocols.py
@@ -79,6 +79,16 @@ def critical(self, event: str, *args: Any, **kwargs: Any) -> Any:
**kwargs: Any kwargs.
"""
+ def setLevel(self, level: int) -> None: # noqa: N802
+ """Set the log level
+
+ Args:
+ level: Log level to set as an integer
+
+ Returns:
+ None
+ """
+
@runtime_checkable
class DataclassProtocol(Protocol):
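A minimal sketch of the behavior the new `debug` setter introduces, following the added test:
```python
import logging

from litestar import Litestar

app = Litestar()
assert app.logger is not None
assert app.logger.level == logging.INFO

app.debug = True  # the setter syncs the logger and the logging config to DEBUG
assert app.logger.level == logging.DEBUG
```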
| diff --git a/tests/app/test_app.py b/tests/app/test_app.py
--- a/tests/app/test_app.py
+++ b/tests/app/test_app.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+import logging
+
import pytest
from litestar import Litestar
@@ -11,3 +13,16 @@ def test_access_openapi_schema_raises_if_not_configured() -> None:
app = Litestar(openapi_config=None)
with pytest.raises(ImproperlyConfiguredException):
app.openapi_schema
+
+
+def test_set_debug_updates_logging_level() -> None:
+ app = Litestar()
+
+ assert app.logger is not None
+ assert app.logger.level == logging.INFO # type: ignore[attr-defined]
+
+ app.debug = True
+ assert app.logger.level == logging.DEBUG # type: ignore[attr-defined]
+
+ app.debug = False
+ assert app.logger.level == logging.INFO # type: ignore[attr-defined]
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-11T15:04:28 |
litestar-org/litestar | 1,483 | litestar-org__litestar-1483 | [
"4321",
"1234"
] | e860e57420ed8f63b644c19f77636960bcea2d5d | diff --git a/starlite/cli/commands/core.py b/starlite/cli/commands/core.py
--- a/starlite/cli/commands/core.py
+++ b/starlite/cli/commands/core.py
@@ -1,4 +1,6 @@
import inspect
+import subprocess
+from typing import Any, Dict, List
from click import command, option
from rich.tree import Tree
@@ -8,6 +10,18 @@
from starlite.utils.helpers import unwrap_partial
+def _convert_uvicorn_args(args: Dict[str, Any]) -> List[str]:
+ process_args = []
+ for arg, value in args.items():
+ if isinstance(value, bool):
+ if value:
+ process_args.append(f"--{arg}")
+ else:
+ process_args.append(f"--{arg}={value}")
+
+ return process_args
+
+
@command(name="info")
def info_command(app: Starlite) -> None:
"""Show information about the detected Starlite app."""
@@ -38,24 +52,24 @@ def run_command(
"""
try:
- import uvicorn
+ import uvicorn # noqa: F401
except ImportError:
raise StarliteCLIException("Uvicorn needs to be installed to run an app") # pylint: disable=W0707
if debug or env.debug:
app.debug = True
- show_app_info(app)
+ # invoke uvicorn in a subprocess to be able to use the --reload flag. see
+ # https://github.com/litestar-org/litestar/issues/1191 and https://github.com/encode/uvicorn/issues/1045
- console.rule("[yellow]Starting server process", align="left")
+ process_args = {
+ "reload": env.reload or reload,
+ "host": env.host or host,
+ "port": env.port or port,
+ "factory": env.is_app_factory,
+ }
- uvicorn.run(
- env.app_path,
- reload=env.reload or reload,
- host=env.host or host,
- port=env.port or port,
- factory=env.is_app_factory,
- )
+ subprocess.run(["uvicorn", env.app_path, *_convert_uvicorn_args(process_args)], check=True)
@command(name="routes")
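`_convert_uvicorn_args` above maps the option dict onto uvicorn CLI flags — true booleans become bare flags, false ones are dropped, and everything else becomes `--key=value`:
```python
_convert_uvicorn_args({"reload": True, "host": "127.0.0.1", "port": 8000, "factory": False})
# -> ["--reload", "--host=127.0.0.1", "--port=8000"]
```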
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-11T16:20:45 |
|
litestar-org/litestar | 1,484 | litestar-org__litestar-1484 | [
"4321",
"1234"
] | b8751a41adfa80d8c179d3ad6880f518067dbf1a | diff --git a/starlite/cli/commands/core.py b/starlite/cli/commands/core.py
--- a/starlite/cli/commands/core.py
+++ b/starlite/cli/commands/core.py
@@ -62,6 +62,8 @@ def run_command(
# invoke uvicorn in a subprocess to be able to use the --reload flag. see
# https://github.com/litestar-org/litestar/issues/1191 and https://github.com/encode/uvicorn/issues/1045
+ show_app_info(app)
+
process_args = {
"reload": env.reload or reload,
"host": env.host or host,
| diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py
--- a/tests/cli/conftest.py
+++ b/tests/cli/conftest.py
@@ -86,8 +86,8 @@ def runner() -> CliRunner:
@pytest.fixture
-def mock_uvicorn_run(mocker: MockerFixture) -> "MagicMock":
- return mocker.patch("uvicorn.run")
+def mock_subprocess_run(mocker: MockerFixture) -> "MagicMock":
+ return mocker.patch("subprocess.run")
@pytest.fixture
diff --git a/tests/cli/test_run_command.py b/tests/cli/test_run_command.py
--- a/tests/cli/test_run_command.py
+++ b/tests/cli/test_run_command.py
@@ -26,7 +26,7 @@ def test_run_command(
mocker: MockerFixture,
runner: CliRunner,
monkeypatch: MonkeyPatch,
- mock_uvicorn_run: MagicMock,
+ mock_subprocess_run: MagicMock,
reload: Optional[bool],
port: Optional[int],
host: Optional[str],
@@ -72,13 +72,12 @@ def test_run_command(
assert result.exception is None
assert result.exit_code == 0
- mock_uvicorn_run.assert_called_once_with(
- f"{path.stem}:app",
- reload=reload,
- port=port,
- host=host,
- factory=False,
- )
+ expected_args = ["uvicorn", f"{path.stem}:app", f"--host={host}", f"--port={port}"]
+ if reload:
+ expected_args.append("--reload")
+
+ mock_subprocess_run.assert_called_once()
+ assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
mock_show_app_info.assert_called_once()
@@ -93,7 +92,7 @@ def test_run_command(
)
def test_run_command_with_autodiscover_app_factory(
runner: CliRunner,
- mock_uvicorn_run: MagicMock,
+ mock_subprocess_run: MagicMock,
file_name: str,
file_content: str,
factory_name: str,
@@ -107,18 +106,14 @@ def test_run_command_with_autodiscover_app_factory(
assert result.exception is None
assert result.exit_code == 0
- mock_uvicorn_run.assert_called_once_with(
- f"{path.stem}:{factory_name}",
- reload=False,
- port=8000,
- host="127.0.0.1",
- factory=True,
- )
+ expected_args = ["uvicorn", f"{path.stem}:{factory_name}", "--host=127.0.0.1", "--port=8000", "--factory"]
+ mock_subprocess_run.assert_called_once()
+ assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
def test_run_command_with_app_factory(
runner: CliRunner,
- mock_uvicorn_run: MagicMock,
+ mock_subprocess_run: MagicMock,
create_app_file: CreateAppFileFixture,
) -> None:
path = create_app_file("_create_app_with_path.py", content=CREATE_APP_FILE_CONTENT)
@@ -128,13 +123,9 @@ def test_run_command_with_app_factory(
assert result.exception is None
assert result.exit_code == 0
- mock_uvicorn_run.assert_called_once_with(
- f"{app_path}",
- reload=False,
- port=8000,
- host="127.0.0.1",
- factory=True,
- )
+ expected_args = ["uvicorn", str(app_path), "--host=127.0.0.1", "--port=8000", "--factory"]
+ mock_subprocess_run.assert_called_once()
+ assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
def test_run_command_force_debug(app_file: Path, mocker: MockerFixture, runner: CliRunner) -> None:
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-11T18:40:12 |
litestar-org/litestar | 1,488 | litestar-org__litestar-1488 | [
"4321",
"1234"
] | 32481350ffe9a2c8db5969310e7f5ecd577462d1 | diff --git a/litestar/contrib/htmx/response.py b/litestar/contrib/htmx/response.py
--- a/litestar/contrib/htmx/response.py
+++ b/litestar/contrib/htmx/response.py
@@ -230,11 +230,7 @@ def to_response(
)
)
- template = Template(
- name=self.name,
- background=self.background,
- encoding=self.encoding,
- )
+ template = Template(name=self.name, background=self.background, context=self.context, encoding=self.encoding)
return template.to_response(
headers=hx_headers, media_type=media_type, app=app, status_code=status_code, request=request
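A minimal usage sketch of the fix, following the updated test — user-supplied context keys now reach the template engine alongside the request:
```python
from litestar.contrib.htmx.response import HTMXTemplate  # path of the patched module

HTMXTemplate(
    name="abc.html",
    context={"custom_key": "custom_value"},  # previously dropped; now forwarded to Template
    push_url="/about",
)
# abc.html can now render the key, e.g. "{{ custom_key }}" with the Jinja engine.
```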
| diff --git a/tests/contrib/htmx/test_htmx_response.py b/tests/contrib/htmx/test_htmx_response.py
--- a/tests/contrib/htmx/test_htmx_response.py
+++ b/tests/contrib/htmx/test_htmx_response.py
@@ -233,8 +233,16 @@ def handler() -> HXLocation:
@pytest.mark.parametrize(
"engine, template, expected",
(
- (JinjaTemplateEngine, "path: {{ request.scope['path'] }}", "path: /"),
- (MakoTemplateEngine, "path: ${request.scope['path']}", "path: /"),
+ (
+ JinjaTemplateEngine,
+ "path: {{ request.scope['path'] }} custom_key: {{ custom_key }}",
+ "path: / custom_key: custom_value",
+ ),
+ (
+ MakoTemplateEngine,
+ "path: ${request.scope['path']} custom_key: ${custom_key}",
+ "path: / custom_key: custom_value",
+ ),
),
)
def test_HTMXTemplate_response_success(engine: Any, template: str, expected: str, template_dir: Path) -> None:
@@ -244,7 +252,7 @@ def test_HTMXTemplate_response_success(engine: Any, template: str, expected: str
def handler() -> HTMXTemplate:
return HTMXTemplate(
name="abc.html",
- context={"request": {"scope": {"path": "nope"}}},
+ context={"request": {"scope": {"path": "nope"}}, "custom_key": "custom_value"},
push_url="/about",
re_swap="beforebegin",
re_target="#new-target-id",
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-12T19:14:25 |
litestar-org/litestar | 1,492 | litestar-org__litestar-1492 | [
"1491",
"4321"
] | f8deb0c8fb6cd6edb70d5be2bc602fce19e468e0 | diff --git a/litestar/_signature/models/attrs_signature_model.py b/litestar/_signature/models/attrs_signature_model.py
--- a/litestar/_signature/models/attrs_signature_model.py
+++ b/litestar/_signature/models/attrs_signature_model.py
@@ -85,7 +85,7 @@ def _structure_date(value: Any, cls: type[date]) -> date:
return value
if isinstance(value, (float, int, Decimal)):
- return cls.fromtimestamp(float(value))
+ return datetime.fromtimestamp(float(value), tz=timezone.utc).date()
dt = _structure_datetime(value=value, cls=datetime)
return cls(year=dt.year, month=dt.month, day=dt.day)
| diff --git a/tests/signature/test_attrs_signature_modelling.py b/tests/signature/test_attrs_signature_modelling.py
--- a/tests/signature/test_attrs_signature_modelling.py
+++ b/tests/signature/test_attrs_signature_modelling.py
@@ -7,7 +7,7 @@
from tests import Person, PersonFactory
now = datetime.now(tz=timezone.utc)
-today = date.today()
+today = now.date()
time_now = time(hour=now.hour, minute=now.minute, second=now.second, microsecond=now.microsecond)
one_minute = timedelta(minutes=1)
person = PersonFactory.build()
| Bug: test failures when the local date differs from UTC
### Description
Tests for the attrs signature modelling fail when the local date differs from the UTC date.
The tests compare `now = datetime.now(tz=timezone.utc)` against `today = date.today()`, which is local time.
Making `today` UTC is the obvious solution; however, that creates one further issue: `now.timestamp()` is structured using `date.fromtimestamp()`, which returns local time, and is then compared to the UTC `today`. This is a behavioral difference between pydantic and cattrs: given a timestamp, pydantic returns the UTC date, while the cattrs converter returns the local date.
Create `now` as UTC, and demonstrate that the local date is a day ahead:
```python-console
>>> now = datetime.now(tz=timezone.utc)
>>> now
datetime.datetime(2023, 4, 13, 23, 19, 57, 285302, tzinfo=datetime.timezone.utc)
>>> datetime.now()
datetime.datetime(2023, 4, 14, 9, 21, 5, 809068)
```
Parse `now` as a timestamp using the cattrs converter:
```python-console
>>> from litestar._signature.models.attrs_signature_model import _converter
>>> _converter.structure(now.timestamp(), date)
datetime.date(2023, 4, 14)
```
Parse `now` as a timestamp using pydantic:
```python-console
>>> from pydantic import parse_obj_as
>>> parse_obj_as(date, now.timestamp())
datetime.date(2023, 4, 13)
```
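The fix applied in the patch converts the timestamp through an explicitly UTC-aware datetime before taking the date, matching pydantic's behavior; continuing the console session above:
```python-console
>>> from datetime import datetime, timezone
>>> datetime.fromtimestamp(now.timestamp(), tz=timezone.utc).date()
datetime.date(2023, 4, 13)
```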
Output from failing test:
```
___________________________________________________________ test_cattrs_converter_structure_date[2023-04-13T23:08:58.999269+00:00-date-2023-04-14] ___________________________________________________________
value = '2023-04-13T23:08:58.999269+00:00', cls = <class 'datetime.date'>, expected = '2023-04-14'
@pytest.mark.parametrize(
"value,cls,expected",
(
(now, date, today.isoformat()),
(now.isoformat(), date, today.isoformat()),
(now.timestamp(), date, today.isoformat()),
(today, date, today.isoformat()),
(today.isoformat(), date, today.isoformat()),
),
)
def test_cattrs_converter_structure_date(value: Any, cls: Any, expected: Any) -> None:
result = _converter.structure(value, cls).isoformat()
> assert result == expected
E AssertionError: assert '2023-04-13' == '2023-04-14'
E - 2023-04-14
E ? ^
E + 2023-04-13
E ? ^
```
### Litestar Version
main
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| 2023-04-13T23:32:31 |
|
litestar-org/litestar | 1,503 | litestar-org__litestar-1503 | [
"4321",
"1234"
] | 4f734ea5970d414d49b92cf3c49988aa158cad30 | diff --git a/litestar/contrib/repository/abc.py b/litestar/contrib/repository/abc.py
--- a/litestar/contrib/repository/abc.py
+++ b/litestar/contrib/repository/abc.py
@@ -258,7 +258,7 @@ def check_not_found(item_or_none: T | None) -> T:
return item_or_none
@classmethod
- def get_id_attribute_value(cls, item: T) -> Any:
+ def get_id_attribute_value(cls, item: T | type[T]) -> Any:
"""Get value of attribute named as :attr:`id_attribute <AbstractRepository.id_attribute>` on ``item``.
Args:
diff --git a/litestar/contrib/sqlalchemy/base.py b/litestar/contrib/sqlalchemy/base.py
--- a/litestar/contrib/sqlalchemy/base.py
+++ b/litestar/contrib/sqlalchemy/base.py
@@ -3,7 +3,7 @@
import re
from datetime import datetime
-from typing import Any, TypeVar
+from typing import TYPE_CHECKING, Any, ClassVar, Protocol, TypeVar, runtime_checkable
from uuid import UUID, uuid4
from pydantic import AnyHttpUrl, AnyUrl, EmailStr
@@ -19,6 +19,9 @@
registry,
)
+if TYPE_CHECKING:
+ from sqlalchemy.sql import FromClause
+
__all__ = ("AuditBase", "AuditColumns", "Base", "CommonTableAttributes", "UUIDPrimaryKey", "touch_updated_timestamp")
@@ -51,6 +54,22 @@ def touch_updated_timestamp(session: Session, *_: Any) -> None:
instance.updated = datetime.now() # noqa: DTZ005
+@runtime_checkable
+class ModelProtocol(Protocol):
+ """The base SQLAlchemy model protocol."""
+
+ __table__: FromClause
+ __name__: ClassVar[str]
+
+ def to_dict(self, exclude: set[str] | None = None) -> dict[str, Any]:
+ """Convert model to dictionary.
+
+ Returns:
+ dict[str, Any]: A dict representation of the model
+ """
+ ...
+
+
@declarative_mixin
class UUIDPrimaryKey:
"""UUID Primary Key Field Mixin."""
@@ -78,8 +97,8 @@ class CommonTableAttributes:
"""Common attributes for SQLALchemy tables."""
__abstract__ = True
- __name__: str
- __table__: Any
+ __name__: ClassVar[str]
+ __table__: FromClause
# noinspection PyMethodParameters
@declared_attr.directive
diff --git a/litestar/contrib/sqlalchemy/repository.py b/litestar/contrib/sqlalchemy/repository.py
--- a/litestar/contrib/sqlalchemy/repository.py
+++ b/litestar/contrib/sqlalchemy/repository.py
@@ -32,7 +32,7 @@
)
T = TypeVar("T")
-ModelT = TypeVar("ModelT", bound="base.Base | base.AuditBase")
+ModelT = TypeVar("ModelT", bound="base.ModelProtocol")
SQLARepoT = TypeVar("SQLARepoT", bound="SQLAlchemyRepository")
SelectT = TypeVar("SelectT", bound="Select[Any]")
RowT = TypeVar("RowT", bound=Tuple[Any, ...])
@@ -249,7 +249,7 @@ async def get_or_create(self, **kwargs: Any) -> tuple[ModelT, bool]:
existing = await self.get_one_or_none(**kwargs)
if existing:
return existing, False
- return await self.add(self.model_type(**kwargs)), True # type: ignore[arg-type]
+ return await self.add(self.model_type(**kwargs)), True
async def count(self, *filters: FilterTypes, **kwargs: Any) -> int:
"""Get the count of records returned by a query.
@@ -263,9 +263,7 @@ async def count(self, *filters: FilterTypes, **kwargs: Any) -> int:
"""
statement = kwargs.pop("base_select", self.statement)
statement = statement.with_only_columns(
- sql_func.count(
- self.model_type.id,
- ),
+ sql_func.count(self.get_id_attribute_value(self.model_type)),
maintain_column_froms=True,
).order_by(None)
statement = self._apply_filters(*filters, apply_pagination=False, statement=statement)
@@ -351,9 +349,7 @@ async def list_and_count(
statement = kwargs.pop("base_select", self.statement)
statement = statement.add_columns(
over(
- sql_func.count(
- self.model_type.id,
- ),
+ sql_func.count(self.get_id_attribute_value(self.model_type)),
)
)
statement = self._apply_filters(*filters, statement=statement)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-16T13:50:02 |
|
litestar-org/litestar | 1,505 | litestar-org__litestar-1505 | [
"1504",
"4321"
] | 4f734ea5970d414d49b92cf3c49988aa158cad30 | diff --git a/litestar/utils/signature.py b/litestar/utils/signature.py
--- a/litestar/utils/signature.py
+++ b/litestar/utils/signature.py
@@ -45,6 +45,8 @@ def get_fn_type_hints(fn: Any, namespace: dict[str, Any] | None = None) -> dict[
"""
fn_to_inspect: Any = fn
+ module_name = fn_to_inspect.__module__
+
if isclass(fn_to_inspect):
fn_to_inspect = fn_to_inspect.__init__
@@ -61,7 +63,7 @@ def get_fn_type_hints(fn: Any, namespace: dict[str, Any] | None = None) -> dict[
namespace = {
**_GLOBAL_NAMES,
**vars(typing),
- **vars(sys.modules[fn_to_inspect.__module__]),
+ **vars(sys.modules[module_name]),
**(namespace or {}),
}
return get_type_hints(fn_to_inspect, globalns=namespace, include_extras=True)
| diff --git a/tests/utils/test_signature.py b/tests/utils/test_signature.py
--- a/tests/utils/test_signature.py
+++ b/tests/utils/test_signature.py
@@ -42,6 +42,18 @@ def test_get_fn_type_hints(fn: Any) -> None:
assert get_fn_type_hints(fn) == {"a": int, "b": str, "c": float, "return": NoneType}
+def test_get_fn_type_hints_class_no_init() -> None:
+ """Test that get_fn_type_hints works with classes that don't have an __init__ method.
+
+ Ref: https://github.com/litestar-org/litestar/issues/1504
+ """
+
+ class C:
+ ...
+
+ assert get_fn_type_hints(C) == {}
+
+
class _TD(TypedDict):
req_int: Required[int]
req_list_int: Required[List[int]]
| Bug: `AttributeError: 'wrapper_descriptor' object has no attribute '__module__'`
### Description
The Litestar app fails to start due to the error above.
### URL to code causing the issue
https://github.com/litestar-org/litestar/blob/4f734ea5970d414d49b92cf3c49988aa158cad30/litestar/utils/signature.py#L64
### MCVE
```python
from uuid import UUID
from litestar import Controller, Litestar, get
from litestar.di import Provide
from pydantic import BaseModel
class TestService:
...
class TestOutput(BaseModel):
id: UUID
class TestController(Controller):
path = "/test"
dependencies = {
"test_service": Provide(TestService),
}
@get("/{test_id:uuid}")
async def get_by_id(self, test_id: UUID) -> TestOutput:
return TestOutput(id=test_id)
app = Litestar(
route_handlers=[TestController],
)
```
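The root cause, sketched minimally: `TestService` defines no `__init__`, so the inspected function falls back to `object.__init__` — a `wrapper_descriptor` that carries no `__module__` attribute (which is why the patch captures `module_name` before swapping `fn_to_inspect` for `__init__`):
```python
class TestService:  # no explicit __init__, as in the MCVE
    ...

fn = TestService.__init__  # resolves to object.__init__
print(type(fn).__name__)  # wrapper_descriptor
print(hasattr(fn, "__module__"))  # False -- the source of the AttributeError
```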
### Steps to reproduce
Run the app in the MCVE section above.
### Logs
```bash
File "/app/src/asgi.py", line 11, in <module>
app = Litestar(
^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/app.py", line 438, in __init__
self.register(route_handler)
File "/usr/local/lib/python3.11/site-packages/litestar/app.py", line 546, in register
self._create_handler_signature_model(route_handler=route_handler)
File "/usr/local/lib/python3.11/site-packages/litestar/app.py", line 775, in _create_handler_signature_model
parsed_signature=ParsedSignature.from_fn(
^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/utils/signature.py", line 283, in from_fn
fn_type_hints = get_fn_type_hints(fn, namespace=signature_namespace)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/utils/signature.py", line 64, in get_fn_type_hints
**vars(sys.modules[fn_to_inspect.__module__]),
^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'wrapper_descriptor' object has no attribute '__module__'. Did you mean: '__reduce__'?
```
### Litestar Version
- `main` branch
- v2.0.0alpha4
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| Thanks for the report @lqmanh - if possible, can you provide an example of the code that reproduces this?
@peterschutt I've updated the MCVE code | 2023-04-16T20:30:07 |
litestar-org/litestar | 1,512 | litestar-org__litestar-1512 | [
"4321",
"1234"
] | 039b52b99620919d0225269b1d5e12aa02f54dca | diff --git a/docs/examples/conftest.py b/docs/examples/conftest.py
new file mode 100644
--- /dev/null
+++ b/docs/examples/conftest.py
@@ -0,0 +1 @@
+from tests.conftest import reset_httpx_logging # noqa: F401
| diff --git a/docs/examples/tests/application_hooks/test_application_after_exception_hook.py b/docs/examples/tests/application_hooks/test_application_after_exception_hook.py
--- a/docs/examples/tests/application_hooks/test_application_after_exception_hook.py
+++ b/docs/examples/tests/application_hooks/test_application_after_exception_hook.py
@@ -1,6 +1,8 @@
import logging
from typing import TYPE_CHECKING
+import pytest
+
from examples.application_hooks import after_exception_hook
from litestar.testing import TestClient
@@ -8,6 +10,7 @@
from _pytest.logging import LogCaptureFixture
[email protected]("reset_httpx_logging")
def test_application_shutdown_hooks(caplog: "LogCaptureFixture") -> None:
with caplog.at_level(logging.INFO), TestClient(app=after_exception_hook.app) as client:
assert len(caplog.messages) == 0
diff --git a/docs/examples/tests/application_state/test_using_application_state.py b/docs/examples/tests/application_state/test_using_application_state.py
--- a/docs/examples/tests/application_state/test_using_application_state.py
+++ b/docs/examples/tests/application_state/test_using_application_state.py
@@ -1,11 +1,14 @@
from logging import INFO
from typing import Any
+import pytest
+
from examples.application_state.using_application_state import app
from litestar.status_codes import HTTP_200_OK
from litestar.testing import TestClient
[email protected]("reset_httpx_logging")
def test_using_application_state(caplog: Any) -> None:
with caplog.at_level(INFO, "examples.application_state.using_application_state"), TestClient(app=app) as client:
response = client.get("/")
diff --git a/docs/examples/tests/middleware/test_logging_middleware.py b/docs/examples/tests/middleware/test_logging_middleware.py
--- a/docs/examples/tests/middleware/test_logging_middleware.py
+++ b/docs/examples/tests/middleware/test_logging_middleware.py
@@ -25,6 +25,7 @@ def get_logger() -> "GetLogger":
).configure()
[email protected]("reset_httpx_logging")
def test_logging_middleware_regular_logger(get_logger: "GetLogger", caplog: "LogCaptureFixture") -> None:
with TestClient(app=app) as client, caplog.at_level(logging.INFO):
client.app.get_logger = get_logger
diff --git a/docs/examples/tests/responses/test_background_tasks.py b/docs/examples/tests/responses/test_background_tasks.py
--- a/docs/examples/tests/responses/test_background_tasks.py
+++ b/docs/examples/tests/responses/test_background_tasks.py
@@ -1,6 +1,8 @@
import logging
from typing import TYPE_CHECKING
+import pytest
+
from examples.responses.background_tasks_1 import app as app_1
from examples.responses.background_tasks_2 import app as app_2
from examples.responses.background_tasks_3 import app as app_3
@@ -11,6 +13,9 @@
from _pytest.logging import LogCaptureFixture
+pytestmark = pytest.mark.usefixtures("reset_httpx_logging")
+
+
def test_background_tasks_1(caplog: "LogCaptureFixture") -> None:
with caplog.at_level(logging.INFO), TestClient(app=app_1) as client:
name = "Jane"
diff --git a/litestar/testing/client/async_client.py b/litestar/testing/client/async_client.py
--- a/litestar/testing/client/async_client.py
+++ b/litestar/testing/client/async_client.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import logging
from contextlib import AsyncExitStack
from typing import TYPE_CHECKING, Any, Generic, Mapping, TypeVar
@@ -31,9 +30,6 @@
T = TypeVar("T", bound=ASGIApp)
-# ensure that httpx logging is not interfering with our test client
-logging.getLogger("httpx").setLevel(logging.WARNING)
-
class AsyncTestClient(AsyncClient, BaseTestClient, Generic[T]): # type: ignore[misc]
lifespan_handler: LifeSpanHandler
diff --git a/litestar/testing/client/sync_client.py b/litestar/testing/client/sync_client.py
--- a/litestar/testing/client/sync_client.py
+++ b/litestar/testing/client/sync_client.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import logging
from contextlib import ExitStack
from typing import TYPE_CHECKING, Any, Generic, Mapping, TypeVar
from urllib.parse import urljoin
@@ -34,9 +33,6 @@
T = TypeVar("T", bound=ASGIApp)
-# ensure that httpx logging is not interfering with our test client
-logging.getLogger("httpx").setLevel(logging.WARNING)
-
class TestClient(Client, BaseTestClient, Generic[T]): # type: ignore[misc]
lifespan_handler: LifeSpanHandler
diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,4 +1,5 @@
import importlib.util
+import logging
import sys
from os import environ, urandom
from pathlib import Path
@@ -285,3 +286,13 @@ def mock_db() -> MemoryStore:
def frozen_datetime() -> Generator["FrozenDateTimeFactory", None, None]:
with freeze_time() as frozen:
yield cast("FrozenDateTimeFactory", frozen)
+
+
[email protected]()
+def reset_httpx_logging() -> Generator[None, None, None]:
+ # ensure that httpx logging is not interfering with our test client
+ httpx_logger = logging.getLogger("httpx")
+ initial_level = httpx_logger.level
+ httpx_logger.setLevel(logging.WARNING)
+ yield
+ httpx_logger.setLevel(initial_level)
diff --git a/tests/middleware/test_logging_middleware.py b/tests/middleware/test_logging_middleware.py
--- a/tests/middleware/test_logging_middleware.py
+++ b/tests/middleware/test_logging_middleware.py
@@ -21,6 +21,9 @@
from litestar.types.callable_types import GetLogger
+pytestmark = pytest.mark.usefixtures("reset_httpx_logging")
+
+
@get("/")
def handler() -> Response:
return Response(
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-17T11:45:26 |
litestar-org/litestar | 1,534 | litestar-org__litestar-1534 | [
"1525",
"4321"
] | 1538640da492f8fe1f21e9d9c7b15d3c4d182c3d | diff --git a/litestar/_kwargs/extractors.py b/litestar/_kwargs/extractors.py
--- a/litestar/_kwargs/extractors.py
+++ b/litestar/_kwargs/extractors.py
@@ -11,7 +11,6 @@
parse_url_encoded_form_data,
)
from litestar.datastructures.upload_file import UploadFile
-from litestar.dto.interface import DTOInterface
from litestar.enums import ParamType, RequestEncodingType
from litestar.exceptions import ValidationException
from litestar.params import BodyKwarg
@@ -22,7 +21,7 @@
from litestar._kwargs.parameter_definition import ParameterDefinition
from litestar._signature.field import SignatureField
from litestar.connection import ASGIConnection, Request
- from litestar.utils.signature import ParsedParameter
+ from litestar.dto.interface import DTOInterface
__all__ = (
"body_extractor",
@@ -383,7 +382,7 @@ def create_data_extractor(kwargs_model: KwargsModel) -> Callable[[dict[str, Any]
"Callable[[ASGIConnection[Any, Any, Any, Any]], Coroutine[Any, Any, Any]]", msgpack_extractor
)
elif kwargs_model.expected_dto_data:
- data_extractor = create_dto_extractor(*kwargs_model.expected_dto_data)
+ data_extractor = create_dto_extractor(kwargs_model.expected_dto_data)
else:
data_extractor = cast(
"Callable[[ASGIConnection[Any, Any, Any, Any]], Coroutine[Any, Any, Any]]", json_extractor
@@ -399,23 +398,18 @@ def extractor(
def create_dto_extractor(
- parsed_parameter: ParsedParameter, dto_type: type[DTOInterface]
+ dto_type: type[DTOInterface],
) -> Callable[[ASGIConnection[Any, Any, Any, Any]], Coroutine[Any, Any, Any]]:
"""Create a DTO data extractor.
Args:
- parsed_parameter: :class:`ParsedParameter` instance representing the ``"data"`` kwarg.
dto_type: The :class:`DTOInterface` subclass.
Returns:
An extractor function.
"""
- is_dto_annotated = parsed_parameter.parsed_type.is_subclass_of(DTOInterface)
async def dto_extractor(connection: Request[Any, Any, Any]) -> Any:
- dto = dto_type.from_bytes(await connection.body(), connection)
- if is_dto_annotated:
- return dto
- return dto.to_data_type()
+ return dto_type.from_bytes(await connection.body(), connection).to_data_type()
return dto_extractor # type:ignore[return-value]
diff --git a/litestar/_kwargs/kwargs_model.py b/litestar/_kwargs/kwargs_model.py
--- a/litestar/_kwargs/kwargs_model.py
+++ b/litestar/_kwargs/kwargs_model.py
@@ -32,7 +32,6 @@
from litestar._signature import SignatureModel, get_signature_model
from litestar._signature.field import SignatureField
from litestar.constants import RESERVED_KWARGS
-from litestar.dto.interface import DTOInterface
from litestar.enums import ParamType, RequestEncodingType
from litestar.exceptions import ImproperlyConfiguredException
from litestar.params import BodyKwarg, ParameterKwarg
@@ -43,7 +42,8 @@
if TYPE_CHECKING:
from litestar.connection import ASGIConnection
from litestar.di import Provide
- from litestar.utils.signature import ParsedParameter, ParsedSignature
+ from litestar.dto.interface import DTOInterface
+ from litestar.utils.signature import ParsedSignature
class KwargsModel:
@@ -72,7 +72,7 @@ def __init__(
self,
*,
expected_cookie_params: set[ParameterDefinition],
- expected_dto_data: tuple[ParsedParameter, type[DTOInterface]] | None,
+ expected_dto_data: type[DTOInterface] | None,
expected_dependencies: set[Dependency],
expected_form_data: tuple[RequestEncodingType | str, SignatureField] | None,
expected_msgpack_data: SignatureField | None,
@@ -304,7 +304,7 @@ def create_for_signature_model(
expected_form_data: tuple[RequestEncodingType | str, SignatureField] | None = None
expected_msgpack_data: SignatureField | None = None
- expected_dto_data: tuple[ParsedParameter, type[DTOInterface]] | None = None
+ expected_dto_data: type[DTOInterface] | None = None
data_signature_field = signature_fields.get("data")
@@ -322,13 +322,8 @@ def create_for_signature_model(
elif media_type == RequestEncodingType.MESSAGEPACK:
expected_msgpack_data = data_signature_field
- elif data_signature_field:
- parsed_parameter = parsed_signature.parameters["data"]
- parsed_type = parsed_parameter.parsed_type
- if parsed_type.is_subclass_of(DTOInterface):
- expected_dto_data = (parsed_parameter, parsed_type.annotation)
- elif data_dto:
- expected_dto_data = (parsed_parameter, data_dto)
+ elif data_signature_field and data_dto:
+ expected_dto_data = data_dto
for dependency in expected_dependencies:
dependency_kwargs_model = cls.create_for_signature_model(
| diff --git a/tests/dto/test_integration.py b/tests/dto/test_integration.py
--- a/tests/dto/test_integration.py
+++ b/tests/dto/test_integration.py
@@ -86,15 +86,3 @@ def handler(data: Model) -> Model:
response = client.post("/", json={"what": "ever"})
assert response.status_code == 201
assert response.json() == {"a": 1, "b": "2"}
-
-
-def test_dto_annotated_handler() -> None:
- @post()
- def handler(data: MockDTO) -> MockDTO:
- assert isinstance(data, MockDTO)
- return data
-
- with create_test_client(route_handlers=handler) as client:
- response = client.post("/", json={"what": "ever"})
- assert response.status_code == 201
- assert response.json() == {"a": 1, "b": "2"}
diff --git a/tests/kwargs/test_dto_extractor.py b/tests/kwargs/test_dto_extractor.py
--- a/tests/kwargs/test_dto_extractor.py
+++ b/tests/kwargs/test_dto_extractor.py
@@ -3,26 +3,9 @@
from unittest.mock import AsyncMock
from litestar._kwargs.extractors import create_dto_extractor
-from litestar.types.empty import Empty
-from litestar.utils.signature import ParsedParameter, ParsedType
from tests.dto import MockDTO, Model
-async def test_create_dto_extractor_not_dto_annotated() -> None:
- parsed_parameter = ParsedParameter(
- name="data",
- default=Empty,
- parsed_type=ParsedType.from_annotation(Model),
- )
- extractor = create_dto_extractor(parsed_parameter, MockDTO)
+async def test_create_dto_extractor() -> None:
+ extractor = create_dto_extractor(MockDTO)
assert await extractor(AsyncMock()) == Model(a=1, b="2")
-
-
-async def test_create_dto_extractor_dto_annotated() -> None:
- parsed_parameter = ParsedParameter(
- name="data",
- default=Empty,
- parsed_type=ParsedType.from_annotation(MockDTO),
- )
- extractor = create_dto_extractor(parsed_parameter, MockDTO)
- assert isinstance(await extractor(AsyncMock()), MockDTO)
| DTO Interface: consider removing support for declaring the DTO as data/return type
This is mostly a feature hangover from when the DTO types _were_ pydantic models and relied on being in the signature model.
Now that they are declarable at the layers, I'm not sure this support is still necessary: less is more, it can always be added later, and I'd want to hear actual use-cases for it first.
| 2023-04-18T04:03:32 |
|
litestar-org/litestar | 1,540 | litestar-org__litestar-1540 | [
"1538",
"4321",
"1234"
] | ae2b8fcde70566bb9929c984525ee6bf98d16f0f | diff --git a/litestar/_openapi/schema_generation/examples.py b/litestar/_openapi/schema_generation/examples.py
--- a/litestar/_openapi/schema_generation/examples.py
+++ b/litestar/_openapi/schema_generation/examples.py
@@ -41,7 +41,7 @@ def _create_field_meta(field: "SignatureField") -> FieldMeta:
return FieldMeta(
name=field.name,
annotation=field.field_type,
- constant=field.is_const,
+ constraints={"constant": field.is_const},
default=field.default_value if field.default_value is not Empty else Null,
children=[_create_field_meta(child) for child in field.children] if field.children else None,
)
| diff --git a/tests/openapi/test_parameters.py b/tests/openapi/test_parameters.py
--- a/tests/openapi/test_parameters.py
+++ b/tests/openapi/test_parameters.py
@@ -115,7 +115,7 @@ def test_create_parameters() -> None:
{
"type": "string",
"enum": ["M", "F", "O", "A"],
- "examples": [{"description": "Example value", "value": "A"}],
+ "examples": [{"description": "Example value", "value": "M"}],
},
],
"examples": [{"value": "M"}, {"value": ["M", "O"]}],
diff --git a/tests/openapi/test_responses.py b/tests/openapi/test_responses.py
--- a/tests/openapi/test_responses.py
+++ b/tests/openapi/test_responses.py
@@ -322,7 +322,6 @@ def handler() -> Person:
schema = schemas[reference.ref.split("/")[-1]]
assert isinstance(schema, Schema)
assert schema.title == "AuthenticationError"
- assert schema.examples
second_response = next(responses)
assert second_response[0] == "500"
| Bug: FieldMeta unexpected keyword argument 'constant'
### Description
After upgrading from `polyfactory==2.0.0alpha1` to `2.0.0`, I end up with `FieldMeta.__init__() got an unexpected keyword argument 'constant'`.
It looks like example generation for the OpenAPI docs is broken because the `constant` boolean field was removed in 2.0.0:
https://github.com/litestar-org/polyfactory/blob/v2.0.0/polyfactory/field_meta.py#L39-L48 (2.0.0)
vs
https://github.com/litestar-org/polyfactory/blob/v2.0.0alpha1/polyfactory/field_meta.py#L12-L21 (2.0.0a1)
And is set by https://github.com/litestar-org/litestar/blob/v2.0.0alpha4/litestar/_openapi/schema_generation/examples.py#L44 (2.0.0a4)
Running on docker `python:3.11-alpine`
### URL to code causing the issue
_No response_
### MCVE
```python
class TestController(Controller):
path = "/test"
@post(
path="/route",
summary="Test Route",
tags=["Test"],
responses={503: ResponseSpec(data_container=ServiceUnavailableModel, description="Device or service unavailable")},
)
async def test_route(self, data: SomeDataModel) -> SomeResponseModel:
return {"test": data}
```
The `responses=` line causes this error.
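The mismatch, sketched: polyfactory 2.0.0 dropped the direct `constant` keyword and expects the flag inside the `constraints` mapping — which is what the patch switches to (field values here are illustrative):
```python
from polyfactory.field_meta import FieldMeta

# polyfactory 2.0.0alpha1 -- accepted a direct keyword:
FieldMeta(name="pet_type", annotation=str, constant=True)

# polyfactory 2.0.0 -- the flag now travels inside `constraints`:
FieldMeta(name="pet_type", annotation=str, constraints={"constant": True})
```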
### Logs
```bash
Traceback (most recent call last):
File "/usr/local/lib/python3.11/site-packages/litestar/middleware/exceptions/middleware.py", line 149, in __call__
await self.app(scope, receive, send)
File "/usr/local/lib/python3.11/site-packages/litestar/routes/http.py", line 77, in handle
response = await self._get_response_for_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/routes/http.py", line 129, in _get_response_for_request
response = await self._call_handler_function(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/routes/http.py", line 158, in _call_handler_function
response_data, cleanup_group = await self._get_response_data(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/routes/http.py", line 210, in _get_response_data
data = route_handler.fn.value(**parsed_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/openapi/controller.py", line 221, in root
return Response(content=render_method(request), media_type=MediaType.HTML)
^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/openapi/controller.py", line 397, in render_redoc
schema = self.get_schema_from_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/openapi/controller.py", line 105, in get_schema_from_request
return request.app.openapi_schema
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/app.py", line 510, in openapi_schema
self.update_openapi_schema()
File "/usr/local/lib/python3.11/site-packages/litestar/app.py", line 825, in update_openapi_schema
path_item, created_operation_ids = create_path_item(
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/path_item.py", line 125, in create_path_item
responses=create_responses(
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/responses.py", line 259, in create_responses
for status_code, response in create_additional_responses(
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/responses.py", line 226, in create_additional_responses
schema = create_schema(
^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/schema.py", line 724, in create_schema
result = create_schema_for_pydantic_model(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/schema.py", line 541, in create_schema_for_pydantic_model
properties={
^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/schema.py", line 542, in <dictcomp>
(f.alias or f.name): create_schema(
^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/schema.py", line 769, in create_schema
return _process_schema_result(field=field, schema=result, generate_examples=generate_examples, schemas=schemas)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/schema.py", line 680, in _process_schema_result
schema.examples = create_examples_for_field(field=field)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/examples.py", line 60, in create_examples_for_field
field_meta = _create_field_meta(field)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/litestar/_openapi/schema_generation/examples.py", line 41, in _create_field_meta
return FieldMeta(
^^^^^^^^^^
TypeError: FieldMeta.__init__() got an unexpected keyword argument 'constant'
```
### Litestar Version
Litestar 2.0.0a4
polyfactory 2.0.0alpha1 (no error)
polyfactory 2.0.0 (error)
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [X] Other (Please specify in the description above)
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| Can you provide an example that reproduces this?
Can be reproduced by forcing `polyfactory==2.0.0` and running `pytest tests/openapi`
I agree. Do you want to submit a PR? | 2023-04-18T11:34:48 |
litestar-org/litestar | 1,542 | litestar-org__litestar-1542 | [
"4321",
"1234"
] | 8a6c7ed1043e32dbb2abe558654393a0ceb630a0 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -152,18 +152,35 @@
"extra_navbar_items": {
"Documentation": "index",
"Community": {
- "Contribution Guide": "contribution-guide",
- "Code of Conduct": "https://github.com/litestar-org/.github/blob/main/CODE_OF_CONDUCT.md",
+ "Contributing": {
+ "description": "Learn how to contribute to the Litestar project",
+ "link": "https://docs.litestar.dev/2/contribution-guide.html",
+ "icon": "contributing",
+ },
+ "Code of Conduct": {
+ "description": "Review the etiquette for interacting with the Litestar community",
+ "link": "https://github.com/litestar-org/.github/blob/main/CODE_OF_CONDUCT.md",
+ "icon": "coc",
+ },
},
"About": {
- "Organization": "https://litestar.dev/about/organization",
- "Releases": "https://litestar.dev/about/litestar-releases",
+ "Litestar Organization": {
+ "description": "Details about the Litestar organization",
+ "link": "about/organization",
+ "icon": "org",
+ },
+ "Releases": {
+ "description": "Details about the Litestar release process",
+ "link": "about/litestar-releases",
+ "icon": "releases",
+ },
},
"Release notes": {
"2.0 migration guide": "release-notes/migration_guide_2",
"2.x Changelog": "https://docs.litestar.dev/2/release-notes/changelog.html",
"1.x Changelog": "https://docs.litestar.dev/1/release-notes/changelog.html",
},
+ "Help": "https://github.com/orgs/litestar-org/discussions",
},
}
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-18T12:09:05 |
|
litestar-org/litestar | 1,543 | litestar-org__litestar-1543 | [
"4321",
"1234"
] | 3f75a50fe33fd6fe9bcca084930ceb4f20c80e92 | diff --git a/litestar/_signature/models/attrs_signature_model.py b/litestar/_signature/models/attrs_signature_model.py
--- a/litestar/_signature/models/attrs_signature_model.py
+++ b/litestar/_signature/models/attrs_signature_model.py
@@ -15,8 +15,6 @@
from uuid import UUID
from _decimal import Decimal
-from dateutil.parser import parse
-from pytimeparse.timeparse import timeparse
from typing_extensions import get_args
from litestar._signature.field import SignatureField
@@ -35,7 +33,17 @@
import attrs
import cattrs
except ImportError as e:
- raise MissingDependencyException("attrs is not installed") from e
+ raise MissingDependencyException("attrs") from e
+
+try:
+ from dateutil.parser import parse
+except ImportError as e:
+ raise MissingDependencyException("python-dateutil", "attrs") from e
+
+try:
+ from pytimeparse.timeparse import timeparse
+except ImportError as e:
+ raise MissingDependencyException("pytimeparse", "attrs") from e
if TYPE_CHECKING:
from litestar.plugins import PluginMapping
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
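The patch in this record splits the guarded imports so that each missing package is reported by name. A standalone sketch of the same pattern, with the argument semantics taken from the usage in the patch above (the second argument names the extra that provides the package):
```python
from litestar.exceptions import MissingDependencyException

try:
    from dateutil.parser import parse  # optional runtime dependency
except ImportError as e:
    # First argument: the pip package to install; second: the litestar
    # extra ("attrs") that pulls it in, per the patch above.
    raise MissingDependencyException("python-dateutil", "attrs") from e
```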
| I agree. Do you want to submit a PR? | 2023-04-18T13:08:12 |
|
litestar-org/litestar | 1,550 | litestar-org__litestar-1550 | [
"1547",
"4321",
"1234"
] | b0868be8550d02fbe14ee79e43f7c63e00ebbdc7 | diff --git a/litestar/_parsers.py b/litestar/_parsers.py
--- a/litestar/_parsers.py
+++ b/litestar/_parsers.py
@@ -34,8 +34,7 @@ def parse_query_string(query_string: bytes) -> tuple[tuple[str, Any], ...]:
Returns:
A tuple of key value pairs.
"""
- _bools = {"true": True, "false": False, "True": True, "False": False}
- return tuple((k, v if v not in _bools else _bools[v]) for k, v in fast_parse_query_string(query_string, "&"))
+ return tuple(fast_parse_query_string(query_string, "&"))
@lru_cache(1024)
diff --git a/litestar/_signature/models/attrs_signature_model.py b/litestar/_signature/models/attrs_signature_model.py
--- a/litestar/_signature/models/attrs_signature_model.py
+++ b/litestar/_signature/models/attrs_signature_model.py
@@ -48,9 +48,10 @@
if TYPE_CHECKING:
from litestar.utils.signature import ParsedSignature
-key_re = re.compile("@ attribute (.*)|'(.*)'")
-
__all__ = ("AttrsSignatureModel",)
+key_re = re.compile("@ attribute (.*)|'(.*)'")
+TRUE_SET = {"1", "true", "on", "t", "y", "yes"}
+FALSE_SET = {"0", "false", "off", "f", "n", "no"}
try:
import pydantic
@@ -75,6 +76,22 @@ def _pass_through_unstructure_hook(value: Any) -> Any:
return value
+def _structure_bool(value: Any, _: type[bool]) -> bool:
+ if isinstance(value, bytes):
+ value = value.decode("utf-8").lower()
+
+ if isinstance(value, str):
+ value = value.lower()
+
+ if value == 0 or value in FALSE_SET:
+ return False
+
+ if value == 1 or value in TRUE_SET:
+ return True
+
+ raise ValueError(f"Cannot convert {value} to bool")
+
+
def _structure_datetime(value: Any, cls: type[datetime]) -> datetime:
if isinstance(value, datetime):
return value
@@ -151,6 +168,7 @@ def _structure_str(value: Any, cls: type[str]) -> str:
(UUID, _structure_uuid),
(UploadFile, _pass_through_structure_hook),
(WebSocket, _pass_through_structure_hook),
+ (bool, _structure_bool),
(date, _structure_date),
(datetime, _structure_datetime),
(str, _structure_str),
| diff --git a/tests/datastructures/test_url.py b/tests/datastructures/test_url.py
--- a/tests/datastructures/test_url.py
+++ b/tests/datastructures/test_url.py
@@ -34,7 +34,7 @@ def test_url() -> None:
assert url.password == "hunter2"
assert url.port == 81
assert url.hostname == "example.org"
- assert url.query_params.dict() == {"query": ["param"], "bool": [True]}
+ assert url.query_params.dict() == {"query": ["param"], "bool": ["true"]}
@pytest.mark.parametrize(
diff --git a/tests/signature/test_attrs_signature_modelling.py b/tests/signature/test_attrs_signature_modelling.py
--- a/tests/signature/test_attrs_signature_modelling.py
+++ b/tests/signature/test_attrs_signature_modelling.py
@@ -13,6 +13,58 @@
person = PersonFactory.build()
[email protected](
+ "value,expected",
+ (
+ ("1", True),
+ (b"1", True),
+ ("True", True),
+ (b"True", True),
+ ("on", True),
+ (b"on", True),
+ ("t", True),
+ (b"t", True),
+ ("true", True),
+ (b"true", True),
+ ("y", True),
+ (b"y", True),
+ ("yes", True),
+ (b"yes", True),
+ (1, True),
+ (True, True),
+ ("0", False),
+ (b"0", False),
+ ("False", False),
+ (b"False", False),
+ ("f", False),
+ (b"f", False),
+ ("false", False),
+ (b"false", False),
+ ("n", False),
+ (b"n", False),
+ ("no", False),
+ (b"no", False),
+ ("off", False),
+ (b"off", False),
+ (0, False),
+ (False, False),
+ ),
+)
+def test_cattrs_converter_structure_bool(value: Any, expected: Any) -> None:
+ result = _converter.structure(value, bool)
+ assert result == expected
+
+
+def test_cattrs_converter_structure_bool_value_error() -> None:
+ with pytest.raises(ValueError):
+ _converter.structure(None, bool)
+ _converter.structure("foofoofoo", bool)
+ _converter.structure(object(), bool)
+ _converter.structure(type, bool)
+ _converter.structure({}, bool)
+ _converter.structure([], bool)
+
+
@pytest.mark.parametrize(
"value,cls,expected",
(
diff --git a/tests/signature/test_parsing.py b/tests/signature/test_parsing.py
--- a/tests/signature/test_parsing.py
+++ b/tests/signature/test_parsing.py
@@ -1,4 +1,5 @@
from typing import TYPE_CHECKING, Any, Iterable, List, Literal, Optional, Sequence
+from unittest.mock import MagicMock
import pytest
from pydantic import BaseModel
@@ -265,3 +266,18 @@ def fn(a: Sequence[int], b: OptionalSequence[int]) -> None:
assert model.fields["a"].is_non_string_sequence
assert model.fields["b"].is_non_string_sequence
+
+
[email protected]("signature_backend", ["pydantic", "attrs"])
[email protected]("query,expected", [("1", True), ("true", True), ("0", False), ("false", False)])
+def test_query_param_bool(query: str, expected: bool, signature_backend: Literal["pydantic", "attrs"]) -> None:
+ mock = MagicMock()
+
+ @get("/")
+ def handler(param: bool) -> None:
+ mock(param)
+
+ with create_test_client(route_handlers=[handler], preferred_validation_backend=signature_backend) as client:
+ response = client.get(f"/?param={query}")
+ assert response.status_code == HTTP_200_OK, response.json()
+ mock.assert_called_once_with(expected)
diff --git a/tests/test_parsers.py b/tests/test_parsers.py
--- a/tests/test_parsers.py
+++ b/tests/test_parsers.py
@@ -87,8 +87,8 @@ def test_parse_query_string() -> None:
"value": ["10"],
"veggies": ["tomato", "potato", "aubergine"],
"calories": ["122.53"],
- "healthy": [True],
- "polluting": [False],
+ "healthy": ["True"],
+ "polluting": ["False"],
}
| Bug: Query parameter values of "true"/"false" become title-cased when used as string query parameter
### Description
When defining a query parameter as `param: str`, and passing it a value of `true`, the value received by the route handler is `True`, having been title cased. The same is true for the value of `false`.
The issue lies within the `parse_query_string` function:
https://github.com/litestar-org/litestar/blob/ae2b8fcde70566bb9929c984525ee6bf98d16f0f/litestar/_parsers.py#L28-L38
The value is converted to a `bool` here, and later on back to a string since the parameter is annotated as such.
The conversion of query string parameters to `bool`s should not happen there, but instead be directed by the type annotation of the parameter, and occur within the signature model.
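A reduced, standalone sketch of the annotation-directed coercion the fix introduces (cf. `_structure_bool` in the patch above); the truthy/falsy sets are copied from the patch:
```python
# The raw query value stays a string; only a `bool` annotation on the
# handler parameter should trigger this conversion.
TRUE_SET = {"1", "true", "on", "t", "y", "yes"}
FALSE_SET = {"0", "false", "off", "f", "n", "no"}


def structure_bool(value: str) -> bool:
    lowered = value.lower()
    if lowered in TRUE_SET:
        return True
    if lowered in FALSE_SET:
        return False
    raise ValueError(f"Cannot convert {value!r} to bool")


assert structure_bool("true") is True
assert structure_bool("0") is False
```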
### URL to code causing the issue
_No response_
### MCVE
```python
from litestar import get
from litestar.testing import create_test_client
@get("/")
def handler(param: str) -> str:
return param
with create_test_client(handler) as client:
assert client.get("/?param=true").text == "True"
```
### Steps to reproduce
_No response_
### Screenshots
```bash
""
```
### Logs
_No response_
### Litestar Version
-
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
|
I agree. Do you want to submit a PR? | 2023-04-19T09:31:12 |
litestar-org/litestar | 1,551 | litestar-org__litestar-1551 | [
"4321",
"1234"
] | 67652998202e8cc0bd96560f189774d64025cd92 | diff --git a/litestar/app.py b/litestar/app.py
--- a/litestar/app.py
+++ b/litestar/app.py
@@ -2,7 +2,6 @@
import logging
from datetime import date, datetime, time, timedelta
-from functools import partial
from itertools import chain
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal, Mapping, Sequence, cast
@@ -12,7 +11,6 @@
from litestar._asgi import ASGIRouter
from litestar._asgi.utils import get_route_handlers, wrap_in_exception_handler
from litestar._openapi.path_item import create_path_item
-from litestar._signature import create_signature_model
from litestar.config.allowed_hosts import AllowedHostsConfig
from litestar.config.app import AppConfig
from litestar.config.response_cache import ResponseCacheConfig
@@ -24,7 +22,6 @@
ImproperlyConfiguredException,
NoRouteMatchFoundException,
)
-from litestar.handlers.http_handlers import HTTPRouteHandler
from litestar.logging.config import LoggingConfig, get_logger_placeholder
from litestar.middleware.cors import CORSMiddleware
from litestar.openapi.config import OpenAPIConfig
@@ -42,14 +39,10 @@
from litestar.types.internal_types import PathParameterDefinition
from litestar.utils import (
as_async_callable_list,
- async_partial,
- is_async_callable,
join_paths,
unique,
)
from litestar.utils.dataclass import extract_dataclass_items
-from litestar.utils.helpers import unwrap_partial
-from litestar.utils.signature import ParsedSignature
if TYPE_CHECKING:
from litestar.config.compression import CompressionConfig
@@ -58,7 +51,6 @@
from litestar.datastructures import CacheControlHeader, ETag, ResponseHeader
from litestar.dto.interface import DTOInterface
from litestar.events.listener import EventListener
- from litestar.handlers.base import BaseRouteHandler
from litestar.logging.config import BaseLoggingConfig
from litestar.openapi.spec import SecurityRequirement
from litestar.openapi.spec.open_api import OpenAPI
@@ -541,9 +533,7 @@ def register(self, value: ControllerRouterHandler) -> None: # type: ignore[over
route_handlers = get_route_handlers(route)
for route_handler in route_handlers:
- route_handler.on_registration()
- self._set_runtime_callables(route_handler=route_handler)
- self._create_handler_signature_model(route_handler=route_handler)
+ route_handler.on_registration(self)
if isinstance(route, HTTPRoute):
route.create_handler_map()
@@ -720,63 +710,6 @@ def _create_asgi_handler(self) -> ASGIApp:
debug=self.debug, app=asgi_handler, exception_handlers=self.exception_handlers or {}
)
- @staticmethod
- def _set_runtime_callables(route_handler: BaseRouteHandler) -> None:
- """Optimize the ``route_handler.fn`` and any ``provider.dependency`` callables for runtime by doing the following:
-
- 1. ensure that the ``self`` argument is preserved by binding it using partial.
- 2. ensure sync functions are wrapped in AsyncCallable for sync_to_thread handlers.
-
- Args:
- route_handler: A route handler to process.
-
- Returns:
- None
- """
- from litestar.controller import Controller
-
- if isinstance(route_handler.owner, Controller) and not hasattr(route_handler.fn.value, "func"):
- route_handler.fn.value = partial(route_handler.fn.value, route_handler.owner)
-
- if isinstance(route_handler, HTTPRouteHandler):
- route_handler.has_sync_callable = False
- if not is_async_callable(route_handler.fn.value):
- if route_handler.sync_to_thread:
- route_handler.fn.value = async_partial(route_handler.fn.value)
- else:
- route_handler.has_sync_callable = True
-
- for provider in route_handler.resolve_dependencies().values():
- if not is_async_callable(provider.dependency.value):
- provider.has_sync_callable = False
- if provider.sync_to_thread:
- provider.dependency.value = async_partial(provider.dependency.value)
- else:
- provider.has_sync_callable = True
-
- def _create_handler_signature_model(self, route_handler: BaseRouteHandler) -> None:
- """Create function signature models for all route handler functions and provider dependencies."""
- if not route_handler.signature_model:
- route_handler.signature_model = create_signature_model(
- dependency_name_set=route_handler.dependency_name_set,
- fn=cast("AnyCallable", route_handler.fn.value),
- plugins=self.serialization_plugins,
- preferred_validation_backend=self.preferred_validation_backend,
- parsed_signature=route_handler.parsed_fn_signature,
- )
-
- for provider in route_handler.resolve_dependencies().values():
- if not getattr(provider, "signature_model", None):
- provider.signature_model = create_signature_model(
- dependency_name_set=route_handler.dependency_name_set,
- fn=provider.dependency.value,
- plugins=self.serialization_plugins,
- preferred_validation_backend=self.preferred_validation_backend,
- parsed_signature=ParsedSignature.from_fn(
- unwrap_partial(provider.dependency.value), route_handler.resolve_signature_namespace()
- ),
- )
-
def _wrap_send(self, send: Send, scope: Scope) -> Send:
"""Wrap the ASGI send and handles any 'before send' hooks.
diff --git a/litestar/handlers/base.py b/litestar/handlers/base.py
--- a/litestar/handlers/base.py
+++ b/litestar/handlers/base.py
@@ -1,26 +1,29 @@
from __future__ import annotations
from copy import copy
+from functools import partial
from typing import TYPE_CHECKING, Any, Generic, Mapping, Sequence, TypeVar, cast
+from litestar._signature import create_signature_model
from litestar._signature.field import SignatureField
from litestar.dto.interface import DTOInterface
from litestar.exceptions import ImproperlyConfiguredException
from litestar.types import Dependencies, Empty, ExceptionHandlersMap, Guard, Middleware, TypeEncodersMap
-from litestar.utils import AsyncCallable, Ref, get_name, normalize_path
+from litestar.utils import AsyncCallable, Ref, async_partial, get_name, is_async_callable, normalize_path
from litestar.utils.helpers import unwrap_partial
from litestar.utils.signature import ParsedSignature
if TYPE_CHECKING:
from typing_extensions import Self
+ from litestar import Litestar
from litestar._signature.models import SignatureModel
from litestar.connection import ASGIConnection
from litestar.controller import Controller
from litestar.di import Provide
from litestar.params import ParameterKwarg
from litestar.router import Router
- from litestar.types import AsyncAnyCallable, ExceptionHandler
+ from litestar.types import AnyCallable, AsyncAnyCallable, ExceptionHandler
from litestar.types.composite_types import MaybePartial
from litestar.types.empty import EmptyType
@@ -369,17 +372,61 @@ def _validate_dependency_is_unique(dependencies: dict[str, Provide], key: str, p
f"If you wish to override a provider, it must have the same key."
)
- def on_registration(self) -> None:
+ def on_registration(self, app: Litestar) -> None:
"""Called once per handler when the app object is instantiated."""
self._validate_handler_function()
self.resolve_guards()
self.resolve_middleware()
self.resolve_opts()
self._init_handler_dtos()
+ self._set_runtime_callables()
+ self._create_signature_model(app)
def _validate_handler_function(self) -> None:
"""Validate the route handler function once set by inspecting its return annotations."""
+ def _set_runtime_callables(self) -> None:
+ """Optimize the ``route_handler.fn`` and any ``provider.dependency`` callables for runtime by doing the following:
+
+ 1. ensure that the ``self`` argument is preserved by binding it using partial.
+ 2. ensure sync functions are wrapped in AsyncCallable for sync_to_thread handlers.
+ """
+ from litestar.controller import Controller
+
+ if isinstance(self.owner, Controller) and not hasattr(self.fn.value, "func"):
+ self.fn.value = partial(self.fn.value, self.owner)
+
+ for provider in self.resolve_dependencies().values():
+ if not is_async_callable(provider.dependency.value):
+ provider.has_sync_callable = False
+ if provider.sync_to_thread:
+ provider.dependency.value = async_partial(provider.dependency.value)
+ else:
+ provider.has_sync_callable = True
+
+ def _create_signature_model(self, app: Litestar) -> None:
+ """Create signature model for handler function and dependencies."""
+ if not self.signature_model:
+ self.signature_model = create_signature_model(
+ dependency_name_set=self.dependency_name_set,
+ fn=cast("AnyCallable", self.fn.value),
+ plugins=app.serialization_plugins,
+ preferred_validation_backend=app.preferred_validation_backend,
+ parsed_signature=self.parsed_fn_signature,
+ )
+
+ for provider in self.resolve_dependencies().values():
+ if not getattr(provider, "signature_model", None):
+ provider.signature_model = create_signature_model(
+ dependency_name_set=self.dependency_name_set,
+ fn=provider.dependency.value,
+ plugins=app.serialization_plugins,
+ preferred_validation_backend=app.preferred_validation_backend,
+ parsed_signature=ParsedSignature.from_fn(
+ unwrap_partial(provider.dependency.value), self.resolve_signature_namespace()
+ ),
+ )
+
def __str__(self) -> str:
"""Return a unique identifier for the route handler.
diff --git a/litestar/handlers/http_handlers/base.py b/litestar/handlers/http_handlers/base.py
--- a/litestar/handlers/http_handlers/base.py
+++ b/litestar/handlers/http_handlers/base.py
@@ -44,7 +44,7 @@
TypeEncodersMap,
)
from litestar.types.builtin_types import NoneType
-from litestar.utils import AsyncCallable, is_async_callable
+from litestar.utils import AsyncCallable, async_partial, is_async_callable
if TYPE_CHECKING:
from typing import Any, Awaitable, Callable, Sequence
@@ -468,8 +468,8 @@ async def to_response(
response_handler = self.get_response_handler(is_response_type_data=isinstance(data, Response))
return await response_handler(app=app, data=data, plugins=plugins, request=request, return_dto=self.resolve_return_dto()) # type: ignore
- def on_registration(self) -> None:
- super().on_registration()
+ def on_registration(self, app: Litestar) -> None:
+ super().on_registration(app)
if before_request := self.resolve_before_request():
before_request.set_parsed_signature(self.resolve_signature_namespace())
self.resolve_after_response()
@@ -508,5 +508,15 @@ def _validate_handler_function(self) -> None:
if "data" in self.parsed_fn_signature.parameters and "GET" in self.http_methods:
raise ImproperlyConfiguredException("'data' kwarg is unsupported for 'GET' request handlers")
+ def _set_runtime_callables(self) -> None:
+ """Set the runtime callables for the route handler."""
+ super()._set_runtime_callables()
+ self.has_sync_callable = False
+ if not is_async_callable(self.fn.value):
+ if self.sync_to_thread:
+ self.fn.value = async_partial(self.fn.value)
+ else:
+ self.has_sync_callable = True
+
route = HTTPRouteHandler
| diff --git a/tests/handlers/asgi/test_validations.py b/tests/handlers/asgi/test_validations.py
--- a/tests/handlers/asgi/test_validations.py
+++ b/tests/handlers/asgi/test_validations.py
@@ -2,7 +2,7 @@
import pytest
-from litestar import asgi
+from litestar import Litestar, asgi
from litestar.exceptions import ImproperlyConfiguredException
from litestar.testing import create_test_client
@@ -15,25 +15,25 @@ async def fn_without_scope_arg(receive: "Receive", send: "Send") -> None:
pass
with pytest.raises(ImproperlyConfiguredException):
- asgi(path="/")(fn_without_scope_arg).on_registration()
+ asgi(path="/")(fn_without_scope_arg).on_registration(Litestar())
async def fn_without_receive_arg(scope: "Scope", send: "Send") -> None:
pass
with pytest.raises(ImproperlyConfiguredException):
- asgi(path="/")(fn_without_receive_arg).on_registration()
+ asgi(path="/")(fn_without_receive_arg).on_registration(Litestar())
async def fn_without_send_arg(scope: "Scope", receive: "Receive") -> None:
pass
with pytest.raises(ImproperlyConfiguredException):
- asgi(path="/")(fn_without_send_arg).on_registration()
+ asgi(path="/")(fn_without_send_arg).on_registration(Litestar())
async def fn_with_return_annotation(scope: "Scope", receive: "Receive", send: "Send") -> dict:
return {}
with pytest.raises(ImproperlyConfiguredException):
- asgi(path="/")(fn_with_return_annotation).on_registration()
+ asgi(path="/")(fn_with_return_annotation).on_registration(Litestar())
asgi_handler_with_no_fn = asgi(path="/")
@@ -44,4 +44,4 @@ def sync_fn(scope: "Scope", receive: "Receive", send: "Send") -> None:
return None
with pytest.raises(ImproperlyConfiguredException):
- asgi(path="/")(sync_fn).on_registration() # type: ignore
+ asgi(path="/")(sync_fn).on_registration(Litestar()) # type: ignore
diff --git a/tests/handlers/http/test_head.py b/tests/handlers/http/test_head.py
--- a/tests/handlers/http/test_head.py
+++ b/tests/handlers/http/test_head.py
@@ -2,7 +2,7 @@
import pytest
-from litestar import HttpMethod, head
+from litestar import HttpMethod, Litestar, head
from litestar.exceptions import ImproperlyConfiguredException
from litestar.response import FileResponse
from litestar.response_containers import File
@@ -27,7 +27,7 @@ def test_head_decorator_raises_validation_error_if_body_is_declared() -> None:
def handler() -> dict:
return {}
- handler.on_registration()
+ handler.on_registration(Litestar())
def test_head_decorator_raises_validation_error_if_method_is_passed() -> None:
@@ -37,7 +37,7 @@ def test_head_decorator_raises_validation_error_if_method_is_passed() -> None:
def handler() -> None:
return
- handler.on_registration()
+ handler.on_registration(Litestar())
def test_head_decorator_does_not_raise_for_file() -> None:
@@ -45,7 +45,7 @@ def test_head_decorator_does_not_raise_for_file() -> None:
def handler() -> File:
return File(path=Path("test_head.py"))
- handler.on_registration()
+ handler.on_registration(Litestar())
def test_head_decorator_does_not_raise_for_file_response() -> None:
@@ -53,4 +53,4 @@ def test_head_decorator_does_not_raise_for_file_response() -> None:
def handler() -> "FileResponse":
return FileResponse("test_to_response.py")
- handler.on_registration()
+ handler.on_registration(Litestar())
diff --git a/tests/handlers/http/test_media_type.py b/tests/handlers/http/test_media_type.py
--- a/tests/handlers/http/test_media_type.py
+++ b/tests/handlers/http/test_media_type.py
@@ -4,7 +4,7 @@
import pytest
from pydantic.types import PaymentCardBrand
-from litestar import MediaType, get
+from litestar import Litestar, MediaType, get
from tests import Person
@@ -34,5 +34,5 @@ def test_media_type_inference(annotation: Any, expected_media_type: MediaType) -
def handler() -> annotation:
return None
- handler.on_registration()
+ handler.on_registration(Litestar())
assert handler.media_type == expected_media_type
diff --git a/tests/handlers/http/test_validations.py b/tests/handlers/http/test_validations.py
--- a/tests/handlers/http/test_validations.py
+++ b/tests/handlers/http/test_validations.py
@@ -3,7 +3,7 @@
import pytest
-from litestar import HttpMethod, MediaType, WebSocket, delete, get, route
+from litestar import HttpMethod, Litestar, MediaType, WebSocket, delete, get, route
from litestar.exceptions import ImproperlyConfiguredException, ValidationException
from litestar.handlers.http_handlers import HTTPRouteHandler
from litestar.response_containers import File, Redirect
@@ -41,7 +41,7 @@ async def test_function_validation() -> None:
def method_with_no_annotation(): # type: ignore
pass
- method_with_no_annotation.on_registration()
+ method_with_no_annotation.on_registration(Litestar())
with pytest.raises(ImproperlyConfiguredException):
@@ -49,7 +49,7 @@ def method_with_no_annotation(): # type: ignore
def method_with_no_content() -> Dict[str, str]:
return {}
- method_with_no_content.on_registration()
+ method_with_no_content.on_registration(Litestar())
with pytest.raises(ImproperlyConfiguredException):
@@ -57,7 +57,7 @@ def method_with_no_content() -> Dict[str, str]:
def method_with_not_modified() -> Dict[str, str]:
return {}
- method_with_not_modified.on_registration()
+ method_with_not_modified.on_registration(Litestar())
with pytest.raises(ImproperlyConfiguredException):
@@ -65,19 +65,19 @@ def method_with_not_modified() -> Dict[str, str]:
def method_with_status_lower_than_200() -> Dict[str, str]:
return {}
- method_with_status_lower_than_200.on_registration()
+ method_with_status_lower_than_200.on_registration(Litestar())
@get(path="/", status_code=HTTP_307_TEMPORARY_REDIRECT)
def redirect_method() -> Redirect:
return Redirect("/test")
- redirect_method.on_registration()
+ redirect_method.on_registration(Litestar())
@get(path="/")
def file_method() -> File:
return File(path=Path("."), filename="test_validations.py")
- file_method.on_registration()
+ file_method.on_registration(Litestar())
assert file_method.media_type == MediaType.TEXT
@@ -87,7 +87,7 @@ def file_method() -> File:
def test_function_1(socket: WebSocket) -> None:
return None
- test_function_1.on_registration()
+ test_function_1.on_registration(Litestar())
with pytest.raises(ImproperlyConfiguredException):
@@ -95,4 +95,4 @@ def test_function_1(socket: WebSocket) -> None:
def test_function_2(self, data: Person) -> None: # type: ignore
return None
- test_function_2.on_registration()
+ test_function_2.on_registration(Litestar())
diff --git a/tests/handlers/websocket/test_validations.py b/tests/handlers/websocket/test_validations.py
--- a/tests/handlers/websocket/test_validations.py
+++ b/tests/handlers/websocket/test_validations.py
@@ -2,7 +2,7 @@
import pytest
-from litestar import WebSocket, websocket
+from litestar import Litestar, WebSocket, websocket
from litestar.exceptions import ImproperlyConfiguredException
from litestar.testing import create_test_client
@@ -12,7 +12,7 @@ def fn_without_socket_arg(websocket: WebSocket) -> None:
pass
with pytest.raises(ImproperlyConfiguredException):
- websocket(path="/")(fn_without_socket_arg).on_registration() # type: ignore
+ websocket(path="/")(fn_without_socket_arg).on_registration(Litestar()) # type: ignore
def test_raises_for_return_annotation() -> None:
@@ -20,7 +20,7 @@ async def fn_with_return_annotation(socket: WebSocket) -> dict:
return {}
with pytest.raises(ImproperlyConfiguredException):
- websocket(path="/")(fn_with_return_annotation).on_registration()
+ websocket(path="/")(fn_with_return_annotation).on_registration(Litestar())
def test_raises_when_no_function() -> None:
@@ -37,7 +37,7 @@ def test_raises_when_sync_handler_user() -> None:
def sync_websocket_handler(socket: WebSocket) -> None:
...
- sync_websocket_handler.on_registration()
+ sync_websocket_handler.on_registration(Litestar())
def test_raises_when_data_kwarg_is_used() -> None:
@@ -47,7 +47,7 @@ def test_raises_when_data_kwarg_is_used() -> None:
async def websocket_handler_with_data_kwarg(socket: WebSocket, data: Any) -> None:
...
- websocket_handler_with_data_kwarg.on_registration()
+ websocket_handler_with_data_kwarg.on_registration(Litestar())
def test_raises_when_request_kwarg_is_used() -> None:
@@ -57,7 +57,7 @@ def test_raises_when_request_kwarg_is_used() -> None:
async def websocket_handler_with_request_kwarg(socket: WebSocket, request: Any) -> None:
...
- websocket_handler_with_request_kwarg.on_registration()
+ websocket_handler_with_request_kwarg.on_registration(Litestar())
def test_raises_when_body_kwarg_is_used() -> None:
@@ -67,4 +67,4 @@ def test_raises_when_body_kwarg_is_used() -> None:
async def websocket_handler_with_request_kwarg(socket: WebSocket, body: bytes) -> None:
...
- websocket_handler_with_request_kwarg.on_registration()
+ websocket_handler_with_request_kwarg.on_registration(Litestar())
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
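A side note on this record's patch: `on_registration` now receives the `Litestar` app, so per-handler setup such as signature-model creation no longer lives on the application class. A minimal sketch of a subclass hook under the new signature (the subclass itself is invented for illustration):
```python
from litestar import Litestar
from litestar.handlers.http_handlers import HTTPRouteHandler


class AuditedRouteHandler(HTTPRouteHandler):
    """Invented subclass, shown only to illustrate the new hook signature."""

    def on_registration(self, app: Litestar) -> None:
        # The app is now in scope at registration time, so handler-level
        # concerns (signature model, runtime callables) are set up here.
        super().on_registration(app)
```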
| I agree. Do you want to submit a PR? | 2023-04-19T11:47:20 |
litestar-org/litestar | 1,557 | litestar-org__litestar-1557 | [
"4321",
"1234"
] | 569b854ade0a46a51f23d446d406667daa73d20c | diff --git a/litestar/exceptions/base_exceptions.py b/litestar/exceptions/base_exceptions.py
--- a/litestar/exceptions/base_exceptions.py
+++ b/litestar/exceptions/base_exceptions.py
@@ -29,7 +29,7 @@ def __str__(self) -> str:
return " ".join(self.args).strip()
-class MissingDependencyException(LitestarException):
+class MissingDependencyException(LitestarException, ImportError):
"""Missing optional dependency.
This exception is raised only when a module depends on a dependency that has not been installed.
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
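A quick illustration of what the one-line change in this record's patch enables: code that guards optional imports with `except ImportError` now also catches litestar's own missing-dependency error. The extra name below is a placeholder:
```python
from litestar.exceptions import MissingDependencyException

try:
    raise MissingDependencyException("some-extra")  # placeholder package name
except ImportError as exc:
    # Passes only with the patch applied: the exception is now also an ImportError.
    assert isinstance(exc, MissingDependencyException)
```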
| I agree. Do you want to submit a PR? | 2023-04-20T09:17:41 |
|
litestar-org/litestar | 1,572 | litestar-org__litestar-1572 | [
"1571",
"4321"
] | 17659bf229b63c9e5b32fb78b5ff54acaa042fe2 | diff --git a/docs/examples/websockets/setting_custom_connection_headers.py b/docs/examples/websockets/setting_custom_connection_headers.py
new file mode 100644
--- /dev/null
+++ b/docs/examples/websockets/setting_custom_connection_headers.py
@@ -0,0 +1,13 @@
+from litestar import Litestar, WebSocket, websocket_listener
+
+
+async def accept_connection(socket: WebSocket) -> None:
+ await socket.accept(headers={"Cookie": "custom-cookie"})
+
+
+@websocket_listener("/", connection_accept_handler=accept_connection)
+def handler(data: str) -> str:
+ return data
+
+
+app = Litestar([handler])
diff --git a/litestar/handlers/websocket_handlers/_utils.py b/litestar/handlers/websocket_handlers/_utils.py
--- a/litestar/handlers/websocket_handlers/_utils.py
+++ b/litestar/handlers/websocket_handlers/_utils.py
@@ -113,9 +113,10 @@ def create_handler_function(
listener_context: ListenerContext,
on_accept: AsyncCallable | None,
on_disconnect: AsyncCallable | None,
+ accept_connection_handler: Callable[[WebSocket], Coroutine[Any, Any, None]],
) -> Callable[..., Coroutine[None, None, None]]:
async def handler_fn(socket: WebSocket, **kwargs: Any) -> None:
- await socket.accept()
+ await accept_connection_handler(socket)
listener_callback = AsyncCallable(listener_context.listener_callback)
ctx = ConnectionContext.from_connection(socket)
diff --git a/litestar/handlers/websocket_handlers/listener.py b/litestar/handlers/websocket_handlers/listener.py
--- a/litestar/handlers/websocket_handlers/listener.py
+++ b/litestar/handlers/websocket_handlers/listener.py
@@ -7,6 +7,7 @@
from msgspec.json import Encoder as JsonEncoder
from litestar._signature import create_signature_model
+from litestar.connection import WebSocket
from litestar.dto.interface import HandlerContext
from litestar.exceptions import ImproperlyConfiguredException
from litestar.serialization import default_serializer
@@ -29,7 +30,9 @@
from .route_handler import WebsocketRouteHandler
if TYPE_CHECKING:
- from litestar import Litestar, WebSocket
+ from typing import Coroutine
+
+ from litestar import Litestar
from litestar.dto.interface import DTOInterface
from litestar.types.asgi_types import WebSocketMode
@@ -52,12 +55,14 @@ class websocket_listener(WebsocketRouteHandler):
"_receive_mode",
"_send_mode",
"_listener_context",
+ "accept_connection_handler",
)
def __init__(
self,
path: str | None | list[str] | None = None,
*,
+ connection_accept_handler: Callable[[WebSocket], Coroutine[Any, Any, None]] = WebSocket.accept,
dependencies: Dependencies | None = None,
dto: type[DTOInterface] | None | EmptyType = Empty,
exception_handlers: dict[int | type[Exception], ExceptionHandler] | None = None,
@@ -79,6 +84,8 @@ def __init__(
Args:
path: A path fragment for the route handler function or a sequence of path fragments. If not given defaults
to ``/``
+ connection_accept_handler: A callable that accepts a :class:`WebSocket <.connection.WebSocket>` instance
+ and returns a coroutine that when awaited, will accept the connection. Defaults to ``WebSocket.accept``.
dependencies: A string keyed mapping of dependency :class:`Provider <.di.Provide>` instances.
dto: :class:`DTOInterface <.dto.interface.DTOInterface>` to use for (de)serializing and
validation of request data.
@@ -107,6 +114,7 @@ def __init__(
self._send_mode: WebSocketMode = send_mode
self._on_accept = AsyncCallable(on_accept) if on_accept else None
self._on_disconnect = AsyncCallable(on_disconnect) if on_disconnect else None
+ self.accept_connection_handler = connection_accept_handler
self.type_encoders = type_encoders
super().__init__(
@@ -146,6 +154,7 @@ def __call__(self, listener_callback: AnyCallable) -> websocket_listener:
listener_context=self._listener_context,
on_accept=self._on_accept,
on_disconnect=self._on_disconnect,
+ accept_connection_handler=self.accept_connection_handler,
)
return super().__call__(handler_function)
| diff --git a/tests/handlers/websocket/test_listeners.py b/tests/handlers/websocket/test_listeners.py
--- a/tests/handlers/websocket/test_listeners.py
+++ b/tests/handlers/websocket/test_listeners.py
@@ -262,3 +262,16 @@ def handler_body(data: str, body: bytes) -> None:
...
handler_body.on_registration(Litestar())
+
+
+def test_listener_accept_connection_callback() -> None:
+ async def accept_connection(socket: WebSocket) -> None:
+ await socket.accept(headers={"Cookie": "custom-cookie"})
+
+ @websocket_listener("/", connection_accept_handler=accept_connection)
+ def handler(data: bytes) -> None:
+ return None
+
+ client = create_test_client([handler])
+ with client.websocket_connect("/") as ws:
+ assert ws.extra_headers == [(b"cookie", b"custom-cookie")]
| Feature: Websockets: best way to accept() custom headers with `WebsocketListener` class
### Discussed in https://github.com/orgs/litestar-org/discussions/1552
<div type='discussions-op-text'>
<sup>Originally posted by **hadware** April 20, 2023</sup>
Hello,
I'm wondering if it's possible (or even relevant) to be using the `WebsocketListener` class if I want to send custom headers back to the client when accepting the connection.
The "old" way of doing this would be, IIRC, :
```python
from starlite import websocket, WebSocket
from starlite.exceptions import WebSocketDisconnect
@websocket("/")
async def handler(socket: WebSocket) -> str:
# for instance, here, sending a cookie to be set on the client
    await socket.accept(headers={'Cookie': 'custom-cookie'})
while True:
try:
data = await socket.receive_text()
await socket.send_text(data)
except WebSocketDisconnect:
break
```
In the new `WebsocketListener` handler, which fits much better with the OOP style encouraged by litestar (as per [the documentation](https://docs.litestar.dev/2/usage/websockets.html#class-based-websocket-handling)), there's an `on_accept(...)` method, but by the time this method is called by `litestar`, it's "already too late" to set any custom parameters for `websocket.accept()`.
Is there any way to do this? Is it even a good idea to use `WebsocketListener` in my case? Should I maybe open an issue/PR to suggest adding an optionally overridable method such as `on_connect` (the autobahn way) to `WebsocketListener`?
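The patch in this record answers this by adding a `connection_accept_handler` parameter to `websocket_listener`. A variant of the docs example added by the patch, with an illustrative header value:
```python
from litestar import WebSocket, websocket_listener


async def accept_connection(socket: WebSocket) -> None:
    # Full control over the accept call, including custom headers.
    await socket.accept(headers={"Set-Cookie": "session=abc"})  # illustrative value


@websocket_listener("/", connection_accept_handler=accept_connection)
def handler(data: str) -> str:
    return data
```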
| 2023-04-24T10:26:47 |
|
litestar-org/litestar | 1,574 | litestar-org__litestar-1574 | [
"4321",
"1234"
] | 40f1c4f57fd150c8b4896e3ddde502a6d803d946 | diff --git a/litestar/app.py b/litestar/app.py
--- a/litestar/app.py
+++ b/litestar/app.py
@@ -653,7 +653,7 @@ def url_for_static_asset(self, name: str, file_path: str) -> str:
.. code-block: python
from litestar import Litestar
- from litestar.config.static_files import StaticFilesConfig
+ from litestar.static_files.config import StaticFilesConfig
app = Litestar(
static_files_config=[StaticFilesConfig(directories=["css"], path="/static/css")]
diff --git a/litestar/middleware/logging.py b/litestar/middleware/logging.py
--- a/litestar/middleware/logging.py
+++ b/litestar/middleware/logging.py
@@ -340,7 +340,7 @@ def middleware(self) -> DefineMiddleware:
.. code-block: python
from litestar import Litestar, Request, get
- from litestar.config.logging import LoggingConfig
+ from litestar.logging import LoggingConfig
from litestar.middleware.logging import LoggingMiddlewareConfig
logging_config = LoggingConfig()
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
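The docstring fixes above point at the modules that actually export these names. For reference, a minimal snippet using the corrected import paths, matching the patched docstrings:
```python
from litestar import Litestar
from litestar.logging import LoggingConfig
from litestar.static_files.config import StaticFilesConfig

app = Litestar(
    route_handlers=[],
    logging_config=LoggingConfig(),
    # Assumes a local "css" directory exists, as in the docstring example.
    static_files_config=[StaticFilesConfig(directories=["css"], path="/static/css")],
)
```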
| I agree. Do you want to submit a PR? | 2023-04-25T08:59:53 |
|
litestar-org/litestar | 1,577 | litestar-org__litestar-1577 | [
"4321",
"1576"
] | 17659bf229b63c9e5b32fb78b5ff54acaa042fe2 | diff --git a/litestar/response/file.py b/litestar/response/file.py
--- a/litestar/response/file.py
+++ b/litestar/response/file.py
@@ -2,7 +2,7 @@
from email.utils import formatdate
from inspect import iscoroutine
-from mimetypes import guess_type
+from mimetypes import encodings_map, guess_type
from typing import TYPE_CHECKING, Any, AsyncGenerator, Coroutine, Literal, cast
from urllib.parse import quote
from zlib import adler32
@@ -28,6 +28,9 @@
from litestar.types import HTTPResponseBodyEvent, PathType, Receive, ResponseCookies, Send
from litestar.types.file_types import FileInfo, FileSystemProtocol
+# brotli not supported in 'mimetypes.encodings_map' until py 3.9.
+encodings_map[".br"] = "br"
+
async def async_file_iterator(
file_path: PathType, chunk_size: int, adapter: FileSystemAdapter
@@ -125,8 +128,11 @@ def __init__(
providing an :class:`os.stat_result`.
"""
if not media_type:
- mimetype, _ = guess_type(filename) if filename else (None, None)
+ mimetype, content_encoding = guess_type(filename) if filename else (None, None)
media_type = mimetype or "application/octet-stream"
+ if content_encoding is not None:
+ headers = headers or {}
+ headers.update({"content-encoding": content_encoding})
self.chunk_size = chunk_size
self.content_disposition_type = content_disposition_type
| diff --git a/tests/static_files/test_file_serving_resolution.py b/tests/static_files/test_file_serving_resolution.py
--- a/tests/static_files/test_file_serving_resolution.py
+++ b/tests/static_files/test_file_serving_resolution.py
@@ -1,7 +1,9 @@
+import gzip
import mimetypes
from pathlib import Path
from typing import TYPE_CHECKING
+import brotli
import pytest
from fsspec.implementations.local import LocalFileSystem
@@ -161,6 +163,25 @@ def test_static_files_response_mimetype(tmpdir: "Path", extension: str) -> None:
assert response.headers["content-type"].startswith(expected_mime_type)
[email protected]("extension", ["gz", "br"])
+def test_static_files_response_encoding(tmp_path: "Path", extension: str) -> None:
+ fn = f"test.js.{extension}"
+ path = tmp_path / fn
+ if extension == "br":
+ compressed_data = brotli.compress(b"content")
+ elif extension == "gz":
+ compressed_data = gzip.compress(b"content")
+ path.write_bytes(compressed_data) # pyright: ignore
+ static_files_config = StaticFilesConfig(path="/static", directories=[tmp_path])
+ expected_encoding_type = mimetypes.guess_type(fn)[1]
+
+ with create_test_client([], static_files_config=[static_files_config]) as client:
+ response = client.get(f"/static/{fn}")
+ assert expected_encoding_type
+ assert response.status_code == HTTP_200_OK
+ assert response.headers["content-encoding"].startswith(expected_encoding_type)
+
+
@pytest.mark.parametrize("send_as_attachment,disposition", [(True, "attachment"), (False, "inline")])
def test_static_files_content_disposition(tmpdir: "Path", send_as_attachment: bool, disposition: str) -> None:
path = tmpdir.mkdir("static_part").mkdir("static") / "test.txt" # type: ignore
| Bug: `FileResponse` doesn't set `content-encoding` headers on gzip/brotli compressed files
### Description
When using `StaticFilesConfig` to serve compressed files (think `styles.css.gz`), Litestar will happily serve the file and even properly infer the mimetype, but won't set the correct `content-encoding` header required by the browser.
I will provide a PR with a test.
### URL to code causing the issue
_No response_
### MCVE
_No response_
### Steps to reproduce
_No response_
### Screenshots
```bash
""
```
### Logs
_No response_
### Litestar Version
1.51.10
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| 2023-04-25T15:10:37 |
|
litestar-org/litestar | 1,581 | litestar-org__litestar-1581 | [
"4321",
"1576"
] | a8ee567f600da1e9279ecf4c004cfda75771a390 | diff --git a/starlite/response/file.py b/starlite/response/file.py
--- a/starlite/response/file.py
+++ b/starlite/response/file.py
@@ -1,6 +1,6 @@
from email.utils import formatdate
from inspect import iscoroutine
-from mimetypes import guess_type
+from mimetypes import encodings_map, guess_type
from typing import (
TYPE_CHECKING,
Any,
@@ -38,6 +38,9 @@
)
from starlite.types.file_types import FileInfo, FileSystemProtocol
+# brotli not supported in 'mimetypes.encodings_map' until py 3.9.
+encodings_map[".br"] = "br"
+
async def async_file_iterator(
file_path: "PathType", chunk_size: int, adapter: "FileSystemAdapter"
@@ -133,8 +136,11 @@ def __init__(
file_info: The output of calling ``file_system.info(..)``, equivalent to providing a ``stat_result``.
"""
if not media_type:
- mimetype, _ = guess_type(filename) if filename else (None, None)
+ mimetype, content_encoding = guess_type(filename) if filename else (None, None)
media_type = mimetype or "application/octet-stream"
+ if content_encoding is not None:
+ headers = headers or {}
+ headers.update({"content-encoding": content_encoding})
self.chunk_size = chunk_size
self.content_disposition_type = content_disposition_type
| diff --git a/tests/plugins/piccolo_orm/endpoints.py b/tests/plugins/piccolo_orm/endpoints.py
--- a/tests/plugins/piccolo_orm/endpoints.py
+++ b/tests/plugins/piccolo_orm/endpoints.py
@@ -5,7 +5,7 @@
from starlite import get, post
from tests.plugins.piccolo_orm.tables import Concert, RecordingStudio, Venue
-studio = cast("RecordingStudio", ModelBuilder.build_sync(RecordingStudio, persist=False))
+studio = ModelBuilder.build_sync(RecordingStudio, persist=False)
venues = cast("List[Venue]", [ModelBuilder.build_sync(Venue, persist=False) for _ in range(3)])
diff --git a/tests/static_files/test_file_serving_resolution.py b/tests/static_files/test_file_serving_resolution.py
--- a/tests/static_files/test_file_serving_resolution.py
+++ b/tests/static_files/test_file_serving_resolution.py
@@ -1,6 +1,8 @@
+import gzip
import mimetypes
from typing import TYPE_CHECKING
+import brotli
import pytest
from fsspec.implementations.local import LocalFileSystem
@@ -162,6 +164,25 @@ def test_static_files_response_mimetype(tmpdir: "Path", extension: str) -> None:
assert response.headers["content-type"].startswith(expected_mime_type)
[email protected]("extension", ["gz", "br"])
+def test_static_files_response_encoding(tmp_path: "Path", extension: str) -> None:
+ fn = f"test.js.{extension}"
+ path = tmp_path / fn
+ if extension == "br":
+ compressed_data = brotli.compress(b"content")
+ elif extension == "gz":
+ compressed_data = gzip.compress(b"content")
+ path.write_bytes(compressed_data) # pyright: ignore
+ static_files_config = StaticFilesConfig(path="/static", directories=[tmp_path])
+ expected_encoding_type = mimetypes.guess_type(fn)[1]
+
+ with create_test_client([], static_files_config=[static_files_config]) as client:
+ response = client.get(f"/static/{fn}")
+ assert expected_encoding_type
+ assert response.status_code == HTTP_200_OK
+ assert response.headers["content-encoding"].startswith(expected_encoding_type)
+
+
@pytest.mark.parametrize("send_as_attachment,disposition", [(True, "attachment"), (False, "inline")])
def test_static_files_content_disposition(tmpdir: "Path", send_as_attachment: bool, disposition: str) -> None:
path = tmpdir.mkdir("static_part").mkdir("static") / "test.txt" # type: ignore
| Bug: `FileResponse` doesn't set `content-encoding` headers on gzip/brotli compressed files
### Description
When using `StaticFilesConfig` to serve compressed files (think `styles.css.gz`), Litestar will happily serve the file and even properly infer the mimetype, but won't set the correct `content-encoding` header required by the browser.
I will provide a PR with a test.
### URL to code causing the issue
_No response_
### MCVE
_No response_
### Steps to reproduce
_No response_
### Screenshots
```bash
""
```
### Logs
_No response_
### Litestar Version
1.51.10
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| 2023-04-26T12:23:21 |
|
litestar-org/litestar | 1,582 | litestar-org__litestar-1582 | [
"4321",
"1234"
] | 4a3d05d31383af5cf53bfef01d69fca1540df92d | diff --git a/litestar/contrib/sqlalchemy/base.py b/litestar/contrib/sqlalchemy/base.py
--- a/litestar/contrib/sqlalchemy/base.py
+++ b/litestar/contrib/sqlalchemy/base.py
@@ -24,7 +24,15 @@
if TYPE_CHECKING:
from sqlalchemy.sql import FromClause
-__all__ = ("AuditBase", "AuditColumns", "Base", "CommonTableAttributes", "UUIDPrimaryKey", "touch_updated_timestamp")
+__all__ = (
+ "AuditBase",
+ "AuditColumns",
+ "Base",
+ "CommonTableAttributes",
+ "ModelProtocol",
+ "touch_updated_timestamp",
+ "UUIDPrimaryKey",
+)
BaseT = TypeVar("BaseT", bound="Base")
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-04-26T12:42:37 |
|
litestar-org/litestar | 1,595 | litestar-org__litestar-1595 | [
"4321",
"1234"
] | 9d81c78c356823da02fbc98b4a9fb353a0455435 | diff --git a/litestar/dto/factory/_backends/abc.py b/litestar/dto/factory/_backends/abc.py
--- a/litestar/dto/factory/_backends/abc.py
+++ b/litestar/dto/factory/_backends/abc.py
@@ -10,7 +10,13 @@
from litestar._signature.field import SignatureField
from .types import NestedFieldDefinition, TransferFieldDefinition
-from .utils import RenameStrategies, build_annotation_for_backend, get_model_type, should_exclude_field
+from .utils import (
+ RenameStrategies,
+ build_annotation_for_backend,
+ generate_reverse_name_map,
+ get_model_type,
+ should_exclude_field,
+)
if TYPE_CHECKING:
from typing import AbstractSet, Any, Callable, Final, Generator
@@ -89,9 +95,7 @@ def __init__(self, context: BackendContext) -> None:
"""
self.context = context
self.parsed_field_definitions = self.parse_model(context.model_type, context.config.exclude)
- self.reverse_name_map = {
- f.serialization_name: f.name for f in self.parsed_field_definitions.values() if f.serialization_name
- }
+ self.reverse_name_map = generate_reverse_name_map(self.parsed_field_definitions)
self.data_container_type = self.create_data_container_type(context)
self.annotation = build_annotation_for_backend(context.parsed_type.annotation, self.data_container_type)
diff --git a/litestar/dto/factory/_backends/msgspec/utils.py b/litestar/dto/factory/_backends/msgspec/utils.py
--- a/litestar/dto/factory/_backends/msgspec/utils.py
+++ b/litestar/dto/factory/_backends/msgspec/utils.py
@@ -144,22 +144,23 @@ def _build_struct_from_model(model: Any, struct_type: type[StructT], reverse_nam
model_name = reverse_name_map.get(key, key)
model_val = getattr(model, model_name)
if parsed_type.is_subclass_of(Struct):
- data[key] = _build_struct_from_model(model_val, parsed_type.annotation, {})
+ data[key] = _build_struct_from_model(model_val, parsed_type.annotation, reverse_name_map)
elif parsed_type.is_union:
- data[key] = _handle_union_type(parsed_type, model_val)
+ data[key] = _handle_union_type(parsed_type, model_val, reverse_name_map)
elif parsed_type.is_collection:
- data[key] = _handle_collection_type(parsed_type, model_val)
+ data[key] = _handle_collection_type(parsed_type, model_val, reverse_name_map)
else:
data[key] = model_val
return struct_type(**data)
-def _handle_union_type(parsed_type: ParsedType, model_val: Any) -> Any:
+def _handle_union_type(parsed_type: ParsedType, model_val: Any, reverse_name_map: dict[str, str]) -> Any:
"""Handle union type.
Args:
parsed_type: Parsed type.
model_val: Model value.
+ reverse_name_map: reverse name map for field definitions.
Returns:
Model value.
@@ -172,22 +173,25 @@ def _handle_union_type(parsed_type: ParsedType, model_val: Any) -> Any:
# for the nested model type instance. For the most likely case of an optional union of a single
# nested type, this should be sufficient.
try:
- return _build_struct_from_model(model_val, inner_type.annotation, {})
+ return _build_struct_from_model(model_val, inner_type.annotation, reverse_name_map)
except (AttributeError, TypeError):
continue
return model_val
-def _handle_collection_type(parsed_type: ParsedType, model_val: Any) -> Any:
+def _handle_collection_type(parsed_type: ParsedType, model_val: Any, reverse_name_map: dict[str, str]) -> Any:
"""Handle collection type.
Args:
parsed_type: Parsed type.
model_val: Model value.
+ reverse_name_map: reverse name map for field definitions.
Returns:
Model value.
"""
if parsed_type.inner_types and (inner_type := parsed_type.inner_types[0]).is_subclass_of(Struct):
- return parsed_type.origin(_build_struct_from_model(m, inner_type.annotation, {}) for m in model_val)
+ return parsed_type.origin(
+ _build_struct_from_model(m, inner_type.annotation, reverse_name_map) for m in model_val
+ )
return model_val
diff --git a/litestar/dto/factory/_backends/utils.py b/litestar/dto/factory/_backends/utils.py
--- a/litestar/dto/factory/_backends/utils.py
+++ b/litestar/dto/factory/_backends/utils.py
@@ -8,12 +8,16 @@
from litestar.types.builtin_types import NoneType
from litestar.utils.signature import ParsedType
+from .types import TransferFieldDefinition
+
if TYPE_CHECKING:
from typing import AbstractSet, Any, Iterable
from litestar.dto.factory.types import FieldDefinition, RenameStrategy
from litestar.dto.types import ForType
+ from .types import FieldDefinitionsType, NestedFieldDefinition
+
__all__ = (
"RenameStrategies",
"build_annotation_for_backend",
@@ -123,3 +127,22 @@ def _camelize(string: str, capitalize_first_letter: bool = False) -> str:
word if index == 0 and not capitalize_first_letter else word.capitalize()
for index, word in enumerate(string.split("_"))
)
+
+
+def generate_reverse_name_map(field_definitions: FieldDefinitionsType) -> dict[str, str]:
+ result = {}
+ for field_definition in field_definitions.values():
+ result.update(_generate_reverse_name_map(field_definition))
+
+ return result
+
+
+def _generate_reverse_name_map(field_definition: TransferFieldDefinition | NestedFieldDefinition) -> dict[str, str]:
+ if isinstance(field_definition, TransferFieldDefinition):
+ return (
+ {field_definition.serialization_name: field_definition.name} if field_definition.serialization_name else {}
+ )
+
+ return generate_reverse_name_map(
+ {field_definition.name: field_definition.field_definition, **field_definition.nested_field_definitions}
+ )
| diff --git a/tests/contrib/sqlalchemy/test_dto_integration.py b/tests/contrib/sqlalchemy/test_dto_integration.py
new file mode 100644
--- /dev/null
+++ b/tests/contrib/sqlalchemy/test_dto_integration.py
@@ -0,0 +1,136 @@
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, List, Tuple
+
+import pytest
+from sqlalchemy import ForeignKey, String
+from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, mapped_column, relationship
+from typing_extensions import Annotated
+
+from litestar import get, post
+from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
+from litestar.dto.factory import DTOConfig
+from litestar.dto.factory._backends.utils import RenameStrategies
+from litestar.dto.factory.types import RenameStrategy
+from litestar.testing import create_test_client
+
+
+class Base(DeclarativeBase):
+ id: Mapped[str] = mapped_column(primary_key=True)
+
+ # noinspection PyMethodParameters
+ @declared_attr.directive
+ def __tablename__(cls) -> str: # pylint: disable=no-self-argument
+ """Infer table name from class name."""
+ return cls.__name__.lower()
+
+
+class Author(Base):
+ name: Mapped[str] = mapped_column(default="Arthur")
+ date_of_birth: Mapped[str] = mapped_column(nullable=True)
+
+
+class BookReview(Base):
+ review: Mapped[str]
+ book_id: Mapped[str] = mapped_column(ForeignKey("book.id"), default="000")
+
+
+class Book(Base):
+ title: Mapped[str] = mapped_column(String(length=250), default="Hi")
+ author_id: Mapped[str] = mapped_column(ForeignKey("author.id"), default="123")
+ first_author: Mapped[Author] = relationship(lazy="joined", innerjoin=True)
+ reviews: Mapped[List[BookReview]] = relationship(lazy="joined", innerjoin=True)
+ bar: Mapped[str] = mapped_column(default="Hello")
+ SPAM: Mapped[str] = mapped_column(default="Bye")
+ spam_bar: Mapped[str] = mapped_column(default="Goodbye")
+
+
+@dataclass
+class BookAuthorTestData:
+ book_id: str = "000"
+ book_title: str = "TDD Python"
+ book_author_id: str = "123"
+ book_author_name: str = "Harry Percival"
+ book_author_date_of_birth: str = "01/01/1900"
+ book_bar: str = "Hi"
+ book_SPAM: str = "Bye"
+ book_spam_bar: str = "GoodBye"
+ book_review_id: str = "23432"
+ book_review: str = "Excellent!"
+
+
[email protected]
+def book_json_data() -> Callable[[RenameStrategy, BookAuthorTestData], Tuple[Dict[str, Any], Book]]:
+ def _generate(rename_strategy: RenameStrategy, test_data: BookAuthorTestData) -> Tuple[Dict[str, Any], Book]:
+ data: Dict[str, Any] = {
+ RenameStrategies(rename_strategy)("id"): test_data.book_id,
+ RenameStrategies(rename_strategy)("title"): test_data.book_title,
+ RenameStrategies(rename_strategy)("author_id"): test_data.book_author_id,
+ RenameStrategies(rename_strategy)("bar"): test_data.book_bar,
+ RenameStrategies(rename_strategy)("SPAM"): test_data.book_SPAM,
+ RenameStrategies(rename_strategy)("spam_bar"): test_data.book_spam_bar,
+ RenameStrategies(rename_strategy)("first_author"): {
+ RenameStrategies(rename_strategy)("id"): test_data.book_author_id,
+ RenameStrategies(rename_strategy)("name"): test_data.book_author_name,
+ RenameStrategies(rename_strategy)("date_of_birth"): test_data.book_author_date_of_birth,
+ },
+ RenameStrategies(rename_strategy)("reviews"): [
+ {
+ RenameStrategies(rename_strategy)("book_id"): test_data.book_id,
+ RenameStrategies(rename_strategy)("id"): test_data.book_review_id,
+ RenameStrategies(rename_strategy)("review"): test_data.book_review,
+ }
+ ],
+ }
+ book = Book(
+ id=test_data.book_id,
+ title=test_data.book_title,
+ author_id=test_data.book_author_id,
+ bar=test_data.book_bar,
+ SPAM=test_data.book_SPAM,
+ spam_bar=test_data.book_spam_bar,
+ first_author=Author(
+ id=test_data.book_author_id,
+ name=test_data.book_author_name,
+ date_of_birth=test_data.book_author_date_of_birth,
+ ),
+ reviews=[
+ BookReview(id=test_data.book_review_id, review=test_data.book_review, book_id=test_data.book_id),
+ ],
+ )
+ return data, book
+
+ return _generate
+
+
[email protected](
+ "rename_strategy",
+ [
+ ("camel"),
+ ],
+)
+def test_fields_alias_generator_sqlalchemy(
+ rename_strategy: RenameStrategy,
+ book_json_data: Callable[[RenameStrategy, BookAuthorTestData], Tuple[Dict[str, Any], Book]],
+) -> None:
+ test_data = BookAuthorTestData()
+ json_data, instance = book_json_data(rename_strategy, test_data)
+ config = DTOConfig(rename_strategy=rename_strategy)
+ dto = SQLAlchemyDTO[Annotated[Book, config]]
+
+ @post(dto=dto, signature_namespace={"Book": Book})
+ def post_handler(data: Book) -> Book:
+ return data
+
+ @get(dto=dto, signature_namespace={"Book": Book})
+ def get_handler() -> Book:
+ return instance
+
+ with create_test_client(
+ route_handlers=[post_handler, get_handler],
+ debug=True,
+ ) as client:
+ response_callback = client.get("/")
+ assert response_callback.json() == json_data
+
+ response_callback = client.post("/", json=json_data)
+ assert response_callback.json() == json_data
diff --git a/tests/dto/factory/test_integration.py b/tests/dto/factory/test_integration.py
--- a/tests/dto/factory/test_integration.py
+++ b/tests/dto/factory/test_integration.py
@@ -1,6 +1,8 @@
+# ruff: noqa: UP007
from __future__ import annotations
from dataclasses import dataclass, field
+from typing import Optional
import pytest
from typing_extensions import Annotated
@@ -76,11 +78,17 @@ def handler(data: Foo) -> Foo:
assert response.json() == {"baz": "hello"}
+@dataclass
+class Spam:
+ main_id: str = "spam-id"
+
+
@dataclass
class Foo:
bar: str = "hello"
SPAM: str = "bye"
spam_bar: str = "welcome"
+ spam_model: Optional[Spam] = None
@pytest.mark.parametrize(
@@ -91,6 +99,7 @@ class Foo:
(lambda x: x[::-1], Foo(bar="h", SPAM="bye!"), ["rab", "MAPS"], {"rab": "h", "MAPS": "bye!"}),
("camel", Foo(spam_bar="star"), ["spamBar"], {"spamBar": "star"}),
("pascal", Foo(spam_bar="star"), ["SpamBar"], {"SpamBar": "star"}),
+ ("camel", Foo(spam_model=Spam()), ["spamModel"], {"spamModel": {"mainId": "spam-id"}}),
],
)
def test_fields_alias_generator(
@@ -108,11 +117,6 @@ def handler(data: Foo) -> Foo:
assert data.SPAM == instance.SPAM
return data
- with create_test_client(
- route_handlers=[
- handler,
- ],
- debug=True,
- ) as client:
+ with create_test_client(route_handlers=[handler], debug=True) as client:
response_callback = client.post("/", json=data)
assert all([response_callback.json()[f] == data[f] for f in tested_fields])
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
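For concreteness, here is a rough sketch of the kind of package-data file system this would enable. The ``open``/``info`` method names below are assumptions modelled on the fsspec-style surface that ``FileSystemProtocol`` mirrors, not verified signatures:
```python
from __future__ import annotations

from typing import IO, Any

import importlib_resources


class PackageFileSystem:
    """Hypothetical adapter serving files bundled in an installed (possibly zipped) package."""

    def __init__(self, package: str) -> None:
        self._root = importlib_resources.files(package)

    def open(self, path: str, mode: str = "rb", **kwargs: Any) -> IO[Any]:
        # Traversable.open() works even when the package lives inside a zip file,
        # where a plain ``DirectoryPath`` existence check would already have failed.
        return self._root.joinpath(path).open(mode)

    def info(self, path: str, **kwargs: Any) -> dict[str, Any]:
        resource = self._root.joinpath(path)
        return {"name": path, "type": "file" if resource.is_file() else "directory"}
```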
| I agree. Do you want to submit a PR? | 2023-04-29T00:08:06 |
litestar-org/litestar | 1,600 | litestar-org__litestar-1600 | [
"4321",
"1234"
] | a8d4e6f920e81f752caccf2d981cc01d3247a27c | diff --git a/starlite/plugins/sql_alchemy/plugin.py b/starlite/plugins/sql_alchemy/plugin.py
--- a/starlite/plugins/sql_alchemy/plugin.py
+++ b/starlite/plugins/sql_alchemy/plugin.py
@@ -77,9 +77,9 @@ def on_app_init(self, app: "Starlite") -> None:
if self._config is not None:
app.dependencies[self._config.dependency_key] = Provide(self._config.create_db_session_dependency)
app.before_send.append(self._config.before_send_handler) # type: ignore[arg-type]
+ app.on_startup.append(self._config.update_app_state)
app.on_shutdown.append(self._config.on_shutdown)
self._config.config_sql_alchemy_logging(app.logging_config)
- self._config.update_app_state(state=app.state)
@staticmethod
def is_plugin_supported_type(value: Any) -> "TypeGuard[DeclarativeMeta]":
| diff --git a/tests/plugins/sql_alchemy_plugin/test_sql_alchemy_config.py b/tests/plugins/sql_alchemy_plugin/test_sql_alchemy_config.py
--- a/tests/plugins/sql_alchemy_plugin/test_sql_alchemy_config.py
+++ b/tests/plugins/sql_alchemy_plugin/test_sql_alchemy_config.py
@@ -23,17 +23,17 @@
@pytest.mark.parametrize("connection_string", ["sqlite+aiosqlite://", "sqlite://"])
def test_sets_engine_and_session_maker(connection_string: str) -> None:
config = SQLAlchemyConfig(connection_string=connection_string, use_async_engine="+aiosqlite" in connection_string)
- app = Starlite(route_handlers=[], plugins=[SQLAlchemyPlugin(config=config)])
- assert app.state.get(config.engine_app_state_key)
- assert app.state.get(config.session_maker_app_state_key)
+ with create_test_client([], plugins=[SQLAlchemyPlugin(config=config)]) as client:
+ assert client.app.state.get(config.engine_app_state_key)
+ assert client.app.state.get(config.session_maker_app_state_key)
@pytest.mark.parametrize("connection_string", ["sqlite+aiosqlite://", "sqlite://"])
def test_dependency_creates_session(connection_string: str) -> None:
config = SQLAlchemyConfig(connection_string=connection_string, use_async_engine="+aiosqlite" in connection_string)
- app = Starlite(route_handlers=[], plugins=[SQLAlchemyPlugin(config=config)])
- request = RequestFactory().get()
- session = config.create_db_session_dependency(state=app.state, scope=request.scope)
+ with create_test_client([], plugins=[SQLAlchemyPlugin(config=config)]) as client:
+ request = RequestFactory().get()
+ session = config.create_db_session_dependency(state=client.app.state, scope=request.scope)
assert session
assert request.scope[SESSION_SCOPE_KEY] # type: ignore
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-01T16:00:21 |
litestar-org/litestar | 1,605 | litestar-org__litestar-1605 | [
"4321",
"1234"
] | 37e4f9ccbf691b10ba84740f8665c090f867a250 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -64,7 +64,7 @@
suppress_warnings = ["autosectionlabel.*"]
-html_theme = "starlite_sphinx_theme"
+html_theme = "litestar_sphinx_theme"
html_static_path = ["_static"]
html_js_files = ["versioning.js"]
html_css_files = ["style.css"]
@@ -74,23 +74,41 @@
html_theme_options = {
"use_page_nav": False,
- "github_repo_name": "starlite",
+ "github_repo_name": "litestar",
"logo": {
- "link": "https://starliteproject.dev",
+ "link": "https://litestar.dev",
},
"extra_navbar_items": {
"Documentation": "index",
"Community": {
- "Contribution Guide": "contribution-guide",
- "Code of Conduct": "https://github.com/starlite-api/.github/blob/main/CODE_OF_CONDUCT.md",
+ "Contributing": {
+ "description": "Learn how to contribute to the Litestar project",
+ "link": "https://docs.litestar.dev/2/contribution-guide.html",
+ "icon": "contributing",
+ },
+ "Code of Conduct": {
+ "description": "Review the etiquette for interacting with the Litestar community",
+ "link": "https://github.com/litestar-org/.github/blob/main/CODE_OF_CONDUCT.md",
+ "icon": "coc",
+ },
},
"About": {
- "Organization": "https://starliteproject.dev/about/organization",
- "Releases": "https://starliteproject.dev/about/starlite-releases",
+ "Litestar Organization": {
+ "description": "Details about the Litestar organization",
+ "link": "about/organization",
+ "icon": "org",
+ },
+ "Releases": {
+ "description": "Details about the Litestar release process",
+ "link": "about/litestar-releases",
+ "icon": "releases",
+ },
},
"Release notes": {
- "1.x Changelog": "https://docs.starliteproject.dev/1/release-notes/changelog.html",
+ "2.x Changelog": "https://docs.litestar.dev/2/release-notes/changelog.html",
+ "1.x Changelog": "https://docs.litestar.dev/1/release-notes/changelog.html",
},
+ "Help": "https://github.com/orgs/litestar-org/discussions",
},
}
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-02T08:06:56 |
|
litestar-org/litestar | 1,610 | litestar-org__litestar-1610 | [
"4321",
"1234"
] | a9ba76e1b67031a527cfef5f274b5b239c19466b | diff --git a/litestar/connection/base.py b/litestar/connection/base.py
--- a/litestar/connection/base.py
+++ b/litestar/connection/base.py
@@ -287,7 +287,7 @@ def clear_session(self) -> None:
"""
self.scope["session"] = Empty
- def url_for(self, name: str, **path_parameters: dict[str, Any]) -> str:
+ def url_for(self, name: str, **path_parameters: Any) -> str:
"""Return the url for a given route handler name.
Args:
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-03T04:59:35 |
|
litestar-org/litestar | 1,618 | litestar-org__litestar-1618 | [
"4321",
"1586"
] | 1d9ee3337c3fcad2cf902e8ef87403c276c0df72 | diff --git a/litestar/contrib/repository/handlers.py b/litestar/contrib/repository/handlers.py
new file mode 100644
--- /dev/null
+++ b/litestar/contrib/repository/handlers.py
@@ -0,0 +1,22 @@
+from typing import TYPE_CHECKING
+
+from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, LimitOffset, OrderBy, SearchFilter
+
+if TYPE_CHECKING:
+ from litestar.config.app import AppConfig
+
+
+signature_namespace_values = {
+ "BeforeAfter": BeforeAfter,
+ "CollectionFilter": CollectionFilter,
+ "LimitOffset": LimitOffset,
+ "OrderBy": OrderBy,
+ "SearchFilter": SearchFilter,
+}
+
+
+def on_app_init(app_config: "AppConfig") -> "AppConfig":
+ """Add custom filters for the application during signature modelling."""
+
+ app_config.signature_namespace.update(signature_namespace_values)
+ return app_config
| diff --git a/tests/contrib/repository/test_handlers.py b/tests/contrib/repository/test_handlers.py
new file mode 100644
--- /dev/null
+++ b/tests/contrib/repository/test_handlers.py
@@ -0,0 +1,15 @@
+from litestar.app import Litestar
+from litestar.contrib.repository import handlers
+from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, LimitOffset, OrderBy, SearchFilter
+
+
+def test_app_debug_create_logger() -> None:
+ app = Litestar([], on_app_init=[handlers.on_app_init])
+
+ assert app.signature_namespace == {
+ "BeforeAfter": BeforeAfter,
+ "CollectionFilter": CollectionFilter,
+ "LimitOffset": LimitOffset,
+ "OrderBy": OrderBy,
+ "SearchFilter": SearchFilter,
+ }
| Enhancement: `contrib.repository.on_app_init()` handler
### Summary
Callback to add to `on_app_init` that adds repository names to signature namespace.
### Basic Example
In `starlite-pg-redis-docker` I init the app with:
```py
signature_namespace={
"AsyncSession": AsyncSession,
"FilterTypes": FilterTypes,
"BeforeAfter": BeforeAfter,
"CollectionFilter": CollectionFilter,
"LimitOffset": LimitOffset,
"UUID": UUID,
"OrderBy": OrderBy,
"SearchFilter": SearchFilter,
},
```
These are names of things that would commonly be declared as handler or model parameter types when using the repository that are likely to get linted into an `if TYPE_CHECKING` block if using `__future__.annotations`.
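For context, the failure mode looks roughly like this (hypothetical handler; with deferred annotations the hint is just the string `"LimitOffset"`, which can only be resolved if the name is present in the signature namespace):
```py
from __future__ import annotations

from typing import TYPE_CHECKING

from litestar import get

if TYPE_CHECKING:  # linters move runtime-unused imports here
    from litestar.contrib.repository.filters import LimitOffset


@get("/items")
async def list_items(limit_offset: LimitOffset) -> list[str]:
    # At runtime the annotation is the *string* "LimitOffset"; without a
    # signature_namespace entry it cannot be resolved back to the class.
    ...
```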
E.g.,
```py
app = Litestar(..., on_app_init=[contrib.repository.on_app_init], ...)
```
### Drawbacks and Impact
It is opt-in, so there won't be any backward-incompatible behavior.
A drawback is that it is a bit magical, but if we document it well, it might also be a good example of how to use the `signature_namespace` feature.
### Unresolved questions
_No response_
| This could also be a method on the base repository, which might make sense if we want to be able to customize behavior for each repository type. It would probably also make sense to make them an implementation of `InitPluginProtocol` in that case. E.g.,
```py
app = Litestar(..., plugins=[contrib.sqlalchemy.repository.SQLAlchemyAsyncRepository.InitPlugin])
```
Whether the repository is actually also the plugin protocol or owns one is up for debate in this pattern, I guess.
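A minimal sketch of what that plugin-shaped variant could look like (hypothetical — the patch above ships a plain `on_app_init` callback instead, and the exact protocol import is an assumption):
```py
from litestar.config.app import AppConfig
from litestar.contrib.repository.filters import (
    BeforeAfter,
    CollectionFilter,
    LimitOffset,
    OrderBy,
    SearchFilter,
)
from litestar.plugins import InitPluginProtocol


class RepositoryNamespacePlugin(InitPluginProtocol):
    """Adds the common repository filter names to the app's signature namespace."""

    def on_app_init(self, app_config: AppConfig) -> AppConfig:
        app_config.signature_namespace.update(
            {
                "BeforeAfter": BeforeAfter,
                "CollectionFilter": CollectionFilter,
                "LimitOffset": LimitOffset,
                "OrderBy": OrderBy,
                "SearchFilter": SearchFilter,
            }
        )
        return app_config
```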
I think this is good, no issues from my end
Hi guys, could I work on that?
I'm going to add extra repo names in `signature_namespace` function in each config (`SQLAlchemyAsyncConfig`, `SQLAlchemySyncConfig`), or implement the common ones in `GenericSQLAlchemyConfig`, any thoughts on that?
> Hi guys, could I work on that?
>
> I'm going to add extra repo names in `signature_namespace` function in each config (`SQLAlchemyAsyncConfig`, `SQLAlchemySyncConfig`), or implement the common ones in `GenericSQLAlchemyConfig`, any thoughts on that?
More than welcome to work on it @sonpro1296!
I'm not sure we want the repository names added via the sqlalchemy configs - my thinking here is that the sqlalchemy plugin can be used independently of the repository pattern, and I don't know that we want to unnecessarily populate the namespace in that case. Happy to discuss.
That makes sense, but if that's the case, I think we will have to check if the repository pattern is being used in order to add the namespace. I'm not sure that's a good idea (because I don't know how xD), compared to adding them via the configs.
It should just be something that the user opts into IMO.
E.g.,
```py
from litestar.contrib import repository
app = Litestar(..., on_app_init=[repository.on_app_init])
``` | 2023-05-04T03:16:33 |
litestar-org/litestar | 1,625 | litestar-org__litestar-1625 | [
"4321",
"1234"
] | eb0f0115c05aa019991130ff77b1ede03da76d39 | diff --git a/litestar/handlers/websocket_handlers/_utils.py b/litestar/handlers/websocket_handlers/_utils.py
--- a/litestar/handlers/websocket_handlers/_utils.py
+++ b/litestar/handlers/websocket_handlers/_utils.py
@@ -4,11 +4,12 @@
from typing import TYPE_CHECKING, Any, Callable, Coroutine, cast
from litestar.dto.interface import ConnectionContext
-from litestar.exceptions import WebSocketDisconnect
from litestar.serialization import decode_json
from litestar.utils import AsyncCallable
if TYPE_CHECKING:
+ from contextlib import AbstractAsyncContextManager
+
from msgspec.json import Encoder as JsonEncoder
from litestar import WebSocket
@@ -111,34 +112,26 @@ async def handle_send(socket: WebSocket, data_to_send: Any, dto: DTOInterface |
def create_handler_function(
listener_context: ListenerContext,
- on_accept: AsyncCallable | None,
- on_disconnect: AsyncCallable | None,
- accept_connection_handler: Callable[[WebSocket], Coroutine[Any, Any, None]],
+ lifespan_manager: Callable[[WebSocket], AbstractAsyncContextManager],
) -> Callable[..., Coroutine[None, None, None]]:
- async def handler_fn(socket: WebSocket, **kwargs: Any) -> None:
- await accept_connection_handler(socket)
+ listener_callback = AsyncCallable(listener_context.listener_callback)
- listener_callback = AsyncCallable(listener_context.listener_callback)
+ async def handler_fn(socket: WebSocket, **kwargs: Any) -> None:
ctx = ConnectionContext.from_connection(socket)
data_dto = listener_context.resolved_data_dto(ctx) if listener_context.resolved_data_dto else None
return_dto = listener_context.resolved_return_dto(ctx) if listener_context.resolved_return_dto else None
-
- if on_accept:
- await on_accept(socket)
+ handle_receive = listener_context.handle_receive
+ handle_send = listener_context.handle_send if listener_context.can_send_data else None
if listener_context.pass_socket:
kwargs["socket"] = socket
- while True:
- try:
- received_data = await listener_context.handle_receive(socket, data_dto)
+ async with lifespan_manager(socket):
+ while True:
+ received_data = await handle_receive(socket, data_dto)
data_to_send = await listener_callback(data=received_data, **kwargs)
- if listener_context.can_send_data:
- await listener_context.handle_send(socket, data_to_send, return_dto)
- except WebSocketDisconnect:
- if on_disconnect:
- await on_disconnect(socket)
- break
+ if handle_send:
+ await handle_send(socket, data_to_send, return_dto)
return handler_fn
diff --git a/litestar/handlers/websocket_handlers/listener.py b/litestar/handlers/websocket_handlers/listener.py
--- a/litestar/handlers/websocket_handlers/listener.py
+++ b/litestar/handlers/websocket_handlers/listener.py
@@ -1,8 +1,17 @@
from __future__ import annotations
+import inspect
from abc import ABC, abstractmethod
+from contextlib import AbstractAsyncContextManager, asynccontextmanager
from functools import partial
-from typing import TYPE_CHECKING, Any, Callable, Mapping, cast
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncGenerator,
+ Callable,
+ Mapping,
+ cast,
+)
from msgspec.json import Encoder as JsonEncoder
@@ -36,10 +45,8 @@
from litestar.dto.interface import DTOInterface
from litestar.types.asgi_types import WebSocketMode
-__all__ = (
- "WebsocketListener",
- "websocket_listener",
-)
+
+__all__ = ("WebsocketListener", "websocket_listener")
class websocket_listener(WebsocketRouteHandler):
@@ -48,21 +55,23 @@ class websocket_listener(WebsocketRouteHandler):
returned
"""
- __slots__ = (
- "_on_accept",
- "_on_disconnect",
- "_pass_socket",
- "_receive_mode",
- "_send_mode",
- "_listener_context",
- "accept_connection_handler",
- )
+ __slots__ = {
+ "connection_accept_handler": "Callback to accept a WebSocket connection. By default, calls WebSocket.accept",
+ "on_accept": "Callback invoked after a WebSocket connection has been accepted",
+ "on_disconnect": "Callback invoked after a WebSocket connection has been closed",
+ "_pass_socket": None,
+ "_receive_mode": None,
+ "_send_mode": None,
+ "_listener_context": None,
+ "_connection_lifespan": None,
+ }
def __init__(
self,
path: str | None | list[str] | None = None,
*,
connection_accept_handler: Callable[[WebSocket], Coroutine[Any, Any, None]] = WebSocket.accept,
+ connection_lifespan: Callable[[WebSocket], AbstractAsyncContextManager[Any]] | None = None,
dependencies: Dependencies | None = None,
dto: type[DTOInterface] | None | EmptyType = Empty,
exception_handlers: dict[int | type[Exception], ExceptionHandler] | None = None,
@@ -86,6 +95,8 @@ def __init__(
to ``/``
connection_accept_handler: A callable that accepts a :class:`WebSocket <.connection.WebSocket>` instance
and returns a coroutine that when awaited, will accept the connection. Defaults to ``WebSocket.accept``.
+ connection_lifespan: An asynchronous context manager, handling the lifespan of the connection. By default,
+ it calls the ``connection_accept_handler``, ``on_connect`` and ``on_disconnect``.
dependencies: A string keyed mapping of dependency :class:`Provider <.di.Provide>` instances.
dto: :class:`DTOInterface <.dto.interface.DTOInterface>` to use for (de)serializing and
validation of request data.
@@ -112,9 +123,11 @@ def __init__(
self._listener_context = _utils.ListenerContext()
self._receive_mode: WebSocketMode = receive_mode
self._send_mode: WebSocketMode = send_mode
- self._on_accept = AsyncCallable(on_accept) if on_accept else None
- self._on_disconnect = AsyncCallable(on_disconnect) if on_disconnect else None
- self.accept_connection_handler = connection_accept_handler
+ self._connection_lifespan = connection_lifespan
+
+ self.connection_accept_handler = connection_accept_handler
+ self.on_accept = AsyncCallable(on_accept) if on_accept else None
+ self.on_disconnect = AsyncCallable(on_disconnect) if on_disconnect else None
self.type_encoders = type_encoders
super().__init__(
@@ -133,6 +146,26 @@ def __init__(
self.dto = dto
self.return_dto = return_dto
+ @asynccontextmanager
+ async def default_connection_lifespan(self, socket: WebSocket) -> AsyncGenerator[None, None]:
+ """Handle the connection lifespan of a WebSocket.
+
+ By default, this will
+
+ - Call :attr:`connection_accept_handler` to accept a connection
+ - Call :attr:`on_accept` if defined after a connection has been accepted
+ - Call :attr:`on_disconnect` upon leaving the context
+ """
+ await self.connection_accept_handler(socket)
+
+ if self.on_accept:
+ await self.on_accept(socket)
+ try:
+ yield
+ finally:
+ if self.on_disconnect:
+ await self.on_disconnect(socket)
+
def _validate_handler_function(self) -> None:
"""Validate the route handler function once it's set by inspecting its return annotations."""
# since none of the validation rules of WebsocketRouteHandler apply here, this is let empty. Validation of the
@@ -152,9 +185,7 @@ def __call__(self, listener_callback: AnyCallable) -> websocket_listener:
self._listener_context.listener_callback = listener_callback
self._listener_context.handler_function = handler_function = _utils.create_handler_function(
listener_context=self._listener_context,
- on_accept=self._on_accept,
- on_disconnect=self._on_disconnect,
- accept_connection_handler=self.accept_connection_handler,
+ lifespan_manager=self._connection_lifespan or self.default_connection_lifespan,
)
return super().__call__(handler_function)
@@ -165,6 +196,12 @@ def on_registration(self, app: Litestar) -> None:
def _create_signature_model(self, app: Litestar) -> None:
"""Create signature model for handler function."""
if not self.signature_model:
+ extra_signatures = []
+ if self.on_accept:
+ extra_signatures.append(inspect.signature(self.on_accept))
+ if self.on_disconnect:
+ extra_signatures.append(inspect.signature(self.on_disconnect))
+
new_signature = _utils.create_handler_signature(
self._listener_context.listener_callback_signature.original_signature
)
| diff --git a/tests/handlers/websocket/test_listeners.py b/tests/handlers/websocket/test_listeners.py
--- a/tests/handlers/websocket/test_listeners.py
+++ b/tests/handlers/websocket/test_listeners.py
@@ -1,5 +1,6 @@
+from contextlib import asynccontextmanager
from dataclasses import dataclass, field
-from typing import Dict, List, Optional, Type, Union, cast
+from typing import AsyncGenerator, Dict, List, Optional, Type, Union, cast
from unittest.mock import MagicMock
import pytest
@@ -275,3 +276,43 @@ def handler(data: bytes) -> None:
client = create_test_client([handler])
with client.websocket_connect("/") as ws:
assert ws.extra_headers == [(b"cookie", b"custom-cookie")]
+
+
+def test_connection_callbacks() -> None:
+ on_accept = MagicMock()
+ on_disconnect = MagicMock()
+
+ @websocket_listener("/", on_accept=on_accept, on_disconnect=on_disconnect)
+ def handler(data: bytes) -> None:
+ pass
+
+ client = create_test_client([handler])
+ with client.websocket_connect("/"):
+ pass
+
+ on_accept.assert_called_once()
+ on_disconnect.assert_called_once()
+
+
+def test_connection_lifespan() -> None:
+ on_accept = MagicMock()
+ on_disconnect = MagicMock()
+
+ @asynccontextmanager
+ async def lifespan(socket: WebSocket) -> AsyncGenerator[None, None]:
+ on_accept(socket)
+ try:
+ yield
+ finally:
+ on_disconnect(socket)
+
+ @websocket_listener("/", connection_lifespan=lifespan)
+ def handler(data: bytes) -> None:
+ pass
+
+ client = create_test_client([handler])
+ with client.websocket_connect("/", timeout=1):
+ pass
+
+ on_accept.assert_called_once()
+ on_disconnect.assert_called_once()
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-07T11:51:24 |
litestar-org/litestar | 1,626 | litestar-org__litestar-1626 | [
"4321",
"1234"
] | e6d668534f6b98b0489c43b64f34b607f8114fa5 | diff --git a/litestar/connection/websocket.py b/litestar/connection/websocket.py
--- a/litestar/connection/websocket.py
+++ b/litestar/connection/websocket.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, Generic, Literal, cast, overload
+from typing import TYPE_CHECKING, Any, AsyncGenerator, Generic, Literal, cast, overload
from litestar.connection.base import (
ASGIConnection,
@@ -12,7 +12,7 @@
)
from litestar.datastructures.headers import Headers
from litestar.exceptions import WebSocketDisconnect, WebSocketException
-from litestar.serialization import decode_json, default_serializer, encode_json
+from litestar.serialization import decode_json, decode_msgpack, default_serializer, encode_json, encode_msgpack
from litestar.status_codes import WS_1000_NORMAL_CLOSURE
__all__ = ("WebSocket",)
@@ -29,6 +29,7 @@
WebSocketAcceptEvent,
WebSocketCloseEvent,
WebSocketDisconnectEvent,
+ WebSocketMode,
WebSocketReceiveEvent,
WebSocketSendEvent,
)
@@ -152,7 +153,7 @@ async def receive_data(self, mode: Literal["text"]) -> str:
async def receive_data(self, mode: Literal["binary"]) -> bytes:
...
- async def receive_data(self, mode: Literal["binary", "text"]) -> str | bytes:
+ async def receive_data(self, mode: WebSocketMode) -> str | bytes:
"""Receive an 'websocket.receive' event and returns the data stored on it.
Args:
@@ -170,6 +171,26 @@ async def receive_data(self, mode: Literal["binary", "text"]) -> str | bytes:
raise WebSocketDisconnect(detail=DISCONNECT_MESSAGE) # pragma: no cover
return event.get("text") or "" if mode == "text" else event.get("bytes") or b""
+ @overload
+ def iter_data(self, mode: Literal["text"]) -> AsyncGenerator[str, None]:
+ ...
+
+ @overload
+ def iter_data(self, mode: Literal["binary"]) -> AsyncGenerator[bytes, None]:
+ ...
+
+ async def iter_data(self, mode: WebSocketMode) -> AsyncGenerator[str | bytes, None]:
+ """Continuously receive data and yield it
+
+ Args:
+ mode: Socket mode to use. Either ``text`` or ``binary``
+ """
+ try:
+ while True:
+ yield await self.receive_data(mode)
+ except WebSocketDisconnect:
+ pass
+
async def receive_text(self) -> str:
"""Receive data as text.
@@ -186,11 +207,8 @@ async def receive_bytes(self) -> bytes:
"""
return await self.receive_data(mode="binary")
- async def receive_json(
- self,
- mode: Literal["text", "binary"] = "text",
- ) -> Any:
- """Receive data and loads it into JSON using orson.
+ async def receive_json(self, mode: WebSocketMode = "text") -> Any:
+ """Receive data and decode it as JSON.
Args:
mode: Either ``text`` or ``binary``.
@@ -201,9 +219,39 @@ async def receive_json(
data = await self.receive_data(mode=mode)
return decode_json(data)
- async def send_data(
- self, data: str | bytes, mode: Literal["text", "binary"] = "text", encoding: str = "utf-8"
- ) -> None:
+ async def receive_msgpack(self) -> Any:
+ """Receive data and decode it as MessagePack.
+
+ Note that since MessagePack is a binary format, this method will always receive
+ data in ``binary`` mode.
+
+ Returns:
+ An arbitrary value
+ """
+ data = await self.receive_data(mode="binary")
+ return decode_msgpack(data)
+
+ async def iter_json(self, mode: WebSocketMode) -> AsyncGenerator[Any, None]:
+ """Continuously receive data and yield it, decoding it as JSON in the process.
+
+ Args:
+ mode: Socket mode to use. Either ``text`` or ``binary``
+ """
+ async for data in self.iter_data(mode):
+ yield decode_json(data)
+
+ async def iter_msgpack(self) -> AsyncGenerator[Any, None]:
+ """Continuously receive data and yield it, decoding it as MessagePack in the
+ process.
+
+ Note that since MessagePack is a binary format, this method will always receive
+ data in ``binary`` mode.
+
+ """
+ async for data in self.iter_data(mode="binary"):
+ yield decode_msgpack(data)
+
+ async def send_data(self, data: str | bytes, mode: WebSocketMode = "text", encoding: str = "utf-8") -> None:
"""Send a 'websocket.send' event.
Args:
@@ -266,7 +314,7 @@ async def send_bytes(self, data: str | bytes, encoding: str = "utf-8") -> None:
async def send_json(
self,
data: Any,
- mode: Literal["text", "binary"] = "text",
+ mode: WebSocketMode = "text",
encoding: str = "utf-8",
serializer: Serializer = default_serializer,
) -> None:
@@ -281,8 +329,25 @@ async def send_json(
Returns:
None
"""
- await self.send_data(
- data=encode_json(data, serializer),
- mode=mode,
- encoding=encoding,
- )
+ await self.send_data(data=encode_json(data, serializer), mode=mode, encoding=encoding)
+
+ async def send_msgpack(
+ self,
+ data: Any,
+ encoding: str = "utf-8",
+ serializer: Serializer = default_serializer,
+ ) -> None:
+ """Send data as MessagePack.
+
+ Note that since MessagePack is a binary format, this method will always send
+ data in ``binary`` mode.
+
+ Args:
+ data: A value to serialize.
+ encoding: Encoding to use for binary data.
+ serializer: A serializer function.
+
+ Returns:
+ None
+ """
+ await self.send_data(data=encode_msgpack(data, serializer), mode="binary", encoding=encoding)
| diff --git a/litestar/testing/websocket_test_session.py b/litestar/testing/websocket_test_session.py
--- a/litestar/testing/websocket_test_session.py
+++ b/litestar/testing/websocket_test_session.py
@@ -7,7 +7,7 @@
from anyio import sleep
from litestar.exceptions import WebSocketDisconnect
-from litestar.serialization import decode_json, encode_json
+from litestar.serialization import decode_json, decode_msgpack, encode_json, encode_msgpack
from litestar.status_codes import WS_1000_NORMAL_CLOSURE
if TYPE_CHECKING:
@@ -146,10 +146,21 @@ def send_json(self, data: Any, mode: Literal["text", "binary"] = "text") -> None
mode: Either ``text`` or ``binary``
Returns:
- None.
+ None
"""
self.send(encode_json(data), mode=mode)
+ def send_msgpack(self, data: Any) -> None:
+ """Sends the given data as MessagePack.
+
+ Args:
+ data: The data to send.
+
+ Returns:
+ None
+ """
+ self.send(encode_msgpack(data), mode="binary")
+
def close(self, code: int = WS_1000_NORMAL_CLOSURE) -> None:
"""Sends an 'websocket.disconnect' event.
@@ -232,3 +243,7 @@ def receive_json(
return decode_json(cast("str", message.get("text", "")))
return decode_json(cast("bytes", message.get("bytes", b"")))
+
+ def receive_msgpack(self, block: bool = True, timeout: float | None = None) -> Any:
+ message = self.receive(block=block, timeout=timeout)
+ return decode_msgpack(cast("bytes", message.get("bytes", b"")))
diff --git a/tests/connection/websocket/test_websocket.py b/tests/connection/test_websocket.py
similarity index 70%
rename from tests/connection/websocket/test_websocket.py
rename to tests/connection/test_websocket.py
--- a/tests/connection/websocket/test_websocket.py
+++ b/tests/connection/test_websocket.py
@@ -1,10 +1,11 @@
-"""A large part of the tests in this file were adapted from:
-
-https://github.com/encode/starlette/blob/master/tests/test_websockets.py And are meant to ensure our compatibility with
-their API.
"""
+Some tests in this file were adapted from: https://github.com/encode/starlette/blob/master/tests/test_websockets.py And
+were meant to ensure our compatibility with their API.
+"""
+from __future__ import annotations
-from typing import TYPE_CHECKING, Any, Literal
+from typing import TYPE_CHECKING, Any, AsyncGenerator, Literal
+from unittest.mock import MagicMock
import anyio
import pytest
@@ -15,6 +16,8 @@
from litestar.handlers.websocket_handlers import websocket
from litestar.status_codes import WS_1001_GOING_AWAY
from litestar.testing import TestClient, create_test_client
+from litestar.types.asgi_types import WebSocketMode
+from litestar.utils.compat import async_next
if TYPE_CHECKING:
from litestar.types import Receive, Scope, Send
@@ -70,7 +73,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.scope["called"] = True # type: ignore
- @websocket("/")
+ @websocket("/", signature_namespace={"MyWebSocket": MyWebSocket})
async def handler(socket: MyWebSocket) -> None:
value["called"] = socket.scope.get("called")
await socket.accept()
@@ -81,7 +84,7 @@ async def handler(socket: MyWebSocket) -> None:
def test_websocket_url() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
await websocket.send_json({"url": str(websocket.url)})
@@ -93,7 +96,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_binary_json() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
message = await websocket.receive_json(mode="binary")
@@ -107,7 +110,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_query_params() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
query_params = dict(websocket.query_params)
await websocket.accept()
@@ -120,7 +123,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_headers() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
headers = dict(websocket.headers)
await websocket.accept()
@@ -142,7 +145,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_port() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
await websocket.send_json({"port": websocket.url.port})
@@ -154,7 +157,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_send_and_receive_text() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
data = await websocket.receive_text()
@@ -168,7 +171,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_send_and_receive_bytes() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
data = await websocket.receive_bytes()
@@ -182,7 +185,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_send_and_receive_json() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
data = await websocket.receive_json()
@@ -195,6 +198,100 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
assert data == {"message": {"hello": "world"}}
+def test_send_msgpack() -> None:
+ test_data = {"message": "hello, world"}
+
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
+ socket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
+ await socket.accept()
+ await socket.send_msgpack(test_data)
+ await socket.close()
+
+ with TestClient(app).websocket_connect("/") as ws:
+ data = ws.receive_msgpack(timeout=1)
+ assert data == test_data
+
+
+def test_receive_msgpack() -> None:
+ test_data = {"message": "hello, world"}
+ callback = MagicMock()
+
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
+ socket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
+ await socket.accept()
+ data = await socket.receive_msgpack()
+ callback(data)
+ await socket.close()
+
+ with TestClient(app).websocket_connect("/") as ws:
+ ws.send_msgpack(test_data)
+
+ callback.assert_called_once_with(test_data)
+
+
+async def consume_gen(generator: AsyncGenerator[Any, Any], count: int, timeout: int = 1) -> list[Any]:
+ async def consumer() -> list[Any]:
+ result = []
+ for _ in range(count):
+ result.append(await async_next(generator))
+ return result
+
+ with anyio.fail_after(timeout):
+ return await consumer()
+
+
[email protected]("mode,data", [("text", ["foo", "bar"]), ("binary", [b"foo", b"bar"])])
+def test_iter_data(mode: WebSocketMode, data: list[str | bytes]) -> None:
+ values = []
+
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
+ socket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
+ await socket.accept()
+ values.extend(await consume_gen(socket.iter_data(mode=mode), 2))
+ await socket.close()
+
+ with TestClient(app).websocket_connect("/") as ws:
+ for message in data:
+ ws.send(message, mode=mode)
+
+ assert values == data
+
+
[email protected]("mode", ["text", "binary"])
+def test_iter_json(mode: WebSocketMode) -> None:
+ messages = [{"data": "foo"}, {"data": "bar"}]
+ values = []
+
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
+ socket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
+ await socket.accept()
+ values.extend(await consume_gen(socket.iter_json(mode=mode), 2))
+ await socket.close()
+
+ with TestClient(app).websocket_connect("/") as ws:
+ for message in messages:
+ ws.send_json(message, mode=mode)
+
+ assert values == messages
+
+
+def test_iter_msgpack() -> None:
+ messages = [{"data": "foo"}, {"data": "bar"}]
+ values = []
+
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
+ socket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
+ await socket.accept()
+ values.extend(await consume_gen(socket.iter_msgpack(), 2))
+ await socket.close()
+
+ with TestClient(app).websocket_connect("/") as ws:
+ for message in messages:
+ ws.send_msgpack(message)
+
+ assert values == messages
+
+
def test_websocket_concurrency_pattern() -> None:
stream_send, stream_receive = anyio.create_memory_object_stream()
@@ -208,7 +305,7 @@ async def writer(websocket: WebSocket[Any, Any, Any]) -> None:
async for message in stream_receive:
await websocket.send_json(message)
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
async with anyio.create_task_group() as task_group:
@@ -225,7 +322,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_client_close() -> None:
close_code = None
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
nonlocal close_code
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
@@ -240,7 +337,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_application_close() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
await websocket.close(WS_1001_GOING_AWAY)
@@ -251,7 +348,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_rejected_connection() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.close(WS_1001_GOING_AWAY)
@@ -261,7 +358,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_subprotocol() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
assert websocket.scope["subprotocols"] == ["soap", "wamp"]
await websocket.accept(subprotocols="wamp")
@@ -272,7 +369,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_additional_headers() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept(headers=[(b"additional", b"header")])
await websocket.close()
@@ -282,7 +379,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_no_additional_headers() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
await websocket.close()
@@ -292,7 +389,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_exception() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
raise RuntimeError
with pytest.raises(RuntimeError), TestClient(app).websocket_connect("/123?a=abc"):
@@ -300,7 +397,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_duplicate_disconnect() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
message = await websocket.receive()
@@ -312,7 +409,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_websocket_close_reason() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.accept()
await websocket.close(code=WS_1001_GOING_AWAY, reason="Going Away")
@@ -324,7 +421,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_receive_text_before_accept() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.receive_text()
@@ -333,7 +430,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_receive_bytes_before_accept() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.receive_bytes()
@@ -342,7 +439,7 @@ async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
def test_receive_json_before_accept() -> None:
- async def app(scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket[Any, Any, Any](scope, receive=receive, send=send)
await websocket.receive_json()
diff --git a/tests/connection/websocket/__init__.py b/tests/connection/websocket/__init__.py
deleted file mode 100644
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-07T13:47:21 |
litestar-org/litestar | 1,627 | litestar-org__litestar-1627 | [
"1615",
"4321",
"1234"
] | 60eb04549a3209d6db9c9512a5a6b44eca320516 | diff --git a/litestar/handlers/websocket_handlers/_utils.py b/litestar/handlers/websocket_handlers/_utils.py
--- a/litestar/handlers/websocket_handlers/_utils.py
+++ b/litestar/handlers/websocket_handlers/_utils.py
@@ -116,7 +116,7 @@ def create_handler_function(
) -> Callable[..., Coroutine[None, None, None]]:
listener_callback = AsyncCallable(listener_context.listener_callback)
- async def handler_fn(socket: WebSocket, **kwargs: Any) -> None:
+ async def handler_fn(*args: Any, socket: WebSocket, **kwargs: Any) -> None:
ctx = ConnectionContext.from_connection(socket)
data_dto = listener_context.resolved_data_dto(ctx) if listener_context.resolved_data_dto else None
return_dto = listener_context.resolved_return_dto(ctx) if listener_context.resolved_return_dto else None
@@ -129,7 +129,7 @@ async def handler_fn(socket: WebSocket, **kwargs: Any) -> None:
async with lifespan_manager(socket):
while True:
received_data = await handle_receive(socket, data_dto)
- data_to_send = await listener_callback(data=received_data, **kwargs)
+ data_to_send = await listener_callback(*args, data=received_data, **kwargs)
if handle_send:
await handle_send(socket, data_to_send, return_dto)
diff --git a/litestar/handlers/websocket_handlers/listener.py b/litestar/handlers/websocket_handlers/listener.py
--- a/litestar/handlers/websocket_handlers/listener.py
+++ b/litestar/handlers/websocket_handlers/listener.py
@@ -18,7 +18,7 @@
from litestar._signature import create_signature_model
from litestar.connection import WebSocket
from litestar.dto.interface import HandlerContext
-from litestar.exceptions import ImproperlyConfiguredException
+from litestar.exceptions import ImproperlyConfiguredException, WebSocketDisconnect
from litestar.serialization import default_serializer
from litestar.types import (
AnyCallable,
@@ -162,6 +162,8 @@ async def default_connection_lifespan(self, socket: WebSocket) -> AsyncGenerator
await self.on_accept(socket)
try:
yield
+ except WebSocketDisconnect:
+ pass
finally:
if self.on_disconnect:
await self.on_disconnect(socket)
| diff --git a/tests/handlers/websocket/test_listeners.py b/tests/handlers/websocket/test_listeners.py
--- a/tests/handlers/websocket/test_listeners.py
+++ b/tests/handlers/websocket/test_listeners.py
@@ -6,7 +6,7 @@
import pytest
from pytest_lazyfixture import lazy_fixture
-from litestar import Litestar, Request, WebSocket
+from litestar import Controller, Litestar, Request, WebSocket
from litestar.datastructures import State
from litestar.di import Provide
from litestar.dto.factory import dto_field
@@ -316,3 +316,19 @@ def handler(data: bytes) -> None:
on_accept.assert_called_once()
on_disconnect.assert_called_once()
+
+
+def test_listener_in_controller() -> None:
+ # test for https://github.com/litestar-org/litestar/issues/1615
+
+ class ClientController(Controller):
+ path: str = "/"
+
+ @websocket_listener("/ws")
+ async def websocket_handler(self, data: str, socket: WebSocket) -> str:
+ return data
+
+ with create_test_client(ClientController, debug=True) as client, client.websocket_connect("/ws") as ws:
+ ws.send_text("foo")
+ data = ws.receive_text(timeout=1)
+ assert data == "foo"
| Bug: Using `websocket_listener` in controller causes `TypeError: .handler_fn() got multiple values for argument 'socket'`
### Description
Confirmed by @provinzkraut in Discord
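For reference, the same `TypeError` can be reproduced in plain Python — a bound method's `self` fills the first positional slot and collides with a keyword argument of the same name (illustrative only, not Litestar internals):
```python
def handler_fn(socket, **kwargs):
    return socket


class FakeController:
    handler = handler_fn  # accessed via an instance, this becomes a bound method


FakeController().handler(socket="ws")
# TypeError: handler_fn() got multiple values for argument 'socket'
```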
### URL to code causing the issue
_No response_
### MCVE
```python
from litestar import Controller, WebSocket, websocket_listener
from litestar.testing import create_test_client
class ClientController(Controller):
    path: str = "/client"

    @websocket_listener("/ws")
    async def websocket_handler(self, data: bytes, socket: WebSocket) -> bytes:
        return data


with create_test_client(ClientController) as client, client.websocket_connect("/client/ws") as ws:
    ws.send_bytes(b"foo")
    data = ws.receive_bytes()
    assert data == b"foo"
```
### Screenshots
""
### Logs
_No response_
### Litestar Version
2.0.0alpha5
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
|
I agree. Do you want to submit a PR? | 2023-05-07T15:07:19 |
litestar-org/litestar | 1,628 | litestar-org__litestar-1628 | [
"4321",
"1234"
] | 60eb04549a3209d6db9c9512a5a6b44eca320516 | diff --git a/tools/build_docs.py b/tools/build_docs.py
--- a/tools/build_docs.py
+++ b/tools/build_docs.py
@@ -27,6 +27,7 @@
parser = argparse.ArgumentParser()
parser.add_argument("--version", required=False)
+parser.add_argument("--ignore-missing-examples-output", action="store_true", default=False)
parser.add_argument("output")
@@ -49,12 +50,15 @@ def load_version_spec() -> VersionSpec:
return {"versions": [], "latest": ""}
-def build(output_dir: str, version: str | None) -> None:
+def build(output_dir: str, version: str | None, ignore_missing_output: bool) -> None:
if version is None:
version = importlib.metadata.version("litestar").rsplit(".")[0]
else:
os.environ["_LITESTAR_DOCS_BUILD_VERSION"] = version
+ if ignore_missing_output:
+ os.environ["_LITESTAR_DOCS_IGNORE_MISSING_EXAMPLE_OUTPUT"] = "1"
+
subprocess.run(["make", "docs"], check=True) # noqa: S603 S607
output_dir = Path(output_dir)
@@ -83,7 +87,11 @@ def build(output_dir: str, version: str | None) -> None:
def main() -> None:
args = parser.parse_args()
- build(output_dir=args.output, version=args.version)
+ build(
+ output_dir=args.output,
+ version=args.version,
+ ignore_missing_output=args.ignore_missing_output,
+ )
if __name__ == "__main__":
diff --git a/tools/sphinx_ext/run_examples.py b/tools/sphinx_ext/run_examples.py
--- a/tools/sphinx_ext/run_examples.py
+++ b/tools/sphinx_ext/run_examples.py
@@ -34,6 +34,8 @@
logger = logging.getLogger("sphinx")
+ignore_missing_output = os.getenv("_LITESTAR_DOCS_IGNORE_MISSING_EXAMPLE_OUTPUT", "") == "1"
+
def _load_app_from_path(path: Path) -> Litestar:
module = importlib.import_module(str(path.with_suffix("")).replace("/", "."))
@@ -115,7 +117,9 @@ def exec_examples(app_file: Path, run_configs: list[list[str]]) -> str:
)
stdout = proc.stdout.splitlines()
if not stdout:
- logger.error(f"Example: {app_file}:{args} yielded no results")
+ logger.debug(proc.stderr)
+ if not ignore_missing_output:
+ logger.error(f"Example: {app_file}:{args} yielded no results")
continue
result = "\n".join(line for line in ("> " + (" ".join(clean_args)), *stdout))
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-07T15:36:33 |
|
litestar-org/litestar | 1,633 | litestar-org__litestar-1633 | [
"4321",
"1234"
] | e6d668534f6b98b0489c43b64f34b607f8114fa5 | diff --git a/tools/build_docs.py b/tools/build_docs.py
--- a/tools/build_docs.py
+++ b/tools/build_docs.py
@@ -90,7 +90,7 @@ def main() -> None:
build(
output_dir=args.output,
version=args.version,
- ignore_missing_output=args.ignore_missing_output,
+ ignore_missing_output=args.ignore_missing_examples_output,
)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-08T09:17:22 |
|
litestar-org/litestar | 1,641 | litestar-org__litestar-1641 | [
"1640",
"1234"
] | d3dfd04c263c9657943003dfbb99810a25e7b64d | diff --git a/litestar/stores/base.py b/litestar/stores/base.py
--- a/litestar/stores/base.py
+++ b/litestar/stores/base.py
@@ -119,7 +119,7 @@ def expires_in(self) -> int:
was set, return ``-1``.
"""
if self.expires_at:
- return (self.expires_at - datetime.now(tz=timezone.utc)).seconds
+ return int(self.expires_at.timestamp() - datetime.now(tz=timezone.utc).timestamp())
return -1
def to_bytes(self) -> bytes:
| diff --git a/tests/test_stores.py b/tests/test_stores.py
--- a/tests/test_stores.py
+++ b/tests/test_stores.py
@@ -150,7 +150,11 @@ async def test_delete_all(store: Store) -> None:
assert not any([await store.get(key) for key in keys])
-async def test_expires_in(store: Store) -> None:
[email protected]("patch_storage_obj_frozen_datetime")
+async def test_expires_in(store: Store, frozen_datetime: FrozenDateTimeFactory) -> None:
+ if not isinstance(store, RedisStore):
+ pytest.xfail("bug in FileStore and MemoryStore")
+
assert await store.expires_in("foo") is None
await store.set("foo", "bar")
@@ -159,6 +163,9 @@ async def test_expires_in(store: Store) -> None:
await store.set("foo", "bar", expires_in=10)
assert math.ceil(await store.expires_in("foo") / 10) * 10 == 10 # type: ignore[operator]
+ frozen_datetime.tick(12)
+ assert await store.expires_in("foo") is None
+
@patch("litestar.stores.redis.Redis")
@patch("litestar.stores.redis.ConnectionPool.from_url")
| Bug: StorageObject doesn't return < 0 when using expiry
### Description
When the stored value has already expired, the returned interval comes back as close to 86400 seconds instead of a negative number, so the value never appears to expire.
### URL to code causing the issue
https://github.com/litestar-org/litestar/blob/main/litestar/stores/base.py#L122
### MCVE
```python
from pathlib import Path
from litestar.stores.file import FileStore
store = FileStore(path=Path("test.db"))
async def setstore() -> None:
await store.set("test", "value", expires_in=5)
return None
async def getstore() -> int:
expiry = await store.expires_in("test")
return expiry
```
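The root cause is visible in plain Python: `timedelta.seconds` is the normalized, always non-negative seconds-of-day component, so an already-expired key looks like it has almost a full day left. A minimal illustration, independent of litestar:
```python
from datetime import datetime, timedelta, timezone

expires_at = datetime.now(tz=timezone.utc) - timedelta(seconds=5)  # expired 5s ago
delta = expires_at - datetime.now(tz=timezone.utc)                 # negative timedelta

print(delta.seconds)  # ~86395, because .seconds is normalized into [0, 86400)
print(int(expires_at.timestamp() - datetime.now(tz=timezone.utc).timestamp()))  # ~-5
```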
### Steps to reproduce
_No response_
### Screenshots
```bash
""
```
### Logs
_No response_
### Litestar Version
`litestar==2.0.0a5`
### Platform
- [ ] Linux
- [X] Mac
- [ ] Windows
- [X] Other (Please specify in the description above)
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
|
I agree. Do you want to submit a PR? | 2023-05-09T13:56:16 |
litestar-org/litestar | 1,647 | litestar-org__litestar-1647 | [
"4321",
"1234"
] | 32be0aa3442f8f1883ae66ca83474329a22fb926 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -103,6 +103,7 @@
("py:class", "litestar.utils.signature.ParsedParameter"),
("py:class", "litestar.utils.sync.AsyncCallable"),
("py:class", "BacklogStrategy"),
+ ("py:class", "ExceptionT"),
]
nitpick_ignore_regex = [
(r"py:.*", r"litestar\.types.*"),
diff --git a/litestar/handlers/websocket_handlers/_utils.py b/litestar/handlers/websocket_handlers/_utils.py
--- a/litestar/handlers/websocket_handlers/_utils.py
+++ b/litestar/handlers/websocket_handlers/_utils.py
@@ -1,11 +1,14 @@
from __future__ import annotations
import inspect
-from typing import TYPE_CHECKING, Any, Callable, Coroutine, cast
+from functools import wraps
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, Dict, cast
+from litestar.di import Provide
from litestar.dto.interface import ConnectionContext
from litestar.serialization import decode_json
from litestar.utils import AsyncCallable
+from litestar.utils.helpers import unwrap_partial
if TYPE_CHECKING:
from contextlib import AbstractAsyncContextManager
@@ -112,11 +115,13 @@ async def handle_send(socket: WebSocket, data_to_send: Any, dto: DTOInterface |
def create_handler_function(
listener_context: ListenerContext,
- lifespan_manager: Callable[[WebSocket], AbstractAsyncContextManager],
+ lifespan_manager: Callable[..., AbstractAsyncContextManager],
) -> Callable[..., Coroutine[None, None, None]]:
listener_callback = AsyncCallable(listener_context.listener_callback)
- async def handler_fn(*args: Any, socket: WebSocket, **kwargs: Any) -> None:
+ async def handler_fn(
+ *args: Any, socket: WebSocket, connection_lifespan_dependencies: Dict[str, Any], **kwargs: Any # noqa: UP006
+ ) -> None:
ctx = ConnectionContext.from_connection(socket)
data_dto = listener_context.resolved_data_dto(ctx) if listener_context.resolved_data_dto else None
return_dto = listener_context.resolved_return_dto(ctx) if listener_context.resolved_return_dto else None
@@ -126,7 +131,7 @@ async def handler_fn(*args: Any, socket: WebSocket, **kwargs: Any) -> None:
if listener_context.pass_socket:
kwargs["socket"] = socket
- async with lifespan_manager(socket):
+ async with lifespan_manager(**connection_lifespan_dependencies):
while True:
received_data = await handle_receive(socket, data_dto)
data_to_send = await listener_callback(*args, data=received_data, **kwargs)
@@ -155,4 +160,24 @@ def create_handler_signature(callback_signature: inspect.Signature) -> inspect.S
new_params = [p for p in callback_signature.parameters.values() if p.name not in {"data"}]
if "socket" not in callback_signature.parameters:
new_params.append(inspect.Parameter(name="socket", kind=inspect.Parameter.KEYWORD_ONLY, annotation="WebSocket"))
+
+ new_params.append(
+ inspect.Parameter(
+ name="connection_lifespan_dependencies", kind=inspect.Parameter.KEYWORD_ONLY, annotation="Dict[str, Any]"
+ )
+ )
+
return callback_signature.replace(parameters=new_params)
+
+
+def create_stub_dependency(src: AnyCallable) -> Provide:
+ """Create a stub dependency, accepting any kwargs defined in ``src``, and
+ wrap it in ``Provide``
+ """
+ src = unwrap_partial(src)
+
+ @wraps(src)
+ async def stub(**kwargs: Any) -> Dict[str, Any]: # noqa: UP006
+ return kwargs
+
+ return Provide(stub)
diff --git a/litestar/handlers/websocket_handlers/listener.py b/litestar/handlers/websocket_handlers/listener.py
--- a/litestar/handlers/websocket_handlers/listener.py
+++ b/litestar/handlers/websocket_handlers/listener.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import inspect
from abc import ABC, abstractmethod
from contextlib import AbstractAsyncContextManager, asynccontextmanager
from functools import partial
@@ -9,8 +8,11 @@
Any,
AsyncGenerator,
Callable,
+ Dict,
Mapping,
+ Optional,
cast,
+ overload,
)
from msgspec.json import Encoder as JsonEncoder
@@ -28,7 +30,6 @@
ExceptionHandler,
Guard,
Middleware,
- SyncOrAsyncUnion,
TypeEncodersMap,
)
from litestar.types.builtin_types import NoneType
@@ -64,14 +65,61 @@ class websocket_listener(WebsocketRouteHandler):
"_send_mode": None,
"_listener_context": None,
"_connection_lifespan": None,
+ "_dependency_stubs": None,
}
+ @overload
+ def __init__(
+ self,
+ path: str | None | list[str] | None = None,
+ *,
+ connection_lifespan: Callable[..., AbstractAsyncContextManager[Any]] | None = None,
+ dependencies: Dependencies | None = None,
+ dto: type[DTOInterface] | None | EmptyType = Empty,
+ exception_handlers: dict[int | type[Exception], ExceptionHandler] | None = None,
+ guards: list[Guard] | None = None,
+ middleware: list[Middleware] | None = None,
+ receive_mode: WebSocketMode = "text",
+ send_mode: WebSocketMode = "text",
+ name: str | None = None,
+ opt: dict[str, Any] | None = None,
+ return_dto: type[DTOInterface] | None | EmptyType = Empty,
+ signature_namespace: Mapping[str, Any] | None = None,
+ type_encoders: TypeEncodersMap | None = None,
+ **kwargs: Any,
+ ) -> None:
+ ...
+
+ @overload
+ def __init__(
+ self,
+ path: str | None | list[str] | None = None,
+ *,
+ connection_accept_handler: Callable[[WebSocket], Coroutine[Any, Any, None]] = WebSocket.accept,
+ dependencies: Dependencies | None = None,
+ dto: type[DTOInterface] | None | EmptyType = Empty,
+ exception_handlers: dict[int | type[Exception], ExceptionHandler] | None = None,
+ guards: list[Guard] | None = None,
+ middleware: list[Middleware] | None = None,
+ receive_mode: WebSocketMode = "text",
+ send_mode: WebSocketMode = "text",
+ name: str | None = None,
+ on_accept: AnyCallable | None = None,
+ on_disconnect: AnyCallable | None = None,
+ opt: dict[str, Any] | None = None,
+ return_dto: type[DTOInterface] | None | EmptyType = Empty,
+ signature_namespace: Mapping[str, Any] | None = None,
+ type_encoders: TypeEncodersMap | None = None,
+ **kwargs: Any,
+ ) -> None:
+ ...
+
def __init__(
self,
path: str | None | list[str] | None = None,
*,
connection_accept_handler: Callable[[WebSocket], Coroutine[Any, Any, None]] = WebSocket.accept,
- connection_lifespan: Callable[[WebSocket], AbstractAsyncContextManager[Any]] | None = None,
+ connection_lifespan: Callable[..., AbstractAsyncContextManager[Any]] | None = None,
dependencies: Dependencies | None = None,
dto: type[DTOInterface] | None | EmptyType = Empty,
exception_handlers: dict[int | type[Exception], ExceptionHandler] | None = None,
@@ -80,8 +128,8 @@ def __init__(
receive_mode: WebSocketMode = "text",
send_mode: WebSocketMode = "text",
name: str | None = None,
- on_accept: Callable[[WebSocket], SyncOrAsyncUnion[None]] | None = None,
- on_disconnect: Callable[[WebSocket], SyncOrAsyncUnion[None]] | None = None,
+ on_accept: AnyCallable | None = None,
+ on_disconnect: AnyCallable | None = None,
opt: dict[str, Any] | None = None,
return_dto: type[DTOInterface] | None | EmptyType = Empty,
signature_namespace: Mapping[str, Any] | None = None,
@@ -96,7 +144,8 @@ def __init__(
connection_accept_handler: A callable that accepts a :class:`WebSocket <.connection.WebSocket>` instance
and returns a coroutine that when awaited, will accept the connection. Defaults to ``WebSocket.accept``.
connection_lifespan: An asynchronous context manager, handling the lifespan of the connection. By default,
- it calls the ``connection_accept_handler``, ``on_connect`` and ``on_disconnect``.
+ it calls the ``connection_accept_handler``, ``on_connect`` and ``on_disconnect``. Can request any
+ dependencies, for example the :class:`WebSocket <.connection.WebSocket>` connection
dependencies: A string keyed mapping of dependency :class:`Provider <.di.Provide>` instances.
dto: :class:`DTOInterface <.dto.interface.DTOInterface>` to use for (de)serializing and
validation of request data.
@@ -106,10 +155,10 @@ def __init__(
receive_mode: Websocket mode to receive data in, either `text` or `binary`.
send_mode: Websocket mode to receive data in, either `text` or `binary`.
name: A string identifying the route handler.
- on_accept: Callback invoked after a connection has been accepted, receiving the
- :class:`WebSocket <.connection.WebSocket>` instance as its only argument
- on_disconnect: Callback invoked after a connection has been closed, receiving the
- :class:`WebSocket <.connection.WebSocket>` instance as its only argument
+ on_accept: Callback invoked after a connection has been accepted. Can request any dependencies, for example
+ the :class:`WebSocket <.connection.WebSocket>` connection
+ on_disconnect: Callback invoked after a connection has been closed. Can request any dependencies, for
+ example the :class:`WebSocket <.connection.WebSocket>` connection
opt: A string keyed mapping of arbitrary values that can be accessed in :class:`Guards <.types.Guard>` or
wherever you have access to :class:`Request <.connection.Request>` or
:class:`ASGI Scope <.types.Scope>`.
@@ -120,6 +169,12 @@ def __init__(
type_encoders: A mapping of types to callables that transform them into types supported for serialization.
**kwargs: Any additional kwarg - will be set in the opt dictionary.
"""
+ if connection_lifespan and any([on_accept, on_disconnect, connection_accept_handler is not WebSocket.accept]):
+ raise ImproperlyConfiguredException(
+ "connection_lifespan can not be used with connection hooks "
+ "(on_accept, on_disconnect, connection_accept_handler)",
+ )
+
self._listener_context = _utils.ListenerContext()
self._receive_mode: WebSocketMode = receive_mode
self._send_mode: WebSocketMode = send_mode
@@ -146,9 +201,32 @@ def __init__(
self.dto = dto
self.return_dto = return_dto
+ if not self.dependencies:
+ self.dependencies = {}
+ self.dependencies = dict(self.dependencies)
+ self.dependencies["connection_lifespan_dependencies"] = _utils.create_stub_dependency(
+ self._connection_lifespan or self.default_connection_lifespan
+ )
+ if self.on_accept:
+ self.dependencies["on_accept_dependencies"] = _utils.create_stub_dependency(self.on_accept.ref.value)
+ if self.on_disconnect:
+ self.dependencies["on_disconnect_dependencies"] = _utils.create_stub_dependency(
+ self.on_disconnect.ref.value
+ )
+
@asynccontextmanager
- async def default_connection_lifespan(self, socket: WebSocket) -> AsyncGenerator[None, None]:
- """Handle the connection lifespan of a WebSocket.
+ async def default_connection_lifespan(
+ self,
+ socket: WebSocket,
+ on_accept_dependencies: Optional[Dict[str, Any]] = None, # noqa: UP007, UP006
+ on_disconnect_dependencies: Optional[Dict[str, Any]] = None, # noqa: UP007, UP006
+ ) -> AsyncGenerator[None, None]:
+ """Handle the connection lifespan of a :class:`WebSocket <.connection.WebSocket>`.
+
+ Args:
+ socket: The :class:`WebSocket <.connection.WebSocket>` connection
+ on_accept_dependencies: Dependencies requested by the :attr:`on_accept` hook
+ on_disconnect_dependencies: Dependencies requested by the :attr:`on_disconnect` hook
By, default this will
@@ -159,14 +237,14 @@ async def default_connection_lifespan(self, socket: WebSocket) -> AsyncGenerator
await self.connection_accept_handler(socket)
if self.on_accept:
- await self.on_accept(socket)
+ await self.on_accept(**(on_accept_dependencies or {}))
try:
yield
except WebSocketDisconnect:
pass
finally:
if self.on_disconnect:
- await self.on_disconnect(socket)
+ await self.on_disconnect(**(on_disconnect_dependencies or {}))
def _validate_handler_function(self) -> None:
"""Validate the route handler function once it's set by inspecting its return annotations."""
@@ -198,12 +276,6 @@ def on_registration(self, app: Litestar) -> None:
def _create_signature_model(self, app: Litestar) -> None:
"""Create signature model for handler function."""
if not self.signature_model:
- extra_signatures = []
- if self.on_accept:
- extra_signatures.append(inspect.signature(self.on_accept))
- if self.on_disconnect:
- extra_signatures.append(inspect.signature(self.on_disconnect))
-
new_signature = _utils.create_handler_signature(
self._listener_context.listener_callback_signature.original_signature
)
@@ -260,8 +332,12 @@ class WebsocketListener(ABC):
"""A sequence of :class:`Guard <.types.Guard>` callables."""
middleware: list[Middleware] | None = None
"""A sequence of :class:`Middleware <.types.Middleware>`."""
+ on_accept: AnyCallable | None = None
+ """Called after a :class:`WebSocket <.connection.WebSocket>` connection has been accepted. Can receive any dependencies"""
+ on_disconnect: AnyCallable | None = None
+ """Called after a :class:`WebSocket <.connection.WebSocket>` connection has been disconnected. Can receive any dependencies"""
receive_mode: WebSocketMode = "text"
- """Websocket mode to receive data in, either `text` or `binary`."""
+ """:class:`WebSocket <.connection.WebSocket>` mode to receive data in, either ``text`` or ``binary``."""
send_mode: WebSocketMode = "text"
"""Websocket mode to send data in, either `text` or `binary`."""
name: str | None = None
@@ -301,9 +377,6 @@ def __init__(self) -> None:
type_encoders=self.type_encoders,
)(self.on_receive)
- def on_accept(self, socket: WebSocket) -> SyncOrAsyncUnion[None]: # noqa: B027
- """Called after a WebSocket connection has been accepted"""
-
@abstractmethod
def on_receive(self, *args: Any, **kwargs: Any) -> Any:
"""Called after data has been received from the WebSocket.
@@ -316,6 +389,3 @@ def on_receive(self, *args: Any, **kwargs: Any) -> Any:
according to handler configuration.
"""
raise NotImplementedError
-
- def on_disconnect(self, socket: WebSocket) -> SyncOrAsyncUnion[None]: # noqa: B027
- """Called after a WebSocket connection has been disconnected"""
| diff --git a/tests/handlers/websocket/test_listeners.py b/tests/handlers/websocket/test_listeners.py
--- a/tests/handlers/websocket/test_listeners.py
+++ b/tests/handlers/websocket/test_listeners.py
@@ -1,7 +1,7 @@
from contextlib import asynccontextmanager
from dataclasses import dataclass, field
from typing import AsyncGenerator, Dict, List, Optional, Type, Union, cast
-from unittest.mock import MagicMock
+from unittest.mock import AsyncMock, MagicMock
import pytest
from pytest_lazyfixture import lazy_fixture
@@ -279,8 +279,14 @@ def handler(data: bytes) -> None:
def test_connection_callbacks() -> None:
- on_accept = MagicMock()
- on_disconnect = MagicMock()
+ on_accept_mock = MagicMock()
+ on_disconnect_mock = MagicMock()
+
+ def on_accept(socket: WebSocket) -> None:
+ on_accept_mock()
+
+ def on_disconnect(socket: WebSocket) -> None:
+ on_disconnect_mock()
@websocket_listener("/", on_accept=on_accept, on_disconnect=on_disconnect)
def handler(data: bytes) -> None:
@@ -290,8 +296,8 @@ def handler(data: bytes) -> None:
with client.websocket_connect("/"):
pass
- on_accept.assert_called_once()
- on_disconnect.assert_called_once()
+ on_accept_mock.assert_called_once()
+ on_disconnect_mock.assert_called_once()
def test_connection_lifespan() -> None:
@@ -328,7 +334,67 @@ class ClientController(Controller):
async def websocket_handler(self, data: str, socket: WebSocket) -> str:
return data
- with create_test_client(ClientController, debug=True) as client, client.websocket_connect("/ws") as ws:
+ with create_test_client(ClientController) as client, client.websocket_connect("/ws") as ws:
ws.send_text("foo")
data = ws.receive_text(timeout=1)
assert data == "foo"
+
+
+def test_lifespan_dependencies() -> None:
+ mock = MagicMock()
+
+ @asynccontextmanager
+ async def lifespan(name: str, state: State, query: dict) -> AsyncGenerator[None, None]:
+ mock(name=name, state=state, query=query)
+ yield
+
+ @websocket_listener("/{name:str}", connection_lifespan=lifespan)
+ async def handler(data: str) -> None:
+ pass
+
+ with create_test_client([handler], debug=True) as client, client.websocket_connect("/foo") as ws:
+ ws.send_text("")
+
+ assert mock.call_args_list[0].kwargs["name"] == "foo"
+ assert isinstance(mock.call_args_list[0].kwargs["state"], State)
+ assert isinstance(mock.call_args_list[0].kwargs["query"], dict)
+
+
+def test_hook_dependencies() -> None:
+ on_accept_mock = MagicMock()
+ on_disconnect_mock = MagicMock()
+
+ def on_accept(name: str, state: State, query: dict) -> None:
+ on_accept_mock(name=name, state=state, query=query)
+
+ def on_disconnect(name: str, state: State, query: dict) -> None:
+ on_disconnect_mock(name=name, state=state, query=query)
+
+ @websocket_listener("/{name: str}", on_accept=on_accept, on_disconnect=on_disconnect)
+ def handler(data: bytes) -> None:
+ pass
+
+ with create_test_client([handler], debug=True) as client, client.websocket_connect("/foo") as ws:
+ ws.send_text("")
+
+ assert on_accept_mock.call_args_list[0].kwargs["name"] == "foo"
+ assert isinstance(on_accept_mock.call_args_list[0].kwargs["state"], State)
+ assert isinstance(on_accept_mock.call_args_list[0].kwargs["query"], dict)
+ assert on_disconnect_mock.call_args_list[0].kwargs["name"] == "foo"
+ assert isinstance(on_disconnect_mock.call_args_list[0].kwargs["state"], State)
+ assert isinstance(on_disconnect_mock.call_args_list[0].kwargs["query"], dict)
+
+
[email protected]("hook_name", ["on_accept", "on_disconnect", "connection_accept_handler"])
+def test_listeners_lifespan_hooks_and_manager_raises(hook_name: str) -> None:
+ @asynccontextmanager
+ async def lifespan() -> AsyncGenerator[None, None]:
+ yield
+
+ hook_callback = AsyncMock()
+
+ with pytest.raises(ImproperlyConfiguredException):
+
+ @websocket_listener("/", **{hook_name: hook_callback}, connection_lifespan=lifespan)
+ def handler(data: bytes) -> None:
+ pass
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-10T11:52:51 |
litestar-org/litestar | 1,649 | litestar-org__litestar-1649 | [
"1643",
"1643",
"1234"
] | cc8c8d5753a5b9230867c76cf5e06e2cf57173af | diff --git a/litestar/dto/factory/_backends/abc.py b/litestar/dto/factory/_backends/abc.py
--- a/litestar/dto/factory/_backends/abc.py
+++ b/litestar/dto/factory/_backends/abc.py
@@ -3,6 +3,7 @@
"""
from __future__ import annotations
+import secrets
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Final, Generic, TypeVar, Union
@@ -408,6 +409,11 @@ def _create_union_type(
has_nested=any(_determine_has_nested(t) for t in inner_types),
)
+ def _gen_unique_name_id(self, unique_name: str, size: int = 12) -> str:
+ # Generate a unique ID
+ # Convert the ID to a short alphanumeric string
+ return f"{unique_name}-{secrets.token_hex(8)}"
+
def _filter_exclude(exclude: AbstractSet[str], field_name: str) -> AbstractSet[str]:
"""Filter exclude set to only include exclusions for the given field name."""
diff --git a/litestar/dto/factory/_backends/msgspec/backend.py b/litestar/dto/factory/_backends/msgspec/backend.py
--- a/litestar/dto/factory/_backends/msgspec/backend.py
+++ b/litestar/dto/factory/_backends/msgspec/backend.py
@@ -26,7 +26,8 @@ class MsgspecDTOBackend(AbstractDTOBackend[Struct]):
__slots__ = ()
def create_transfer_model_type(self, unique_name: str, field_definitions: FieldDefinitionsType) -> type[Struct]:
- return _create_struct_for_field_definitions(unique_name, field_definitions)
+ fqn_uid: str = self._gen_unique_name_id(unique_name)
+ return _create_struct_for_field_definitions(fqn_uid, field_definitions)
def parse_raw(self, raw: bytes, connection_context: ConnectionContext) -> Struct | Collection[Struct]:
return decode_media_type( # type:ignore[no-any-return]
diff --git a/litestar/dto/factory/_backends/pydantic/backend.py b/litestar/dto/factory/_backends/pydantic/backend.py
--- a/litestar/dto/factory/_backends/pydantic/backend.py
+++ b/litestar/dto/factory/_backends/pydantic/backend.py
@@ -25,7 +25,8 @@ class PydanticDTOBackend(AbstractDTOBackend[BaseModel]):
__slots__ = ()
def create_transfer_model_type(self, unique_name: str, field_definitions: FieldDefinitionsType) -> type[BaseModel]:
- return _create_model_for_field_definitions(unique_name, field_definitions)
+ fqn_uid: str = self._gen_unique_name_id(unique_name)
+ return _create_model_for_field_definitions(fqn_uid, field_definitions)
def parse_raw(self, raw: bytes, connection_context: ConnectionContext) -> BaseModel | Collection[BaseModel]:
return decode_media_type( # type:ignore[no-any-return]
| diff --git a/tests/dto/factory/backends/test_backends.py b/tests/dto/factory/backends/test_backends.py
--- a/tests/dto/factory/backends/test_backends.py
+++ b/tests/dto/factory/backends/test_backends.py
@@ -12,13 +12,14 @@
from litestar.dto.factory import DTOConfig
from litestar.dto.factory._backends import MsgspecDTOBackend, PydanticDTOBackend
from litestar.dto.factory._backends.abc import BackendContext
-from litestar.dto.factory._backends.types import CollectionType, SimpleType
+from litestar.dto.factory._backends.types import CollectionType, SimpleType, TransferFieldDefinition
from litestar.dto.factory.stdlib.dataclass import DataclassDTO
from litestar.dto.interface import ConnectionContext
from litestar.enums import MediaType
from litestar.exceptions import SerializationException
from litestar.openapi.spec.reference import Reference
from litestar.serialization import encode_json
+from litestar.types.empty import Empty
from litestar.utils.signature import ParsedType
if TYPE_CHECKING:
@@ -175,6 +176,32 @@ def test_backend_create_openapi_schema(backend_type: type[AbstractDTOBackend], b
assert nested_schema.properties["b"].type == "string"
[email protected]("backend_type", [MsgspecDTOBackend, PydanticDTOBackend])
+def test_backend_model_name_uniqueness(backend_type: type[AbstractDTOBackend], backend_context: BackendContext) -> None:
+ backend = backend_type(backend_context)
+ unique_names: set = set()
+ transfer_type = SimpleType(parsed_type=ParsedType(int), nested_field_info=None)
+
+ fd = (
+ TransferFieldDefinition(
+ name="a",
+ default=Empty,
+ parsed_type=ParsedType(int),
+ default_factory=None,
+ dto_field=None,
+ unique_model_name="some_module.SomeModel",
+ serialization_name="a",
+ transfer_type=transfer_type,
+ is_partial=False,
+ ),
+ )
+ for i in range(100):
+ model_class = backend.create_transfer_model_type("some_module.SomeModel", fd)
+ model_name = model_class.__name__
+ assert model_name not in unique_names
+ unique_names.add(model_name)
+
+
@pytest.mark.parametrize("backend_type", [MsgspecDTOBackend, PydanticDTOBackend])
def test_backend_populate_data_from_raw(
backend_type: type[AbstractDTOBackend], backend_context: BackendContext, connection_context: ConnectionContext
diff --git a/tests/dto/factory/test_integration.py b/tests/dto/factory/test_integration.py
--- a/tests/dto/factory/test_integration.py
+++ b/tests/dto/factory/test_integration.py
@@ -176,3 +176,27 @@ def handler(data: DTOData[User]) -> User:
with create_test_client(route_handlers=[handler], debug=True) as client:
response = client.patch("/", json={"age": 41, "read_only": "whoops"})
assert response.json() == {"name": "John", "age": 41, "read_only": "read-only"}
+
+
+def test_dto_openapi_model_name_collision() -> None:
+ @dataclass
+ class Bar:
+ id: int
+ foo: str
+
+ write_dto = DataclassDTO[Annotated[Bar, DTOConfig(exclude={"id"})]]
+ read_dto = DataclassDTO[Bar]
+
+ @post(dto=write_dto, return_dto=read_dto, signature_namespace={"Bar": Bar})
+ def handler(data: Bar) -> Bar:
+ return data
+
+ with create_test_client(route_handlers=[handler], debug=True) as client:
+ response = client.get("/schema/openapi.json")
+ schemas = list(response.json()["components"]["schemas"].values())
+ assert len(schemas) == 2
+ assert schemas[0] != schemas[1]
+ assert all(
+ k.startswith("tests.dto.factory.test_integration.test_dto_openapi_model_name_collision.<locals>.Bar")
+ for k in response.json()["components"]["schemas"]
+ )
| Bug: `500: Two different schemas with the title app.domain.authors.Author have been defined.`
### Description
Can be reproduced by upgrading pg-redis-docker to alpha6.
A reproducible example is included: https://github.com/v3ss0n/starlite-pg-redis-docker/tree/bug-alpha6
### URL to code causing the issue
http://localhost:8000/schema/swagger
### MCVE
```python
https://github.com/v3ss0n/starlite-pg-redis-docker/tree/bug-alpha6
```
### Steps to reproduce
```bash
1. Upgrade To Starlite Alpha6
2. Go to http://localhost:8000/schema/swagger
3. See error
```
### Screenshots
```bash
""
```
### Logs
```
first: {"properties":{"id":{"type":"string","format":"uuid","description":"Any UUID string"},"name":{"type":"string"},"dob":{"type":"string","format":"date"},"country_id":{"oneOf":[{"type":"null"},{"type":"string","format":"uuid","description":"Any UUID string"}]}},"type":"object","required":["dob","name"],"title":"app.domain.authors.Author"}
starlite-pg-redis-docker-app-1 | second: {"properties":{"created":{"type":"string","format":"date-time"},"updated":{"type":"string","format":"date-time"},"id":{"type":"string","format":"uuid","description":"Any UUID string"},"name":{"type":"string"},"dob":{"type":"string","format":"date"},"country_id":{"oneOf":[{"type":"null"},{"type":"string","format":"uuid","description":"Any UUID string"}]},"nationality":{"oneOf":[{"type":"null"},{"$ref":"#/components/schemas/app.domain.authors.Author.nationality_0"}]}},"type":"object","required":["dob","name"],"title":"app.domain.authors.Author"}
if schema.title and schema.type in (OpenAPIType.OBJECT, OpenAPIType.ARRAY):
--
782 | if schema.title in schemas and hash(schemas[schema.title]) != hash(schema):
783 | raise ImproperlyConfiguredException(
784 | f"Two different schemas with the title {schema.title} have been defined.\n\n"
785 | f"first: {encode_json(schemas[schema.title].to_schema()).decode()}\n"
786 | f"second: {encode_json(schema.to_schema()).decode()}\n\n"
787 | f"To fix this issue, either rename the base classes from which these titles are derived or manually"
788 | f"set a 'title' kwarg in the route handler."
789 | )
790 | schemas[schema.title] = schema
```
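For reference, the fix in this PR sidesteps the collision by appending a short random suffix to each generated transfer-model name (see `_gen_unique_name_id` in the diff above); roughly:
```python
import secrets

unique_name = "app.domain.authors.Author"
print(f"{unique_name}-{secrets.token_hex(8)}")  # e.g. "app.domain.authors.Author-3f9c2a1b4d5e6f70"
```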
### Litestar Version
litestar = { version = "==2.0.0a6" , extras = ["redis", "cli"] }
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
|
I agree. Do you want to submit a PR? | 2023-05-10T14:06:52 |
litestar-org/litestar | 1,659 | litestar-org__litestar-1659 | [
"4321",
"1234"
] | e9def20dae292ec72ef24f2670564398da8dd363 | diff --git a/litestar/contrib/repository/filters.py b/litestar/contrib/repository/filters.py
--- a/litestar/contrib/repository/filters.py
+++ b/litestar/contrib/repository/filters.py
@@ -1,13 +1,10 @@
"""Collection filter datastructures."""
from __future__ import annotations
+from collections import abc # noqa: TCH003
from dataclasses import dataclass
from datetime import datetime # noqa: TCH003
-from typing import TYPE_CHECKING, Generic, Literal, TypeVar
-
-if TYPE_CHECKING:
- from collections import abc
-
+from typing import Generic, Literal, TypeVar
T = TypeVar("T")
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-12T16:32:55 |
|
litestar-org/litestar | 1,669 | litestar-org__litestar-1669 | [
"1653"
] | cf93d3e8a05d071ef434ac4a64c66c91a8256e82 | diff --git a/litestar/_signature/field.py b/litestar/_signature/field.py
--- a/litestar/_signature/field.py
+++ b/litestar/_signature/field.py
@@ -3,7 +3,8 @@
from dataclasses import dataclass
from typing import Any, Literal
-from typing_extensions import get_args, get_origin
+from msgspec import Meta
+from typing_extensions import Annotated, get_args, get_origin
from litestar.constants import UNDEFINED_SENTINELS
from litestar.params import BodyKwarg, DependencyKwarg, ParameterKwarg
@@ -22,6 +23,26 @@
__all__ = ("SignatureField",)
+def _create_metadata_from_type(
+ value: Any, model: type[ParameterKwarg] | type[BodyKwarg], field_type: Any
+) -> ParameterKwarg | BodyKwarg | None:
+ if isinstance(value, Meta):
+ is_sequence_container = is_non_string_sequence(field_type)
+ return model(
+ gt=value.gt,
+ ge=value.ge,
+ lt=value.lt,
+ le=value.le,
+ multiple_of=value.multiple_of,
+ regex=value.pattern,
+ min_length=value.min_length if not is_sequence_container else None,
+ max_length=value.max_length if not is_sequence_container else None,
+ min_items=value.min_length if is_sequence_container else None,
+ max_items=value.max_length if is_sequence_container else None,
+ )
+ return None
+
+
@dataclass(unsafe_hash=True, frozen=True)
class SignatureField:
"""Abstraction representing a model field. This class is meant to replace equivalent datastructures from other
@@ -170,8 +191,17 @@ def create(
if kwarg_model and default_value is Empty:
default_value = kwarg_model.default
- if not children and get_origin(field_type) and (type_args := get_args(field_type)):
- children = tuple(SignatureField.create(arg) for arg in type_args)
+ origin = get_origin(field_type)
+
+ if not children and origin and (type_args := get_args(field_type)):
+ if origin is Annotated:
+ field_type = type_args[0]
+ if not kwarg_model:
+ kwarg_model = _create_metadata_from_type(
+ type_args[1], BodyKwarg if name == "data" else ParameterKwarg, field_type=field_type
+ )
+ else:
+ children = tuple(SignatureField.create(arg) for arg in type_args)
return SignatureField(
name=name,
| diff --git a/tests/openapi/test_schema.py b/tests/openapi/test_schema.py
--- a/tests/openapi/test_schema.py
+++ b/tests/openapi/test_schema.py
@@ -2,8 +2,10 @@
from enum import Enum
from typing import TYPE_CHECKING, Dict, Literal
+import msgspec
import pytest
from pydantic import BaseModel
+from typing_extensions import Annotated
from litestar import Controller, MediaType, get
from litestar._openapi.schema_generation.schema import (
@@ -245,3 +247,17 @@ class Foo(TypedDict):
)
schema = create_schema_for_typed_dict(module.Foo, generate_examples=False, plugins=[], schemas={})
assert schema.properties and all(key in schema.properties for key in ("foo", "bar", "baz"))
+
+
+def test_create_schema_from_msgspec_annotated_type() -> None:
+ class Lookup(msgspec.Struct):
+ id: Annotated[str, msgspec.Meta(max_length=16)]
+
+ schemas: Dict[str, Schema] = {}
+ create_schema(
+ field=SignatureField.create(name="Lookup", field_type=Lookup),
+ generate_examples=False,
+ plugins=[],
+ schemas=schemas,
+ )
+ assert schemas["Lookup"]
| Bug: Annotated Types not being rendered in OpenAPI schema
### Description
I've got a `msgspec.Struct` request object that uses a `typing.Annotated` type as one of its field types.
When viewing the route's request object in the generated schema, I only see that a str or null is allowed, with no mention of the max-length constraint. This also happens when I exclude the `| None` from the type declaration.
### URL to code causing the issue
_No response_
### MCVE
```python
from typing import Annotated
import msgspec
IDType = Annotated[str, msgspec.Meta(max_length=16)]
class Lookup(msgspec.Struct):
id: IDType | None
```
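The constraint is recoverable from the annotation itself: `get_origin`/`get_args` expose the wrapped type and its `msgspec.Meta`, which is what a schema generator needs in order to surface the constraint. A minimal sketch, assuming `typing_extensions`:
```python
import msgspec
from typing_extensions import Annotated, get_args, get_origin

IDType = Annotated[str, msgspec.Meta(max_length=16)]

assert get_origin(IDType) is Annotated
inner_type, meta = get_args(IDType)
assert inner_type is str
print(meta.max_length)  # 16 -- the constraint missing from the generated schema
```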
### Steps to reproduce
```bash
1. Create route with the above request object.
2. View the route in the /schema endpoint.
```
### Screenshots
_No response_
### Logs
_No response_
### Litestar Version
2.0.0a5
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| Yes, I'm not sure we currently parse the msgspec metadata.
There is also an issue with `Optional[...]` that @v3ss0n has encountered where type of those fields is displaying as `"Unknown type: null"` in api docs (not saying related, just that there is seemingly work to be done on this in general).
I'd like us to get to handling these msgspec `Annotated` constraints as I intend to improve on the SQLAlchemy and other ORM DTOs to generate them when possible, e.g., `VARCHAR(16)` would generate `Annotated[str, msgspec.Meta(max_length=16)]` on the transfer model.
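A hypothetical sketch of that mapping, deriving a constrained transfer type from a SQLAlchemy column type; the function name and mapping are assumptions, not existing litestar API:
```python
from typing import Any

import msgspec
from sqlalchemy import String
from typing_extensions import Annotated


def constrained_str(col_type: String) -> Any:
    # e.g. VARCHAR(16) -> Annotated[str, msgspec.Meta(max_length=16)]
    if col_type.length is not None:
        return Annotated[str, msgspec.Meta(max_length=col_type.length)]
    return str
```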
Aight, if no one picks it up I'll take it, depending on my capacity.
> There is also an issue with `Optional[...]` that @v3ss0n has encountered where type of those fields is displaying as `"Unknown type: null"` in api docs (not saying related, just that there is seemingly work to be done on this in general).
>
> I'd like us to get to handling these msgspec `Annotated` constraints as I intend to improve on the SQLAlchemy and other ORM DTOs to generate them when possible, e.g., `VARCHAR(16)` would generate `Annotated[str, msgspec.Meta(max_length=16)]` on the transfer model.
Cool, this got tracked.
I was about to open an issue on it, so this one should cover it, right? | 2023-05-13T14:15:17 |
litestar-org/litestar | 1,670 | litestar-org__litestar-1670 | [
"1541"
] | 5b680cb154160b62293b762afdce91134b17db81 | diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py
--- a/litestar/_openapi/schema_generation/schema.py
+++ b/litestar/_openapi/schema_generation/schema.py
@@ -550,7 +550,9 @@ def create_schema_for_pydantic_model(
required=sorted([field.alias or field.name for field in field_type.__fields__.values() if field.required]),
properties={
(f.alias or f.name): create_schema(
- field=SignatureField.create(field_type=field_type_hints[f.name], name=f.alias or f.name),
+ field=SignatureField.create(
+ field_type=field_type_hints[f.name], name=f.alias or f.name, default_value=f.field_info
+ ),
generate_examples=generate_examples,
plugins=plugins,
schemas=schemas,
diff --git a/litestar/_signature/field.py b/litestar/_signature/field.py
--- a/litestar/_signature/field.py
+++ b/litestar/_signature/field.py
@@ -4,6 +4,7 @@
from typing import Any, Literal
from msgspec import Meta
+from pydantic.fields import FieldInfo
from typing_extensions import Annotated, get_args, get_origin
from litestar.constants import UNDEFINED_SENTINELS
@@ -40,6 +41,28 @@ def _create_metadata_from_type(
min_items=value.min_length if is_sequence_container else None,
max_items=value.max_length if is_sequence_container else None,
)
+ if isinstance(value, FieldInfo):
+ values: dict[str, Any] = {
+ k: v
+ for k, v in {
+ "gt": value.gt,
+ "ge": value.ge,
+ "lt": value.lt,
+ "le": value.le,
+ "multiple_of": value.multiple_of,
+ "regex": value.regex,
+ "min_length": value.min_length,
+ "max_length": value.max_length,
+ "min_items": value.min_items,
+ "max_items": value.max_items,
+ "description": value.description,
+ "title": value.title,
+ "const": value.const is not None,
+ }.items()
+ if v is not None
+ }
+ if values:
+ return model(**values)
return None
@@ -191,15 +214,19 @@ def create(
if kwarg_model and default_value is Empty:
default_value = kwarg_model.default
+ elif isinstance(default_value, FieldInfo) and not kwarg_model:
+ kwarg_model = _create_metadata_from_type(
+ default_value, BodyKwarg if name == "data" else ParameterKwarg, field_type=field_type
+ )
+
origin = get_origin(field_type)
if not children and origin and (type_args := get_args(field_type)):
if origin is Annotated:
field_type = type_args[0]
- if not kwarg_model:
- kwarg_model = _create_metadata_from_type(
- type_args[1], BodyKwarg if name == "data" else ParameterKwarg, field_type=field_type
- )
+ kwarg_model = kwarg_model or _create_metadata_from_type(
+ type_args[1], BodyKwarg if name == "data" else ParameterKwarg, field_type=field_type
+ )
else:
children = tuple(SignatureField.create(arg) for arg in type_args)
| diff --git a/tests/openapi/test_schema.py b/tests/openapi/test_schema.py
--- a/tests/openapi/test_schema.py
+++ b/tests/openapi/test_schema.py
@@ -4,7 +4,7 @@
import msgspec
import pytest
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
from typing_extensions import Annotated
from litestar import Controller, MediaType, get
@@ -260,4 +260,22 @@ class Lookup(msgspec.Struct):
plugins=[],
schemas=schemas,
)
- assert schemas["Lookup"]
+ schema = schemas["Lookup"]
+ assert schema.properties["id"].type == OpenAPIType.STRING # type: ignore
+
+
+def test_create_schema_for_pydantic_field() -> None:
+ class Model(BaseModel):
+ value: str = Field(title="title", description="description")
+
+ schemas: Dict[str, Schema] = {}
+ create_schema(
+ field=SignatureField.create(name="Model", field_type=Model),
+ generate_examples=False,
+ plugins=[],
+ schemas=schemas,
+ )
+ schema = schemas["Model"]
+
+ assert schema.properties["value"].description == "description" # type: ignore
+ assert schema.properties["value"].title == "title" # type: ignore
| Bug: OpenAPI docs parse pydantic Field()
### Description
Referencing a discussion on Discord: https://discord.com/channels/919193495116337154/1093516724864811038
It seems that the pydantic `Field()` in the model isn't parsed, so the title/description fields aren't generated in the openapi.json.
Running on docker `python:3.11-alpine`
### URL to code causing the issue
_No response_
### MCVE
```python
from starlite import Starlite, post, Controller
from pydantic import BaseModel, Field
class SomeRequestData(BaseModel):
hostname: str = Field(title="The hostname", description="Some description")
interface: str = Field(title="Interface", description="Interface to use")
class SomeController(Controller):
path = "/somepath"
@post(
path="/test", summary="testing", tags=["Test"], description="Test description"
)
async def request_example(self, data: SomeRequestData) -> str:
return "test"
app = Starlite([SomeController])
```
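For reference, in pydantic v1 the title/description live on the field's `FieldInfo` rather than in the type annotation, so a schema generator has to read them from there. A quick standalone illustration, not litestar code:
```python
from pydantic import BaseModel, Field


class SomeRequestData(BaseModel):
    hostname: str = Field(title="The hostname", description="Some description")


field = SomeRequestData.__fields__["hostname"]  # pydantic v1 ModelField
print(field.field_info.title)        # "The hostname"
print(field.field_info.description)  # "Some description"
```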
### Steps to reproduce
_No response_
### Screenshots
Current (as in `2.0.0a4`): _screenshot omitted_
Expected (as in `1.51.9`): _screenshot omitted_
### Logs
_No response_
### Litestar Version
litestar==2.0.0a4
### Platform
- [ ] Linux
- [X] Mac
- [ ] Windows
- [X] Other (Please specify in the description above)
| I looked into this a bit since we happened to notice it today as well.
It looks like it applies to both request bodies and responses.
Looking at the source, it appears `litestar._signature.field.SignatureField` doesn't support descriptions. There's an `extra` attribute, but that appears to be used for creating examples.
> It looks like it applies to both request bodies and responses.
Correct.
I went down the rabbit hole and tried to figure out why this happens. It looks like `construct_open_api_with_schema_class` isn't called anymore from `app.py:update_openapi_schema` when comparing it to 1.51.10.
Simply adding it back doesn't work because the `OpenAPI` class isn't based on pydantic's `BaseModel` anymore but on litestar's `BaseSchemaObject`.
The function `construct_open_api_with_schema_class` relies on `BaseModel.copy(deep=True)`, which doesn't exist on `BaseSchemaObject`. | 2023-05-13T15:06:05 |
litestar-org/litestar | 1,671 | litestar-org__litestar-1671 | [
"1446"
] | 5b680cb154160b62293b762afdce91134b17db81 | diff --git a/litestar/response/base.py b/litestar/response/base.py
--- a/litestar/response/base.py
+++ b/litestar/response/base.py
@@ -92,7 +92,7 @@ def __init__(
dict(headers) if isinstance(headers, Mapping) else {h.name: h.value for h in headers or {}}
)
self.is_head_response = is_head_response
- self.media_type = get_enum_string_value(media_type)
+ self.media_type = get_enum_string_value(media_type) or MediaType.JSON
self.status_allows_body = not (
status_code in {HTTP_204_NO_CONTENT, HTTP_304_NOT_MODIFIED} or status_code < HTTP_200_OK
)
@@ -240,7 +240,7 @@ def render(self, content: Any) -> bytes:
An encoded bytes string
"""
try:
- if self.media_type.startswith("text/"):
+ if self.media_type.startswith("text/") or isinstance(content, str):
if not content:
return b""
@@ -249,7 +249,10 @@ def render(self, content: Any) -> bytes:
if self.media_type == MediaType.MESSAGEPACK:
return encode_msgpack(content, self._enc_hook)
- return encode_json(content, self._enc_hook)
+ if self.media_type.startswith("application/json"):
+ return encode_json(content, self._enc_hook)
+
+ raise ImproperlyConfiguredException(f"unsupported media_type {self.media_type} for content {content!r}")
except (AttributeError, ValueError, TypeError) as e:
raise ImproperlyConfiguredException("Unable to serialize response content") from e
| diff --git a/tests/contrib/htmx/test_htmx_request.py b/tests/contrib/htmx/test_htmx_request.py
--- a/tests/contrib/htmx/test_htmx_request.py
+++ b/tests/contrib/htmx/test_htmx_request.py
@@ -85,7 +85,7 @@ def handler(request: HTMXRequest) -> Response:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/", headers={HTMXHeaders.CURRENT_URL.value: "https://example.com"})
- assert response.text == '"https://example.com"'
+ assert response.text == "https://example.com"
def test_current_url_set_url_encoded() -> None:
@@ -102,7 +102,7 @@ def handler(request: HTMXRequest) -> Response:
HTMXHeaders.CURRENT_URL.value + "-URI-AutoEncoded": "true",
},
)
- assert response.text == '"https://example.com/?"'
+ assert response.text == "https://example.com/?"
def test_current_url_abs_path_default() -> None:
@@ -126,7 +126,7 @@ def handler(request: HTMXRequest) -> Response:
response = client.get(
"/", headers={HTMXHeaders.CURRENT_URL.value: "http://testserver.local/duck/?quack=true#h2"}
)
- assert response.text == '"/duck/?quack=true#h2"'
+ assert response.text == "/duck/?quack=true#h2"
def test_current_url_abs_path_set_other_domain() -> None:
@@ -181,7 +181,7 @@ def handler(request: HTMXRequest) -> Response:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/", headers={HTMXHeaders.PROMPT.value: "Yes"})
- assert response.text == '"Yes"'
+ assert response.text == "Yes"
def test_target_default() -> None:
@@ -203,7 +203,7 @@ def handler(request: HTMXRequest) -> Response:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/", headers={HTMXHeaders.TARGET.value: "#element"})
- assert response.text == '"#element"'
+ assert response.text == "#element"
def test_trigger_default() -> None:
@@ -225,7 +225,7 @@ def handler(request: HTMXRequest) -> Response:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/", headers={HTMXHeaders.TRIGGER_ID.value: "#element"})
- assert response.text == '"#element"'
+ assert response.text == "#element"
def test_trigger_name_default() -> None:
@@ -247,7 +247,7 @@ def handler(request: HTMXRequest) -> Response:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/", headers={HTMXHeaders.TRIGGER_NAME.value: "name_of_element"})
- assert response.text == '"name_of_element"'
+ assert response.text == "name_of_element"
def test_triggering_event_none() -> None:
diff --git a/tests/contrib/htmx/test_htmx_response.py b/tests/contrib/htmx/test_htmx_response.py
--- a/tests/contrib/htmx/test_htmx_response.py
+++ b/tests/contrib/htmx/test_htmx_response.py
@@ -77,7 +77,7 @@ def handler() -> PushUrl:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.PUSH_URL] == "/index.html"
@@ -100,7 +100,7 @@ def handler() -> ReplaceUrl:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.REPLACE_URL] == "/index.html"
@@ -112,7 +112,7 @@ def handler() -> Reswap:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.RE_SWAP] == "beforebegin"
@@ -124,7 +124,7 @@ def handler() -> Retarget:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.RE_TARGET] == "#element"
@@ -138,7 +138,7 @@ def handler() -> TriggerEvent:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.TRIGGER_EVENT] == '{"alert":{"warning":"Confirm your choice!"}}'
@@ -151,7 +151,7 @@ def handler() -> TriggerEvent:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.TRIGGER_EVENT] == '{"alert":{}}'
@@ -165,7 +165,7 @@ def handler() -> TriggerEvent:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.TRIGGER_AFTER_SETTLE] == '{"alert":{"warning":"Confirm your choice!"}}'
@@ -177,7 +177,7 @@ def handler() -> TriggerEvent:
with create_test_client(route_handlers=[handler], request_class=HTMXRequest) as client:
response = client.get("/")
assert response.status_code == HTTP_200_OK
- assert response.text == '"Success!"'
+ assert response.text == "Success!"
assert response.headers[HTMXHeaders.TRIGGER_AFTER_SWAP] == '{"alert":{"warning":"Confirm your choice!"}}'
diff --git a/tests/response/test_serialization.py b/tests/response/test_serialization.py
--- a/tests/response/test_serialization.py
+++ b/tests/response/test_serialization.py
@@ -8,6 +8,7 @@
from pydantic import SecretStr
from litestar import MediaType, Response
+from litestar.exceptions import ImproperlyConfiguredException
from litestar.status_codes import HTTP_200_OK
from tests import (
MsgSpecStructPerson,
@@ -70,3 +71,23 @@ def test_response_serialization_structured_types(content: Any, response_type: An
def test_response_serialization_text_types(content: Any, response_type: Any, media_type: MediaType) -> None:
response = Response[response_type](content, media_type=media_type, status_code=HTTP_200_OK)
assert response.body == content.encode("utf-8")
+
+
[email protected](
+ "content, response_type, media_type, should_raise",
+ [
+ ["abcdefg", str, "text/custom", False],
+ ["<xml/>", str, "application/unknown", False],
+ [b"<xml/>", bytes, "application/unknown", False],
+ [{"key": "value"}, dict, "application/unknown", True],
+ ],
+)
+def test_response_validation_of_unknown_media_types(
+ content: Any, response_type: Any, media_type: MediaType, should_raise: bool
+) -> None:
+ if should_raise:
+ with pytest.raises(ImproperlyConfiguredException):
+ Response[response_type](content, media_type=media_type, status_code=HTTP_200_OK)
+ else:
+ response = Response[response_type](content, media_type=media_type, status_code=HTTP_200_OK)
+ assert response.body == (content.encode("utf-8") if not isinstance(content, bytes) else content)
| Responses with a non `text/*` media type get rendered as JSON
### Summary
I'm using v2 from main.
The rendering of a response (everything that is not `bytes`) depends only on the provided media type, which defaults to JSON. The rendering logic can be a bit surprising when you want to return text with a non `text/*` media type.
### Basic Example
```python
from starlite import Response, Starlite, get
@get("/resource")
def retrieve_resource() -> Response[str]:
return Response(content="<xml><msg>Hello World!</msg></xml>", media_type="application/xml")
app = Starlite(route_handlers=[retrieve_resource])
```
This will return `"<xml><msg>Hello World!</msg></xml>"` because it is rendered as JSON.
### Drawbacks and Impact
The media type is defaulted to JSON in
https://github.com/starlite-api/starlite/blob/c8aa65d1419590d7ccb738911783857ca92ddcc1/starlite/response/base.py#L60
This is ok and stated in the documentation.
The render logic in
https://github.com/starlite-api/starlite/blob/c8aa65d1419590d7ccb738911783857ca92ddcc1/starlite/response/base.py#L233-L254
only checks for `text/*`. Obviously starlite can't know every media type in existence, but I think the default handling of an unknown media type could be better.
Explicitly check for the known media types that starlite has special handling for (MediaType.JSON, MediaType.MESSAGEPACK). Existing code still returns JSON because that's the default.
Handle unknown media types by some documented heuristic. I would propose checking whether the type is `str` and then doing the text encoding, otherwise throwing an exception. Returning text and having it properly encoded is common enough to warrant handling in the framework. If a user needs custom rendering of his data, he can always do it himself and return `bytes`.
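A minimal sketch of that heuristic; it mirrors the render logic in the diff above, but the standalone function here is illustrative rather than the actual `Response.render` signature:
```python
import json


def render(content: object, media_type: str) -> bytes:
    if isinstance(content, bytes):
        return content                       # bytes pass through for any media type
    if media_type.startswith("text/") or isinstance(content, str):
        return str(content).encode("utf-8")  # text-like content: encode the string
    if media_type == "application/json":
        return json.dumps(content).encode("utf-8")
    raise ValueError(f"unsupported media_type {media_type!r} for content {content!r}")
```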
### Unresolved questions
_No response_
| Have you tried:
```python
from starlite import Response, Starlite, get, MediaType
@get("/resource", media_type=MediaType.XML)
def retrieve_resource() -> str:
return "<xml><msg>Hello World!</msg></xml>"
app = Starlite(route_handlers=[retrieve_resource])
```
You can do this as well for any media type you want:
```python
from starlite import Response, Starlite, get
@get("/resource", media_type="application/my-favourite-media-type")
def retrieve_resource() -> str:
return "Hello World!</msg>"
app = Starlite(route_handlers=[retrieve_resource])
```
Both will also return the string rendered as JSON (with quotes). Isn't it internally converted to `Response[str]`?
```console
$ curl http://127.0.0.1:8001/resource
"<xml><msg>Hello World!</msg></xml>"
```
If you change `str` to `bytes` and return a normal string, then you get the same result because of the default JSON encoding.
As an addition to the render logic: should this throw an error if the stated return type is different from the actual type? Starlite cares about annotating the return type and throws an error if you don't; should it also check that you return the correct type?
If you change `str` to `bytes` and return a byte string, then you get:
```console
$ curl http://127.0.0.1:8001/resource
<xml><msg>Hello World!</msg></xml>
```
Sorry that I missed it again. I'm using v2 from main.
> Both will also return the string rendered as JSON (with quotes)
That would be a bug then. The example I provided is supposed to work. Would this behaviour (if working as intended) address your issue?
https://github.com/starlite-api/starlite/blob/c8aa65d1419590d7ccb738911783857ca92ddcc1/tests/response/test_serialization.py#L67-L70
We are missing the test case for XML and arbitrary types here.
> That would be a bug then. The example I provided is supposed to work. Would this behaviour (if working as intended) address your issue?
Yes that should do it. My expectation (guided by the documentation) is:
1. no `media_type` given or `media_type=MediaType.JSON`: render the stated return type as JSON.
2. `media_type=MediaType.MESSAGEPACK`: render the stated type as MESSAGEPACK.
3. `media_type=MediaType.TEXT`: render the encoded string.
4. `media_type=MediaType.HTML`: render the encoded string.
My expectation (not stated in the documentation):
5. `media_type=<anything>` and stated type `bytes`: do nothing and just respond with the bytes.
6. `media_type=<anything else>` and stated type `str`: render the encoded string.
7. `media_type=<anything else>` and stated type `!= str`: ambiguous (currently behaves like 1.).
Regarding 7., for clarity I would like to get an exception. This would show me that starlite can't render the given type for the stated media_type (because it doesn't know what that media_type means) and that I should do it myself (return `bytes`, 5.).
Adding documentation for 5.-7. would help users that want to return more exotic responses; an example for case 5. is sketched below.
If you want to keep 7. as-is, maybe add a paragraph to the documentation stating that for media_types not in the [list](https://docs.starliteproject.dev/latest/usage/responses.html#media-type) the output will always be JSON, and that a user should use `bytes` to get the output they want.
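As an illustration of case 5., a handler for an exotic media type can take full control by returning `bytes`; the `generate_pdf` helper below is hypothetical:
```python
from starlite import get


@get("/report", media_type="application/pdf")
def retrieve_report() -> bytes:
    # bytes are sent as-is, regardless of the declared media type
    return generate_pdf()  # hypothetical helper producing raw PDF bytes
```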
> Regarding 7., for clarity I would like to get an exception. This would show me that starlite can't render the given type for the stated media_type (because it doesn't know what that media_type means) and that I should do it myself
That's a very reasonable approach I think. @starlite-api/maintainers?
Do you want to add a PR, since you've apparently given it a good amount of consideration already?
I'm good with this, thanks @NomAnor for your efforts
All sounds good to me. I'm in favour of raising for 7 also, as long as the check is made during startup and we can have a nice exception message that details the error and the alternatives for the user.
> thanks @NomAnor for your efforts
Seconded!
Is this issue resolved, @NomAnor? Should we close this issue, or is there a PR incoming? | 2023-05-13T15:30:16 |
litestar-org/litestar | 1,676 | litestar-org__litestar-1676 | [
"4321",
"1234"
] | 25eecfa48b35230e41026e75d59bd6615e6c15b5 | diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py
--- a/litestar/contrib/sqlalchemy/types.py
+++ b/litestar/contrib/sqlalchemy/types.py
@@ -35,7 +35,7 @@ class GUID(TypeDecorator):
cache_ok = True
python_type = type(uuid.UUID)
- def __init__(self, binary: bool = True) -> None:
+ def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None:
self.binary = binary
def load_dialect_impl(self, dialect: Dialect) -> Any:
@@ -75,13 +75,8 @@ def to_uuid(value: Any) -> uuid.UUID | None:
return cast("uuid.UUID | None", value)
-class JSON(_JSON):
- """Platform-independent JSON type.
+JSON = _JSON().with_variant(PG_JSONB, "postgresql")
+"""Platform-independent JSON type.
Uses JSONB type for postgres, otherwise uses the generic JSON data type.
- """
-
- def load_dialect_impl(self, dialect: Dialect) -> Any:
- if dialect.name == "postgresql":
- return dialect.type_descriptor(PG_JSONB()) # type: ignore[no-untyped-call]
- return dialect.type_descriptor(_JSON())
+"""
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
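For context, this is roughly how importlib_resources exposes package data; the package name is illustrative. The returned traversable may live inside a wheel or zip, so there is no on-disk directory for pydantic's ``DirectoryPath`` to validate:
```python
from importlib_resources import files

static_root = files("my_package") / "static"  # "my_package" is a placeholder
# True for the Traversable even when no real directory exists on disk
print(static_root.is_dir())
```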
| I agree. Do you want to submit a PR? | 2023-05-15T23:11:33 |
|
litestar-org/litestar | 1,678 | litestar-org__litestar-1678 | [
"1234",
"4321"
] | 293b17e828df437903f697417a2c84437d2c55f5 | diff --git a/litestar/datastructures/state.py b/litestar/datastructures/state.py
--- a/litestar/datastructures/state.py
+++ b/litestar/datastructures/state.py
@@ -2,7 +2,10 @@
from copy import copy, deepcopy
from threading import RLock
-from typing import Any, Callable, Generator, Iterable, Iterator, Mapping, MutableMapping
+from typing import TYPE_CHECKING, Any, Callable, Generator, Iterable, Iterator, Mapping, MutableMapping
+
+if TYPE_CHECKING:
+ from typing_extensions import Self
__all__ = ("ImmutableState", "State")
@@ -13,7 +16,10 @@ class ImmutableState(Mapping[str, Any]):
It can be accessed using dot notation while exposing dict like functionalities.
"""
- __slots__ = ("_state",)
+ __slots__ = (
+ "_state",
+ "_deep_copy",
+ )
_state: dict[str, Any]
@@ -56,13 +62,13 @@ def __init__(
assert "first" not in state
"""
-
if isinstance(state, ImmutableState):
state = state._state
if not isinstance(state, dict) and isinstance(state, Iterable):
state = dict(state)
+ super().__setattr__("_deep_copy", deep_copy)
super().__setattr__("_state", deepcopy(state) if deep_copy else state)
def __bool__(self) -> bool:
@@ -116,12 +122,12 @@ def __getattr__(self, key: str) -> Any:
except KeyError as e:
raise AttributeError from e
- def __copy__(self) -> ImmutableState:
+ def __copy__(self) -> Self:
"""Return a shallow copy of the given state object.
Customizes how the builtin "copy" function will work.
"""
- return self.__class__(deepcopy(self._state))
+ return self.__class__(self._state, deep_copy=self._deep_copy)
def mutable_copy(self) -> State:
"""Return a mutable copy of the state object.
@@ -129,7 +135,7 @@ def mutable_copy(self) -> State:
Returns:
A ``State``
"""
- return State(self._state)
+ return State(self._state, deep_copy=self._deep_copy)
def dict(self) -> dict[str, Any]:
"""Return a shallow copy of the wrapped dict.
@@ -147,7 +153,7 @@ def __get_validators__(
yield cls.validate
@classmethod
- def validate(cls, value: ImmutableState | dict[str, Any] | Iterable[tuple[str, Any]]) -> ImmutableState: # type: ignore[valid-type]
+ def validate(cls, value: ImmutableState | dict[str, Any] | Iterable[tuple[str, Any]]) -> Self: # type: ignore[valid-type]
"""Parse a value and instantiate state inside a SignatureModel. This allows us to use custom subclasses of
state, as well as allows users to decide whether state is mutable or immutable.
@@ -157,7 +163,8 @@ def validate(cls, value: ImmutableState | dict[str, Any] | Iterable[tuple[str, A
Returns:
An ImmutableState instance
"""
- return cls(value)
+ deep_copy = value._deep_copy if isinstance(value, ImmutableState) else False
+ return cls(value, deep_copy=deep_copy)
class State(ImmutableState, MutableMapping[str, Any]):
@@ -289,13 +296,13 @@ def __delattr__(self, key: str) -> None:
except KeyError as e:
raise AttributeError from e
- def copy(self) -> State:
+ def copy(self) -> Self:
"""Return a shallow copy of the state object.
Returns:
A ``State``
"""
- return self.__class__(self.dict())
+ return self.__class__(self.dict(), deep_copy=self._deep_copy)
def immutable_copy(self) -> ImmutableState:
"""Return a shallow copy of the state object, setting it to be frozen.
@@ -303,4 +310,4 @@ def immutable_copy(self) -> ImmutableState:
Returns:
A ``State``
"""
- return ImmutableState(self)
+ return ImmutableState(self, deep_copy=self._deep_copy)
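A minimal sketch of the behavior this patch preserves, mirroring the new test below:
```python
import typing
from copy import copy

from litestar.datastructures import ImmutableState

# a module cannot be deep-copied, so the state opts out via deep_copy=False
state = ImmutableState({"module": typing}, deep_copy=False)

# previously this raised, because __copy__ always deep-copied the wrapped dict;
# the deep_copy flag is now carried over to copies
copy(state)
```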
| diff --git a/tests/datastructures/test_state.py b/tests/datastructures/test_state.py
--- a/tests/datastructures/test_state.py
+++ b/tests/datastructures/test_state.py
@@ -1,4 +1,7 @@
-from typing import Any, Type
+from __future__ import annotations
+
+from copy import copy
+from typing import Any
import pytest
@@ -7,7 +10,7 @@
@pytest.mark.parametrize("state_class", (ImmutableState, State))
-def test_state_immutable_mapping(state_class: Type[ImmutableState]) -> None:
+def test_state_immutable_mapping(state_class: type[ImmutableState]) -> None:
state_dict = {"first": 1, "second": 2, "third": 3}
state = state_class(state_dict, deep_copy=True)
assert len(state) == 3
@@ -68,3 +71,17 @@ def test_state_copy() -> None:
copy = state.copy()
del state.key
assert copy.key
+
+
+def test_state_copy_deep_copy_false() -> None:
+ state = State({}, deep_copy=False)
+ assert state.copy()._deep_copy is False
+
+
+def test_unpicklable_deep_copy_false() -> None:
+ # a module cannot be deep copied
+ import typing
+
+ state = ImmutableState({"module": typing}, deep_copy=False)
+ copy(state)
+ ImmutableState.validate(state)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-16T01:04:00 |
litestar-org/litestar | 1,682 | litestar-org__litestar-1682 | [
"4321",
"1234"
] | 21621eab36b8cc1e17d07f0559578181da22896e | diff --git a/litestar/_signature/models/attrs_signature_model.py b/litestar/_signature/models/attrs_signature_model.py
--- a/litestar/_signature/models/attrs_signature_model.py
+++ b/litestar/_signature/models/attrs_signature_model.py
@@ -221,8 +221,13 @@ def __init__(self) -> None:
# ensure attrs instances are not unstructured into dict
self.register_unstructure_hook_factory(
- lambda value: attrs.has(value) and AttrsSignatureModel not in list(value.__mro__),
- _pass_through_unstructure_hook,
+ # the first parameter is a predicate that tests the value. In this case we are testing for an attrs
+ # decorated class that does not have the AttrsSignatureModel anywhere in its mro chain.
+ lambda x: attrs.has(x) and AttrsSignatureModel not in list(x.__mro__),
+ # the "unstructuring" hook we are registering is a lambda that receives the class constructor and returns
+ # another lambda that will take a value and receive it unmodified.
+ # this is a hack to ensure that no attrs constructors are called during unstructuring.
+ lambda x: lambda x: x,
)
for cls, structure_hook in hooks:
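For readers unfamiliar with the cattrs API, here is a standalone sketch of the same pass-through trick, using a toy class rather than Litestar internals:
```python
import attrs
import cattrs


@attrs.define
class Point:
    x: int
    y: int


converter = cattrs.Converter()
# predicate: any attrs-decorated class; factory: build a hook that returns values untouched
converter.register_unstructure_hook_factory(attrs.has, lambda cls: lambda value: value)

point = Point(1, 2)
assert converter.unstructure(point) is point  # not unstructured into a dict
```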
| diff --git a/tests/kwargs/test_attrs_data.py b/tests/kwargs/test_attrs_data.py
new file mode 100644
--- /dev/null
+++ b/tests/kwargs/test_attrs_data.py
@@ -0,0 +1,22 @@
+from attrs import define
+
+from litestar import post
+from litestar.status_codes import HTTP_201_CREATED
+from litestar.testing import create_test_client
+
+
+def test_parsing_of_attrs_data() -> None:
+ @define(slots=True, frozen=True)
+ class AttrsUser:
+ name: str
+ email: str
+
+ @post("/")
+ async def attrs_data(data: AttrsUser) -> AttrsUser:
+ return data
+
+ with create_test_client([attrs_data]) as client:
+ response = client.post("/", json={"name": "foo", "email": "[email protected]"})
+ assert response.status_code == HTTP_201_CREATED
+ assert response.json().get("name") == "foo"
+ assert response.json().get("email") == "[email protected]"
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-16T13:38:20 |
litestar-org/litestar | 1,686 | litestar-org__litestar-1686 | [
"4321",
"1234"
] | e1b07f6ed7a273efc0bd1d38eeb1b7fc5a1fc86f | diff --git a/litestar/dto/factory/_backends/msgspec/utils.py b/litestar/dto/factory/_backends/msgspec/utils.py
--- a/litestar/dto/factory/_backends/msgspec/utils.py
+++ b/litestar/dto/factory/_backends/msgspec/utils.py
@@ -24,7 +24,7 @@ def _create_msgspec_field(field_definition: FieldDefinition) -> MsgspecField | N
if field_definition.default is not Empty:
kws["default"] = field_definition.default
- if field_definition.default_factory is not None:
+ elif field_definition.default_factory is not None:
kws["default_factory"] = field_definition.default_factory
if not kws:
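A quick repro of why the `elif` matters; as far as I recall, msgspec rejects a field that sets both defaults (the exact error message may differ):
```python
import msgspec

try:
    msgspec.field(default=0, default_factory=int)
except TypeError as exc:
    print(exc)  # cannot set both `default` and `default_factory`
```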
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-17T17:28:33 |
|
litestar-org/litestar | 1,689 | litestar-org__litestar-1689 | [
"4321",
"1234"
] | f27b33c2142d64c3ec660b2f5f194803100550ae | diff --git a/litestar/cli/_utils.py b/litestar/cli/_utils.py
--- a/litestar/cli/_utils.py
+++ b/litestar/cli/_utils.py
@@ -67,6 +67,7 @@ class LitestarEnv:
host: str | None = None
port: int | None = None
reload: bool | None = None
+ reload_dirs: tuple[str, ...] | None = None
web_concurrency: int | None = None
is_app_factory: bool = False
@@ -97,6 +98,7 @@ def from_env(cls, app_path: str | None) -> LitestarEnv:
port = getenv("LITESTAR_PORT")
web_concurrency = getenv("WEB_CONCURRENCY")
+ reload_dirs = tuple(s.strip() for s in getenv("LITESTAR_RELOAD_DIRS", "").split(",") if s) or None
return cls(
app_path=loaded_app.app_path,
@@ -105,6 +107,7 @@ def from_env(cls, app_path: str | None) -> LitestarEnv:
host=getenv("LITESTAR_HOST"),
port=int(port) if port else None,
reload=_bool_from_env("LITESTAR_RELOAD"),
+ reload_dirs=reload_dirs,
web_concurrency=int(web_concurrency) if web_concurrency else None,
is_app_factory=loaded_app.is_factory,
cwd=cwd,
diff --git a/litestar/cli/commands/core.py b/litestar/cli/commands/core.py
--- a/litestar/cli/commands/core.py
+++ b/litestar/cli/commands/core.py
@@ -25,6 +25,9 @@ def _convert_uvicorn_args(args: dict[str, Any]) -> list[str]:
if isinstance(value, bool):
if value:
process_args.append(f"--{arg}")
+ elif isinstance(value, tuple):
+ for item in value:
+ process_args.append(f"--{arg}={item}")
else:
process_args.append(f"--{arg}={value}")
@@ -59,12 +62,14 @@ def info_command(app: Litestar) -> None:
)
@option("--host", help="Server under this host", default="127.0.0.1", show_default=True)
@option("--debug", help="Run app in debug mode", is_flag=True)
+@option("--reload-dir", help="Directories to watch for file changes", multiple=True)
def run_command(
reload: bool,
port: int,
web_concurrency: int,
host: str,
debug: bool,
+ reload_dir: tuple[str, ...],
env: LitestarEnv,
app: Litestar,
) -> None:
@@ -87,14 +92,20 @@ def run_command(
# invoke uvicorn in a subprocess to be able to use the --reload flag. see
# https://github.com/litestar-org/litestar/issues/1191 and https://github.com/encode/uvicorn/issues/1045
+ reload_dirs = env.reload_dirs or reload_dir
+
process_args = {
- "reload": env.reload or reload,
+ "reload": env.reload or reload or bool(reload_dirs),
"host": env.host or host,
"port": env.port or port,
"workers": env.web_concurrency or web_concurrency,
"factory": env.is_app_factory,
}
- subprocess.run(["uvicorn", env.app_path, *_convert_uvicorn_args(process_args)], check=True) # noqa: S603 S607
+
+ if reload_dirs:
+ process_args["reload-dir"] = reload_dirs
+
+ subprocess.run(["uvicorn", env.app_path, *_convert_uvicorn_args(process_args)], check=True) # noqa: S603 S607
@command(name="routes")
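To illustrate the conversion, here is what `_convert_uvicorn_args` produces for a made-up argument mapping; reload directories can also come from the environment as a comma-separated `LITESTAR_RELOAD_DIRS` value:
```python
from litestar.cli.commands.core import _convert_uvicorn_args

args = _convert_uvicorn_args({"reload": True, "factory": False, "port": 8000, "reload-dir": ("src", "tests")})
# ["--reload", "--port=8000", "--reload-dir=src", "--reload-dir=tests"]
print(args)
```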
| diff --git a/tests/cli/test_core_commands.py b/tests/cli/test_core_commands.py
--- a/tests/cli/test_core_commands.py
+++ b/tests/cli/test_core_commands.py
@@ -32,6 +32,7 @@ def mock_subprocess_run(mocker: MockerFixture) -> MagicMock:
@pytest.mark.parametrize("reload", [True, False, None])
@pytest.mark.parametrize("web_concurrency", [2, None])
@pytest.mark.parametrize("app_dir", ["custom_subfolder", None])
[email protected]("reload_dir", [[".", "../somewhere_else"], None])
def test_run_command(
mocker: MockerFixture,
runner: CliRunner,
@@ -41,6 +42,7 @@ def test_run_command(
host: Optional[str],
web_concurrency: Optional[int],
app_dir: Optional[str],
+ reload_dir: Optional[List[str]],
custom_app_file: Optional[Path],
create_app_file: CreateAppFileFixture,
set_in_env: bool,
@@ -86,6 +88,13 @@ def test_run_command(
args.extend(["--web-concurrency", str(web_concurrency)])
else:
web_concurrency = 1
+
+ if reload_dir is not None:
+ if set_in_env:
+ monkeypatch.setenv("LITESTAR_RELOAD_DIRS", ",".join(reload_dir))
+ else:
+ args.extend([f"--reload-dir={s}" for s in reload_dir])
+
path = create_app_file(custom_app_file or "app.py", subdir=app_dir)
result = runner.invoke(cli_command, args)
@@ -94,10 +103,12 @@ def test_run_command(
assert result.exit_code == 0
expected_args = ["uvicorn", f"{path.stem}:app", f"--host={host}", f"--port={port}"]
- if reload:
+ if reload or reload_dir:
expected_args.append("--reload")
if web_concurrency:
expected_args.append(f"--workers={web_concurrency}")
+ if reload_dir:
+ expected_args.extend([f"--reload-dir={s}" for s in reload_dir])
mock_subprocess_run.assert_called_once()
assert sorted(mock_subprocess_run.call_args_list[0].args[0]) == sorted(expected_args)
mock_show_app_info.assert_called_once()
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-18T00:23:25 |
litestar-org/litestar | 1,690 | litestar-org__litestar-1690 | [
"1652",
"4321",
"1234"
] | f27b33c2142d64c3ec660b2f5f194803100550ae | diff --git a/litestar/handlers/http_handlers/base.py b/litestar/handlers/http_handlers/base.py
--- a/litestar/handlers/http_handlers/base.py
+++ b/litestar/handlers/http_handlers/base.py
@@ -417,14 +417,12 @@ def get_response_handler(self, is_response_type_data: bool = False) -> Callable[
handler_return_type = before_request_handler.parsed_signature.return_type
if not handler_return_type.is_subclass_of((Empty, NoneType)):
return_annotation = handler_return_type.annotation
- self._response_handler_mapping["response_type_handler"] = create_response_handler(
+ self._response_handler_mapping["response_type_handler"] = response_type_handler = create_response_handler(
cookies=cookies, after_request=after_request
)
if return_type.is_subclass_of(Response):
- self._response_handler_mapping["default_handler"] = self._response_handler_mapping[
- "response_type_handler"
- ]
+ self._response_handler_mapping["default_handler"] = response_type_handler
elif return_type.is_subclass_of(ResponseContainer):
self._response_handler_mapping["default_handler"] = create_response_container_handler(
after_request=after_request,
diff --git a/litestar/typing.py b/litestar/typing.py
--- a/litestar/typing.py
+++ b/litestar/typing.py
@@ -124,8 +124,8 @@ def is_collection(self) -> bool:
def is_subclass_of(self, cl: type[Any] | tuple[type[Any], ...]) -> bool:
"""Whether the annotation is a subclass of the given type.
- Where ``self.annotation`` is a union type, this method will always return ``False``. While this is not
- strictly correct, we intend on revisiting this once a concrete use-case is to hand.
+ Where ``self.annotation`` is a union type, this method will return ``True`` when all members of the union are
+ a subtype of ``cl``, otherwise, ``False``.
Args:
cl: The type to check, or tuple of types. Passed as 2nd argument to ``issubclass()``.
@@ -134,7 +134,11 @@ def is_subclass_of(self, cl: type[Any] | tuple[type[Any], ...]) -> bool:
Whether the annotation is a subtype of the given type(s).
"""
if self.origin:
+ if self.origin in UNION_TYPES:
+ return all(t.is_subclass_of(cl) for t in self.inner_types)
+
return self.origin not in UNION_TYPES and issubclass(self.origin, cl)
+
if self.annotation is AnyStr:
return issubclass(str, cl) or issubclass(bytes, cl)
return self.annotation is not Any and not self.is_type_var and issubclass(self.annotation, cl)
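The new rule in a nutshell; `ParsedType` lives in the `litestar.typing` module patched above:
```python
from typing import Union

from litestar.typing import ParsedType

assert ParsedType(Union[bool, int]).is_subclass_of(int)      # every union member subclasses int
assert not ParsedType(Union[str, int]).is_subclass_of(int)   # str is not a subclass of int
```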
| diff --git a/tests/utils/test_signature.py b/tests/utils/test_signature.py
--- a/tests/utils/test_signature.py
+++ b/tests/utils/test_signature.py
@@ -308,6 +308,7 @@ def test_parsed_type_is_subclass_of() -> None:
assert ParsedType(List[int]).is_subclass_of(list) is True
assert ParsedType(List[int]).is_subclass_of(int) is False
assert ParsedType(Optional[int]).is_subclass_of(int) is False
+ assert ParsedType(Union[bool, int]).is_subclass_of(int) is True
def test_parsed_type_has_inner_subclass_of() -> None:
| Weird Response when both "Template" and "Redirect" are set as the return value type on the same route
### Discussed in https://github.com/orgs/litestar-org/discussions/1646
Originally posted by **4n1qz5skwv**, May 10, 2023:
```python
from pathlib import Path
from litestar import Litestar, Request, get, HttpMethod, route
from litestar.contrib.jinja import JinjaTemplateEngine
from litestar.response_containers import Template, Redirect
from litestar.template.config import TemplateConfig
@get("/home")
async def aaa() -> str:
return 'home!'
@route(path="/some-path", http_method=[HttpMethod.GET, HttpMethod.POST])
async def bbb(request: Request) -> Template | Redirect:
if request.method == "POST":
# may be login and move to home
return Redirect(path='/home', status_code=302)
# show template on opening page
return Template(name="abc.html")
app = Litestar(route_handlers=[aaa, bbb],
template_config=TemplateConfig(directory=Path(__file__).parent, engine=JinjaTemplateEngine))
if __name__ == '__main__':
import uvicorn
uvicorn.run('0:app')
```
**curl**
```
Microsoft Windows [Version 10.0.19044.2846]
(c) Microsoft Corporation. All rights reserved.
C:\Users\User-PC>curl http://127.0.0.1:8000/some-path -X GET
{"name":"abc.html","context":{},"background":null,"headers":{},"cookies":[],"media_type":null,"encoding":"utf-8"}
C:\Users\User-PC>curl http://127.0.0.1:8000/some-path -X POST
{"path":"/home","background":null,"headers":{},"cookies":[],"media_type":null,"encoding":"utf-8","status_code":302}
C:\Users\User-PC>
```
**logs**
```
INFO: Started server process [14912]
INFO: Waiting for application startup.
INFO: Application startup complete.
INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
INFO: 127.0.0.1:52708 - "GET /some-path HTTP/1.1" 201 Created
INFO: 127.0.0.1:52709 - "POST /some-path HTTP/1.1" 201 Created
INFO: Shutting down
INFO: Waiting for application shutdown.
INFO: Application shutdown complete.
INFO: Finished server process [14912]
```
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| This case is fairly straightforward if we consider that a union is a subtype of `X` if all inner types of the union are also subtypes of `X`, which on the surface seems OK to me.
Specific to this case, can we say that `Template | Redirect` is a subtype of `Response` because both `Template` and `Redirect` _are_ subtypes of `Response`.
This is relevant because:
https://github.com/litestar-org/litestar/blob/f27b33c2142d64c3ec660b2f5f194803100550ae/litestar/handlers/http_handlers/base.py#L424-L427
At the moment, `return_type.is_subclass_of(Response)` is returning `False` - however, if that is allowed to return `True` than this "just works" because both `Template` and `Redirect` are serviced by the same response handler function.
It was a deliberate choice at the time to have any union type return `False` from `ParsedType.is_subclass_of()` with the intention of waiting for some real world cases to arrive - now we have one.
@litestar-org/maintainers WDT? Is `Union[SubclassOfX1, SubclassOfX2]` a subtype of `X`?
> This case is fairly straightforward if we consider that a union is a subtype of `X` if all inner types of the union are also subtypes of `X`, which on the surface seems OK to me.
>
> Specific to this case, can we say that `Template | Redirect` is a subtype of `Response` because both `Template` and `Redirect` _are_ subtypes of `Response`.
>
> This is relevant because:
>
> https://github.com/litestar-org/litestar/blob/f27b33c2142d64c3ec660b2f5f194803100550ae/litestar/handlers/http_handlers/base.py#L424-L427
>
> At the moment, `return_type.is_subclass_of(Response)` is returning `False` - however, if that is allowed to return `True` than this "just works" because both `Template` and `Redirect` are serviced by the same response handler function.
>
> It was a deliberate choice at the time to have any union type return `False` from `ParsedType.is_subclass_of()` with the intention of waiting for some real world cases to arrive - now we have one.
>
> @litestar-org/maintainers WDT? Is `Union[SubclassOfX1, SubclassOfX2]` a subtype of `X`?
Sure
I agree. Do you want to submit a PR? | 2023-05-18T07:28:23 |
litestar-org/litestar | 1,694 | litestar-org__litestar-1694 | [
"4321",
"1234"
] | 54b549e99ad34dc859b992b87ed6f5d9d69d5a2b | diff --git a/litestar/contrib/sqlalchemy/repository.py b/litestar/contrib/sqlalchemy/repository.py
--- a/litestar/contrib/sqlalchemy/repository.py
+++ b/litestar/contrib/sqlalchemy/repository.py
@@ -345,7 +345,7 @@ async def update_many(self, data: list[ModelT]) -> list[ModelT]:
"""
data_to_update: list[dict[str, Any]] = [v.to_dict() if isinstance(v, self.model_type) else v for v in data] # type: ignore
with wrap_sqlalchemy_exception():
- if self.session.bind.dialect.update_executemany_returning:
+ if self.session.bind.dialect.update_executemany_returning and self.session.bind.dialect.name != "oracle":
instances = list(
await self.session.scalars( # type: ignore
update(self.model_type).returning(self.model_type),
@@ -831,7 +831,11 @@ def update_many(self, data: list[ModelT]) -> list[ModelT]:
"""
data_to_update: list[dict[str, Any]] = [v.to_dict() if isinstance(v, self.model_type) else v for v in data] # type: ignore
with wrap_sqlalchemy_exception():
- if self.session.bind and self.session.bind.dialect.update_executemany_returning:
+ if (
+ self.session.bind
+ and self.session.bind.dialect.update_executemany_returning
+ and self.session.bind.dialect.name != "oracle"
+ ):
instances = list(
self.session.scalars(
update(self.model_type).returning(self.model_type),
diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py
--- a/litestar/contrib/sqlalchemy/types.py
+++ b/litestar/contrib/sqlalchemy/types.py
@@ -3,6 +3,8 @@
import uuid
from typing import TYPE_CHECKING, Any, cast
+from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB
+from sqlalchemy.dialects.oracle import RAW as ORA_RAW
from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator
@@ -24,7 +26,7 @@
class GUID(TypeDecorator):
"""Platform-independent GUID type.
- Uses PostgreSQL's UUID type, otherwise uses
+ Uses PostgreSQL's UUID type, Oracle's RAW(16) type, otherwise uses
BINARY(16) or CHAR(32), storing as stringified hex values.
Will accept stringified UUIDs as a hexstring or an actual UUID
@@ -41,6 +43,8 @@ def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None:
def load_dialect_impl(self, dialect: Dialect) -> Any:
if dialect.name == "postgresql":
return dialect.type_descriptor(PG_UUID())
+ if dialect.name == "oracle":
+ return dialect.type_descriptor(ORA_RAW(16))
if self.binary:
return dialect.type_descriptor(BINARY(16))
return dialect.type_descriptor(CHAR(32))
@@ -53,6 +57,8 @@ def process_bind_param(self, value: bytes | str | uuid.UUID | None, dialect: Dia
value = self.to_uuid(value)
if value is None:
return value
+ if dialect.name == "oracle":
+ return value.bytes
return value.bytes if self.binary else value.hex
def process_result_value(self, value: bytes | str | uuid.UUID | None, dialect: Dialect) -> uuid.UUID | None:
@@ -75,8 +81,8 @@ def to_uuid(value: Any) -> uuid.UUID | None:
return cast("uuid.UUID | None", value)
-JSON = _JSON().with_variant(PG_JSONB, "postgresql")
+JSON = _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_BLOB, "oracle")
"""Platform-independent JSON type.
- Uses JSONB type for postgres, otherwise uses the generic JSON data type.
+ Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type.
"""
| diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -23,13 +23,16 @@
cast,
)
+import anyio
import asyncmy
import asyncpg
+import oracledb
import pytest
from _pytest.fixtures import FixtureRequest
from _pytest.nodes import Item
from fakeredis.aioredis import FakeRedis
from freezegun import freeze_time
+from oracledb.exceptions import DatabaseError, OperationalError
from pytest_docker.plugin import Services
from pytest_lazyfixture import lazy_fixture
from redis.asyncio import Redis as AsyncRedis
@@ -50,6 +53,7 @@
from litestar.stores.memory import MemoryStore
from litestar.stores.redis import RedisStore
from litestar.testing import RequestFactory
+from litestar.utils.sync import AsyncCallable
if TYPE_CHECKING:
from types import ModuleType
@@ -479,6 +483,45 @@ async def postgres_service(docker_ip: str, docker_services: Services) -> None:
await wait_until_responsive(timeout=30.0, pause=0.1, check=postgres_responsive, host=docker_ip)
+def oracle_responsive(host: str) -> bool:
+ """
+ Args:
+ host: docker IP address.
+
+ Returns:
+ Boolean indicating if we can connect to the database.
+ """
+
+ try:
+ conn = oracledb.connect(
+ host=host,
+ port=1512,
+ user="app",
+ service_name="xepdb1",
+ password="super-secret",
+ )
+ with conn.cursor() as cursor:
+ cursor.execute("SELECT 1 FROM dual")
+ resp = cursor.fetchone()
+ return bool(resp[0] == 1)
+ except (OperationalError, DatabaseError): # pyright: ignore
+ return False
+
+
+@docker_service_registry.register("oracle")
+async def oracle_service(docker_ip: str, docker_services: Services) -> None: # pylint: disable=unused-argument
+ """Starts containers for required services, fixture waits until they are
+ responsive before returning.
+
+ Args:
+ docker_ip:
+ docker_services:
+ """
+ # oracle takes a while to mount and open initially.
+ await anyio.sleep(20)
+ await wait_until_responsive(timeout=30.0, pause=0.1, check=AsyncCallable(oracle_responsive), host=docker_ip)
+
+
# the monkeypatch fixture does not work with session scoped dependencies
@pytest.fixture(autouse=True, scope="session")
def disable_warn_implicit_sync_to_thread() -> Generator[None, None, None]:
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py
@@ -43,9 +43,12 @@ def seed_db(
with engine.begin() as conn:
base.orm_registry.metadata.drop_all(conn)
base.orm_registry.metadata.create_all(conn)
- conn.execute(insert(Author).values(raw_authors))
- conn.execute(insert(Ingredient).values(raw_ingredients))
- conn.execute(insert(Store).values(raw_stores))
+ for author in raw_authors:
+ conn.execute(insert(Author).values(author))
+ for ingredient in raw_ingredients:
+ conn.execute(insert(Ingredient).values(ingredient))
+ for store in raw_stores:
+ conn.execute(insert(Store).values(store))
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py
new file mode 100644
--- /dev/null
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py
@@ -0,0 +1,306 @@
+"""Unit tests for the SQLAlchemy Repository implementation for psycopg."""
+from __future__ import annotations
+
+import platform
+import sys
+from typing import Any, Generator
+
+import pytest
+from sqlalchemy import Engine, NullPool, create_engine
+from sqlalchemy.orm import Session, sessionmaker
+
+from tests.contrib.sqlalchemy.models import (
+ AuthorSyncRepository,
+ BookSyncRepository,
+ IngredientSyncRepository,
+ StoreSyncRepository,
+)
+from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_tests as st
+
+pytestmark = [
+ pytest.mark.skipif(sys.platform != "linux", reason="docker not available on this platform"),
+ pytest.mark.skipif(platform.uname()[4] != "x86_64", reason="oracle not available on this platform"),
+ pytest.mark.usefixtures("oracle_service"),
+]
+
+
[email protected]_oracledb
[email protected](name="engine")
+def fx_engine(docker_ip: str) -> Engine:
+ """Postgresql instance for end-to-end testing.
+
+ Args:
+ docker_ip: IP address for TCP connection to Docker containers.
+
+ Returns:
+ Sync SQLAlchemy engine instance.
+ """
+ return create_engine(
+ "oracle+oracledb://:@",
+ thick_mode=False,
+ connect_args={
+ "user": "app",
+ "password": "super-secret",
+ "host": docker_ip,
+ "port": 1512,
+ "service_name": "xepdb1",
+ "encoding": "UTF-8",
+ "nencoding": "UTF-8",
+ },
+ echo=True,
+ poolclass=NullPool,
+ )
+
+
[email protected]_oracledb
[email protected](
+ name="session",
+)
+def fx_session(
+ engine: Engine,
+ raw_authors: list[dict[str, Any]],
+ raw_books: list[dict[str, Any]],
+ raw_stores: list[dict[str, Any]],
+ raw_ingredients: list[dict[str, Any]],
+) -> Generator[Session, None, None]:
+ session = sessionmaker(bind=engine)()
+ st.seed_db(engine, raw_authors, raw_books, raw_stores, raw_ingredients)
+ try:
+ yield session
+ finally:
+ session.rollback()
+ session.close()
+
+
[email protected]_oracledb
[email protected](name="author_repo")
+def fx_author_repo(session: Session) -> AuthorSyncRepository:
+ return AuthorSyncRepository(session=session)
+
+
[email protected]_oracledb
[email protected](name="book_repo")
+def fx_book_repo(session: Session) -> BookSyncRepository:
+ return BookSyncRepository(session=session)
+
+
[email protected](name="store_repo")
+def fx_store_repo(session: Session) -> StoreSyncRepository:
+ return StoreSyncRepository(session=session)
+
+
[email protected](name="ingredient_repo")
+def fx_ingredient_repo(session: Session) -> IngredientSyncRepository:
+ return IngredientSyncRepository(session=session)
+
+
[email protected]_oracledb
+def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy filter by kwargs with invalid column name.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_filter_by_kwargs_with_incorrect_attribute_name(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_count_method(author_repo: AuthorSyncRepository, store_repo: StoreSyncRepository) -> None:
+ """Test SQLALchemy count.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_count_method(author_repo=author_repo, store_repo=store_repo)
+
+
[email protected]_oracledb
+def test_repo_list_and_count_method(
+ raw_authors: list[dict[str, Any]],
+ author_repo: AuthorSyncRepository,
+ raw_stores: list[dict[str, Any]],
+ store_repo: StoreSyncRepository,
+) -> None:
+ """Test SQLALchemy list with count.
+
+ Args:
+ raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
+ author_repo (AuthorRepository): The author mock repository
+ raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
+ store_repo (StoreRepository): The store mock repository
+ """
+ st.test_repo_list_and_count_method(
+ raw_authors=raw_authors, author_repo=author_repo, raw_stores=raw_stores, store_repo=store_repo
+ )
+
+
[email protected]_oracledb
+def test_repo_list_and_count_method_empty(book_repo: BookSyncRepository) -> None:
+ """Test SQLALchemy list with count.
+
+ Args:
+ raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
+ author_repo (AuthorRepository): The author mock repository
+ """
+
+ st.test_repo_list_and_count_method_empty(book_repo=book_repo)
+
+
[email protected]_oracledb
+def test_repo_list_method(
+ raw_authors: list[dict[str, Any]],
+ author_repo: AuthorSyncRepository,
+ raw_stores: list[dict[str, Any]],
+ store_repo: StoreSyncRepository,
+) -> None:
+ """Test SQLALchemy list.
+
+ Args:
+ raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
+ author_repo (AuthorRepository): The author mock repository
+ raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
+ store_repo (StoreRepository): The store mock repository
+ """
+ st.test_repo_list_method(
+ raw_authors=raw_authors, author_repo=author_repo, raw_stores=raw_stores, store_repo=store_repo
+ )
+
+
[email protected]_oracledb
+def test_repo_add_method(
+ raw_authors: list[dict[str, Any]],
+ author_repo: AuthorSyncRepository,
+ raw_stores: list[dict[str, Any]],
+ store_repo: StoreSyncRepository,
+) -> None:
+ """Test SQLALchemy list.
+
+ Args:
+ raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
+ author_repo (AuthorRepository): The author mock repository
+ raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
+ store_repo (StoreRepository): The store mock repository
+ """
+ st.test_repo_add_method(
+ raw_authors=raw_authors, author_repo=author_repo, raw_stores=raw_stores, store_repo=store_repo
+ )
+
+
[email protected]_oracledb
+def test_repo_add_many_method(raw_authors: list[dict[str, Any]], author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Add Many.
+
+ Args:
+ raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_add_many_method(raw_authors=raw_authors, author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_update_many_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Update Many.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_update_many_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_exists_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy exists.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_exists_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_update_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Update.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_update_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_delete_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy delete.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_delete_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_delete_many_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy delete many.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_delete_many_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_get_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Get.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_get_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_get_one_or_none_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Get One.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_get_one_or_none_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_get_one_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Get One.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_get_one_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_get_or_create_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Get or create.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_get_or_create_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_get_or_create_match_filter(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Get or create with a match filter
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_get_or_create_match_filter(author_repo=author_repo)
+
+
+def test_repo_upsert_method(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy upsert.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_upsert_method(author_repo=author_repo)
diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml
--- a/tests/docker-compose.yml
+++ b/tests/docker-compose.yml
@@ -18,6 +18,14 @@ services:
MYSQL_DATABASE: db
MYSQL_ROOT_HOST: "%"
LANG: C.UTF-8
+ oracle:
+ image: gvenzl/oracle-xe:latest-faststart
+ ports:
+ - "1512:1521" # use a non-standard port here
+ environment:
+ ORACLE_PASSWORD: super-secret
+ APP_USER_PASSWORD: super-secret
+ APP_USER: app
redis:
image: redis
ports:
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-19T16:33:05 |
litestar-org/litestar | 1,695 | litestar-org__litestar-1695 | [
"4321",
"1234"
] | 1c29f098e1d53966d9949bb873c56877b3cfa39f | diff --git a/litestar/contrib/jwt/jwt_token.py b/litestar/contrib/jwt/jwt_token.py
--- a/litestar/contrib/jwt/jwt_token.py
+++ b/litestar/contrib/jwt/jwt_token.py
@@ -1,13 +1,18 @@
from __future__ import annotations
+import dataclasses
from dataclasses import asdict, dataclass, field
from datetime import datetime, timezone
-from typing import cast
+from typing import TYPE_CHECKING, Any, cast
from jose import JWSError, JWTError, jwt
from litestar.exceptions import ImproperlyConfiguredException, NotAuthorizedException
+if TYPE_CHECKING:
+ from typing_extensions import Self
+
+
__all__ = ("Token",)
@@ -42,6 +47,8 @@ class Token:
"""Audience - intended audience."""
jti: str | None = field(default=None)
"""JWT ID - a unique identifier of the JWT between different issuers."""
+ extras: dict[str, Any] = field(default_factory=dict)
+ """Extra fields that were found on the JWT token."""
def __post_init__(self) -> None:
if len(self.sub) < 1:
@@ -63,8 +70,8 @@ def __post_init__(self) -> None:
else:
raise ImproperlyConfiguredException("iat must be a current or past time")
- @staticmethod
- def decode(encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Token:
+ @classmethod
+ def decode(cls, encoded_token: str, secret: str | dict[str, str], algorithm: str) -> Self:
"""Decode a passed in token string and returns a Token instance.
Args:
@@ -82,7 +89,12 @@ def decode(encoded_token: str, secret: str | dict[str, str], algorithm: str) ->
payload = jwt.decode(token=encoded_token, key=secret, algorithms=[algorithm], options={"verify_aud": False})
exp = datetime.fromtimestamp(payload.pop("exp"), tz=timezone.utc)
iat = datetime.fromtimestamp(payload.pop("iat"), tz=timezone.utc)
- return Token(exp=exp, iat=iat, **payload)
+ field_names = {f.name for f in dataclasses.fields(Token)}
+ extra_fields = payload.keys() - field_names
+ extras = payload.pop("extras", {})
+ for key in extra_fields:
+ extras[key] = payload.pop(key)
+ return cls(exp=exp, iat=iat, **payload, extras=extras)
except (KeyError, JWTError, ImproperlyConfiguredException) as e:
raise NotAuthorizedException("Invalid token") from e
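A short usage sketch of the new behavior; the secret and claims are made up, mirroring the test below:
```python
from datetime import datetime, timedelta, timezone

from jose import jwt

from litestar.contrib.jwt import Token

secret = "not-a-real-secret"
claims = {
    "sub": "user-1",
    "iat": datetime.now(timezone.utc),
    "exp": datetime.now(timezone.utc) + timedelta(minutes=5),
    "email": "[email protected]",  # not a declared Token field
}
token = Token.decode(jwt.encode(claims, secret, algorithm="HS256"), secret, "HS256")
assert token.extras == {"email": "[email protected]"}
```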
| diff --git a/tests/contrib/jwt/test_token.py b/tests/contrib/jwt/test_token.py
--- a/tests/contrib/jwt/test_token.py
+++ b/tests/contrib/jwt/test_token.py
@@ -8,6 +8,7 @@
import pytest
from hypothesis import given
from hypothesis.strategies import datetimes
+from jose import jwt
from litestar.contrib.jwt import Token
from litestar.exceptions import ImproperlyConfiguredException, NotAuthorizedException
@@ -133,3 +134,28 @@ def test_sub_validation() -> None:
iat=(datetime.now() - timedelta(seconds=30)),
exp=(datetime.now() + timedelta(seconds=120)),
)
+
+
+def test_extra_fields() -> None:
+ raw_token = {
+ "sub": secrets.token_hex(),
+ "iat": datetime.now(timezone.utc),
+ "azp": "extra value",
+ "email": "[email protected]",
+ "exp": (datetime.now(timezone.utc) + timedelta(seconds=30)),
+ }
+ token_secret = secrets.token_hex()
+ encoded_token = jwt.encode(claims=raw_token, key=token_secret, algorithm="HS256")
+ token = Token.decode(encoded_token=encoded_token, secret=token_secret, algorithm="HS256")
+ assert "azp" in token.extras
+ assert "email" in token.extras
+
+ raw_token = {
+ "sub": secrets.token_hex(),
+ "iat": datetime.now(timezone.utc),
+ "exp": (datetime.now(timezone.utc) + timedelta(seconds=30)),
+ }
+ token_secret = secrets.token_hex()
+ encoded_token = jwt.encode(claims=raw_token, key=token_secret, algorithm="HS256")
+ token = Token.decode(encoded_token=encoded_token, secret=token_secret, algorithm="HS256")
+ assert token.extras == {}
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-19T23:12:05 |
litestar-org/litestar | 1,702 | litestar-org__litestar-1702 | [
"4321",
"1234",
"1700"
] | 83dbba6fa642f76efb44e47b715c1be0b7b42842 | diff --git a/litestar/contrib/sqlalchemy/repository.py b/litestar/contrib/sqlalchemy/repository.py
--- a/litestar/contrib/sqlalchemy/repository.py
+++ b/litestar/contrib/sqlalchemy/repository.py
@@ -536,7 +536,7 @@ def _filter_on_datetime_field(
if before is not None:
statement = statement.where(field < before)
if after is not None:
- statement = statement.where(field > before)
+ statement = statement.where(field > after)
return statement
def _filter_select_by_kwargs(self, statement: SelectT, **kwargs: Any) -> SelectT:
@@ -1026,7 +1026,7 @@ def _filter_on_datetime_field(
if before is not None:
statement = statement.where(field < before)
if after is not None:
- statement = statement.where(field > before)
+ statement = statement.where(field > after)
return statement
def _filter_select_by_kwargs(self, statement: SelectT, **kwargs: Any) -> SelectT:
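With the fix, each bound now filters on its own value; a small usage sketch (the repository instance is the one from the tests below):
```python
from datetime import datetime

from litestar.contrib.repository.filters import BeforeAfter


async def list_spring_authors(author_repo):
    window = BeforeAfter(
        field_name="created",
        after=datetime(2023, 4, 1),   # now compiles to `created > after` (was `> before`)
        before=datetime(2023, 6, 1),  # compiles to `created < before`
    )
    return await author_repo.list(window)
```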
| diff --git a/tests/contrib/sqlalchemy/repository/conftest.py b/tests/contrib/sqlalchemy/repository/conftest.py
--- a/tests/contrib/sqlalchemy/repository/conftest.py
+++ b/tests/contrib/sqlalchemy/repository/conftest.py
@@ -59,15 +59,15 @@ def fx_raw_authors() -> list[dict[str, Any]]:
"id": UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"),
"name": "Agatha Christie",
"dob": "1890-09-15",
- "created": "0001-01-01T00:00:00",
- "updated": "0001-01-01T00:00:00",
+ "created": "2023-05-01T00:00:00",
+ "updated": "2023-05-11T00:00:00",
},
{
"id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2",
"name": "Leo Tolstoy",
"dob": "1828-09-09",
- "created": "0001-01-01T00:00:00",
- "updated": "0001-01-01T00:00:00",
+ "created": "2023-03-01T00:00:00",
+ "updated": "2023-05-15T00:00:00",
},
]
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_async_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_async_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_async_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_async_tests.py
@@ -12,6 +12,7 @@
)
from litestar.contrib.repository.exceptions import RepositoryError
+from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, OrderBy, SearchFilter
from litestar.contrib.sqlalchemy import base
from tests.contrib.sqlalchemy.models import (
Author,
@@ -55,17 +56,17 @@ def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsync
"""Test SQLALchemy filter by kwargs with invalid column name.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
with pytest.raises(RepositoryError):
author_repo.filter_collection_by_kwargs(author_repo.statement, whoops="silly me")
async def test_repo_count_method(author_repo: AuthorAsyncRepository, store_repo: StoreAsyncRepository) -> None:
- """Test SQLALchemy count with asyncpg.
+ """Test SQLALchemy count.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
assert await author_repo.count() == 2
assert await store_repo.count() == 2
@@ -81,7 +82,7 @@ async def test_repo_list_and_count_method(
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
store_repo (StoreRepository): The store mock repository
"""
@@ -103,7 +104,7 @@ async def test_repo_list_and_count_method_empty(book_repo: BookAsyncRepository)
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
collection, count = await book_repo.list_and_count()
@@ -118,11 +119,11 @@ async def test_repo_list_method(
raw_stores: list[dict[str, Any]],
store_repo: StoreAsyncRepository,
) -> None:
- """Test SQLALchemy list with asyncpg.
+ """Test SQLALchemy list.
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
store_repo (StoreRepository): The store mock repository
"""
@@ -143,11 +144,11 @@ async def test_repo_add_method(
raw_stores: list[dict[str, Any]],
store_repo: StoreAsyncRepository,
) -> None:
- """Test SQLALchemy Add with asyncpg.
+ """Test SQLALchemy Add.
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
store_repo (StoreRepository): The store mock repository
"""
@@ -172,11 +173,11 @@ async def test_repo_add_method(
async def test_repo_add_many_method(raw_authors: list[dict[str, Any]], author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Add Many with asyncpg.
+ """Test SQLALchemy Add Many.
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
exp_count = len(raw_authors) + 2
objs = await author_repo.add_many(
@@ -192,10 +193,10 @@ async def test_repo_add_many_method(raw_authors: list[dict[str, Any]], author_re
async def test_repo_update_many_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Update Many with asyncpg.
+ """Test SQLALchemy Update Many.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
objs = await author_repo.list()
for idx, obj in enumerate(objs):
@@ -206,20 +207,20 @@ async def test_repo_update_many_method(author_repo: AuthorAsyncRepository) -> No
async def test_repo_exists_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy exists with asyncpg.
+ """Test SQLALchemy exists.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
exists = await author_repo.exists(id=UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert exists
async def test_repo_update_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Update with asyncpg.
+ """Test SQLALchemy Update.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
obj = await author_repo.get(UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
obj.name = "Updated Name"
@@ -228,20 +229,20 @@ async def test_repo_update_method(author_repo: AuthorAsyncRepository) -> None:
async def test_repo_delete_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy delete with asyncpg.
+ """Test SQLALchemy delete.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
obj = await author_repo.delete(UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj.id == UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b")
async def test_repo_delete_many_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy delete many with asyncpg.
+ """Test SQLALchemy delete many.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
data_to_insert = []
for chunk in range(0, 1000):
@@ -262,20 +263,20 @@ async def test_repo_delete_many_method(author_repo: AuthorAsyncRepository) -> No
async def test_repo_get_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Get with asyncpg.
+ """Test SQLALchemy Get.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
obj = await author_repo.get(UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj.name == "Agatha Christie"
async def test_repo_get_one_or_none_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Get One with asyncpg.
+ """Test SQLALchemy Get One.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
obj = await author_repo.get_one_or_none(id=UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj is not None
@@ -285,10 +286,10 @@ async def test_repo_get_one_or_none_method(author_repo: AuthorAsyncRepository) -
async def test_repo_get_one_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Get One with asyncpg.
+ """Test SQLALchemy Get One.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
obj = await author_repo.get_one(id=UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj is not None
@@ -298,10 +299,10 @@ async def test_repo_get_one_method(author_repo: AuthorAsyncRepository) -> None:
async def test_repo_get_or_create_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy Get or create with asyncpg.
+ """Test SQLALchemy Get or create.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
existing_obj, existing_created = await author_repo.get_or_create(name="Agatha Christie")
assert existing_obj.id == UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b")
@@ -316,7 +317,7 @@ async def test_repo_get_or_create_match_filter(author_repo: AuthorAsyncRepositor
"""Test SQLALchemy Get or create with a match filter
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
now = datetime.now()
existing_obj, existing_created = await author_repo.get_or_create(
@@ -328,10 +329,10 @@ async def test_repo_get_or_create_match_filter(author_repo: AuthorAsyncRepositor
async def test_repo_upsert_method(author_repo: AuthorAsyncRepository) -> None:
- """Test SQLALchemy upsert with asyncpg.
+ """Test SQLALchemy upsert.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorAsyncRepository): The author mock repository
"""
existing_obj = await author_repo.get_one(name="Agatha Christie")
existing_obj.name = "Agatha C."
@@ -347,3 +348,74 @@ async def test_repo_upsert_method(author_repo: AuthorAsyncRepository) -> None:
upsert2_insert_obj = await author_repo.upsert(Author(id=uuid4(), name="Another Author"))
assert upsert2_insert_obj.id is not None
assert upsert2_insert_obj.name == "Another Author"
+
+
+async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy before after filter.
+
+ Args:
+ author_repo (AuthorAsyncRepository): The author mock repository
+ """
+ before_filter = BeforeAfter(
+ field_name="created", before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), after=None
+ )
+ existing_obj = await author_repo.list(before_filter)
+ assert existing_obj[0].name == "Leo Tolstoy"
+
+ after_filter = BeforeAfter(
+ field_name="created", after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), before=None
+ )
+ existing_obj = await author_repo.list(after_filter)
+ assert existing_obj[0].name == "Agatha Christie"
+
+
+async def test_repo_filter_search(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy search filter.
+
+ Args:
+ author_repo (AuthorAsyncRepository): The author mock repository
+ """
+
+ existing_obj = await author_repo.list(SearchFilter(field_name="name", value="gath", ignore_case=False))
+ assert existing_obj[0].name == "Agatha Christie"
+ existing_obj = await author_repo.list(SearchFilter(field_name="name", value="GATH", ignore_case=False))
+ # sqlite & mysql are case insensitive by default with a `LIKE`
+ dialect = author_repo.session.bind.dialect.name if author_repo.session.bind else "default"
+ if dialect in {"sqlite", "mysql"}:
+ expected_objs = 1
+ else:
+ expected_objs = 0
+ assert len(existing_obj) == expected_objs
+ existing_obj = await author_repo.list(SearchFilter(field_name="name", value="GATH", ignore_case=True))
+ assert existing_obj[0].name == "Agatha Christie"
+
+
+async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy order by filter.
+
+ Args:
+ author_repo (AuthorAsyncRepository): The author mock repository
+ """
+
+ existing_obj = await author_repo.list(OrderBy(field_name="created", sort_order="desc"))
+ assert existing_obj[0].name == "Agatha Christie"
+ existing_obj = await author_repo.list(OrderBy(field_name="created", sort_order="asc"))
+ assert existing_obj[0].name == "Leo Tolstoy"
+
+
+async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy collection filter.
+
+ Args:
+ author_repo (AuthorAsyncRepository): The author mock repository
+ """
+
+ existing_obj = await author_repo.list(
+ CollectionFilter(field_name="id", values=[UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b")])
+ )
+ assert existing_obj[0].name == "Agatha Christie"
+
+ existing_obj = await author_repo.list(
+ CollectionFilter(field_name="id", values=[UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2")])
+ )
+ assert existing_obj[0].name == "Leo Tolstoy"
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_tests.py
@@ -9,6 +9,7 @@
from sqlalchemy import Engine, insert
from litestar.contrib.repository.exceptions import RepositoryError
+from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, OrderBy, SearchFilter
from litestar.contrib.sqlalchemy import base
from tests.contrib.sqlalchemy.models import (
Author,
@@ -55,17 +56,17 @@ def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncR
"""Test SQLALchemy filter by kwargs with invalid column name.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
with pytest.raises(RepositoryError):
author_repo.filter_collection_by_kwargs(author_repo.statement, whoops="silly me")
def test_repo_count_method(author_repo: AuthorSyncRepository, store_repo: StoreSyncRepository) -> None:
- """Test SQLALchemy count with asyncpg.
+ """Test SQLALchemy count.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
assert author_repo.count() == 2
assert store_repo.count() == 2
@@ -81,7 +82,7 @@ def test_repo_list_and_count_method(
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
store_repo (StoreRepository): The store mock repository
"""
@@ -103,7 +104,7 @@ def test_repo_list_and_count_method_empty(book_repo: BookSyncRepository) -> None
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
collection, count = book_repo.list_and_count()
@@ -118,11 +119,11 @@ def test_repo_list_method(
raw_stores: list[dict[str, Any]],
store_repo: StoreSyncRepository,
) -> None:
- """Test SQLALchemy list with asyncpg.
+ """Test SQLALchemy list.
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
store_repo (StoreRepository): The store mock repository
"""
@@ -143,11 +144,11 @@ def test_repo_add_method(
raw_stores: list[dict[str, Any]],
store_repo: StoreSyncRepository,
) -> None:
- """Test SQLALchemy Add with asyncpg.
+ """Test SQLALchemy Add.
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
raw_stores (list[dict[str, Any]]): list of stores pre-seeded into the mock repository
store_repo (StoreRepository): The store mock repository
"""
@@ -172,11 +173,11 @@ def test_repo_add_method(
def test_repo_add_many_method(raw_authors: list[dict[str, Any]], author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Add Many with asyncpg.
+ """Test SQLALchemy Add Many.
Args:
raw_authors (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
exp_count = len(raw_authors) + 2
objs = author_repo.add_many([Author(name="Testing 2", dob=datetime.now()), Author(name="Cody", dob=datetime.now())])
@@ -190,10 +191,10 @@ def test_repo_add_many_method(raw_authors: list[dict[str, Any]], author_repo: Au
def test_repo_update_many_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Update Many with asyncpg.
+ """Test SQLALchemy Update Many.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
objs = author_repo.list()
for idx, obj in enumerate(objs):
@@ -204,20 +205,20 @@ def test_repo_update_many_method(author_repo: AuthorSyncRepository) -> None:
def test_repo_exists_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy exists with asyncpg.
+ """Test SQLALchemy exists.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
exists = author_repo.exists(id=UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert exists
def test_repo_update_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Update with asyncpg.
+ """Test SQLALchemy Update.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
obj = author_repo.get(UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
obj.name = "Updated Name"
@@ -226,20 +227,20 @@ def test_repo_update_method(author_repo: AuthorSyncRepository) -> None:
def test_repo_delete_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy delete with asyncpg.
+ """Test SQLALchemy delete.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
obj = author_repo.delete(UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj.id == UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b")
def test_repo_delete_many_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy delete many with asyncpg.
+ """Test SQLALchemy delete many.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
data_to_insert = []
for chunk in range(0, 1000):
@@ -260,20 +261,20 @@ def test_repo_delete_many_method(author_repo: AuthorSyncRepository) -> None:
def test_repo_get_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Get with asyncpg.
+ """Test SQLALchemy Get.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
obj = author_repo.get(UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj.name == "Agatha Christie"
def test_repo_get_one_or_none_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Get One with asyncpg.
+ """Test SQLALchemy Get One.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
obj = author_repo.get_one_or_none(id=UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj is not None
@@ -283,10 +284,10 @@ def test_repo_get_one_or_none_method(author_repo: AuthorSyncRepository) -> None:
def test_repo_get_one_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Get One with asyncpg.
+ """Test SQLALchemy Get One.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
obj = author_repo.get_one(id=UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"))
assert obj is not None
@@ -296,10 +297,10 @@ def test_repo_get_one_method(author_repo: AuthorSyncRepository) -> None:
def test_repo_get_or_create_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy Get or create with asyncpg.
+ """Test SQLALchemy Get or create.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
existing_obj, existing_created = author_repo.get_or_create(name="Agatha Christie")
assert existing_obj.id == UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b")
@@ -314,7 +315,7 @@ def test_repo_get_or_create_match_filter(author_repo: AuthorSyncRepository) -> N
"""Test SQLALchemy Get or create with a match filter
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
now = datetime.now()
existing_obj, existing_created = author_repo.get_or_create(match_fields="name", name="Agatha Christie", dob=now)
@@ -324,10 +325,10 @@ def test_repo_get_or_create_match_filter(author_repo: AuthorSyncRepository) -> N
def test_repo_upsert_method(author_repo: AuthorSyncRepository) -> None:
- """Test SQLALchemy upsert with asyncpg.
+ """Test SQLALchemy upsert.
Args:
- author_repo (AuthorRepository): The author mock repository
+ author_repo (AuthorSyncRepository): The author mock repository
"""
existing_obj = author_repo.get_one(name="Agatha Christie")
existing_obj.name = "Agatha C."
@@ -343,3 +344,74 @@ def test_repo_upsert_method(author_repo: AuthorSyncRepository) -> None:
upsert2_insert_obj = author_repo.upsert(Author(id=uuid4(), name="Another Author"))
assert upsert2_insert_obj.id is not None
assert upsert2_insert_obj.name == "Another Author"
+
+
+def test_repo_filter_before_after(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy upsert.
+
+ Args:
+ author_repo (AuthorSyncRepository): The author mock repository
+ """
+ before_filter = BeforeAfter(
+ field_name="created", before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), after=None
+ )
+ existing_obj = author_repo.list(before_filter)
+ assert existing_obj[0].name == "Leo Tolstoy"
+
+ after_filter = BeforeAfter(
+ field_name="created", after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), before=None
+ )
+ existing_obj = author_repo.list(after_filter)
+ assert existing_obj[0].name == "Agatha Christie"
+
+
+def test_repo_filter_search(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy upsert.
+
+ Args:
+ author_repo (AuthorSyncRepository): The author mock repository
+ """
+
+ existing_obj = author_repo.list(SearchFilter(field_name="name", value="gath", ignore_case=False))
+ assert existing_obj[0].name == "Agatha Christie"
+ existing_obj = author_repo.list(SearchFilter(field_name="name", value="GATH", ignore_case=False))
+ # sqlite & mysql are case insensitive by default with a `LIKE`
+ dialect = author_repo.session.bind.dialect.name if author_repo.session.bind else "default"
+ if dialect in {"sqlite", "mysql"}:
+ expected_objs = 1
+ else:
+ expected_objs = 0
+ assert len(existing_obj) == expected_objs
+ existing_obj = author_repo.list(SearchFilter(field_name="name", value="GATH", ignore_case=True))
+ assert existing_obj[0].name == "Agatha Christie"
+
+
+def test_repo_filter_order_by(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy upsert.
+
+ Args:
+ author_repo (AuthorSyncRepository): The author mock repository
+ """
+
+ existing_obj = author_repo.list(OrderBy(field_name="created", sort_order="desc"))
+ assert existing_obj[0].name == "Agatha Christie"
+ existing_obj = author_repo.list(OrderBy(field_name="created", sort_order="asc"))
+ assert existing_obj[0].name == "Leo Tolstoy"
+
+
+def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy upsert.
+
+ Args:
+ author_repo (AuthorSyncRepository): The author mock repository
+ """
+
+ existing_obj = author_repo.list(
+ CollectionFilter(field_name="id", values=[UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b")])
+ )
+ assert existing_obj[0].name == "Agatha Christie"
+
+ existing_obj = author_repo.list(
+ CollectionFilter(field_name="id", values=[UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2")])
+ )
+ assert existing_obj[0].name == "Leo Tolstoy"
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_aiosqlite.py
@@ -296,3 +296,43 @@ async def test_repo_upsert_method(author_repo: AuthorAsyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_aiosqlite
+async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_aiosqlite
+async def test_repo_filter_search(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_aiosqlite
+async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_aiosqlite
+async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_collection(author_repo=author_repo)
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncmy.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncmy.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncmy.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncmy.py
@@ -307,3 +307,43 @@ async def test_repo_upsert_method(author_repo: AuthorAsyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_asyncmy
+async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_asyncmy
+async def test_repo_filter_search(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_asyncmy
+async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_asyncmy
+async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_collection(author_repo=author_repo)
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncpg.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncpg.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncpg.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_asyncpg.py
@@ -307,3 +307,43 @@ async def test_repo_upsert_method(author_repo: AuthorAsyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_asyncpg
+async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_asyncpg
+async def test_repo_filter_search(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_asyncpg
+async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_asyncpg
+async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_collection(author_repo=author_repo)
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_oracledb.py
@@ -304,3 +304,43 @@ def test_repo_upsert_method(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_filter_before_after(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_filter_search(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_filter_order_by(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_oracledb
+def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_collection(author_repo=author_repo)
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_async.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_async.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_async.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_async.py
@@ -307,3 +307,43 @@ async def test_repo_upsert_method(author_repo: AuthorAsyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_psycopg_async
+async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_psycopg_async
+async def test_repo_filter_search(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_psycopg_async
+async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_psycopg_async
+async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ await st.test_repo_filter_collection(author_repo=author_repo)
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_sync.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_sync.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_sync.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_psycopg_sync.py
@@ -302,3 +302,43 @@ def test_repo_upsert_method(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_psycopg_sync
+def test_repo_filter_before_after(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_psycopg_sync
+def test_repo_filter_search(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_psycopg_sync
+def test_repo_filter_order_by(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_psycopg_sync
+def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_collection(author_repo=author_repo)
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sqlite.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sqlite.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sqlite.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sqlite.py
@@ -301,3 +301,43 @@ def test_repo_upsert_method(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_upsert_method(author_repo=author_repo)
+
+
[email protected]_sqlite
+def test_repo_filter_before_after(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy BeforeAfter filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_before_after(author_repo=author_repo)
+
+
[email protected]_sqlite
+def test_repo_filter_search(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Search filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_search(author_repo=author_repo)
+
+
[email protected]_sqlite
+def test_repo_filter_order_by(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Order By filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_order_by(author_repo=author_repo)
+
+
[email protected]_sqlite
+def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ author_repo (AuthorRepository): The author mock repository
+ """
+ st.test_repo_filter_collection(author_repo=author_repo)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
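For illustration, this is the kind of access involved: package data that may live inside a zipped package, so there is no on-disk directory for pydantic to validate (a sketch; `my_package` is a hypothetical package name):
```python
from importlib_resources import files

# files() returns a Traversable; the data may sit inside a zip,
# so there is no real directory path to check with DirectoryPath
static_root = files("my_package") / "static"
print((static_root / "index.html").read_text())
```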
Bug: BeforeAfter does not filter against the after value when using SQLAlchemyAsyncRepository
### Description
When trying to use the `BeforeAfter` filter with the `SQLAlchemyAsyncRepository`, I noticed my filtering was returning incorrect results. Looking at the code in `SQLAlchemyAsyncRepository`, I noticed that the filtering function does not actually use the `after` value for the filtering:
```python
def _filter_on_datetime_field(
    self, field_name: str, before: datetime | None, after: datetime | None, statement: SelectT
) -> SelectT:
    field = getattr(self.model_type, field_name)
    if before is not None:
        statement = statement.where(field < before)
    if after is not None:
        statement = statement.where(field > before)
    return statement
```
Shouldn't the `after` condition be the following?
```python
    if after is not None:
        statement = statement.where(field > after)
```
As a side note, could this filter's types be expanded to cover ordinal types as well as inclusive bounds (e.g. `>=` and `<=`)?
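A minimal usage sketch of the filter once fixed (it mirrors the regression tests added above; `author_repo` stands for the seeded repository fixture from those tests):
```python
from datetime import datetime

from litestar.contrib.repository.filters import BeforeAfter


async def check_before_after(author_repo) -> None:
    cutoff = datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S")
    # with the corrected comparison, `after=` actually constrains the lower bound
    newer = await author_repo.list(BeforeAfter(field_name="created", before=None, after=cutoff))
    assert all(author.created > cutoff for author in newer)
```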
### URL to code causing the issue
### MCVE
```python
from datetime import datetime, timedelta

from litestar import Litestar, get
from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
from litestar.contrib.repository.filters import BeforeAfter
from litestar.contrib.sqlalchemy.repository import SQLAlchemyAsyncRepository
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy.ext.asyncio import AsyncSession
from litestar.di import Provide


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "user"

    id: Mapped[int] = mapped_column(primary_key=True)
    created_at: Mapped[datetime]


UserDTO = SQLAlchemyDTO[User]


class UserRepository(SQLAlchemyAsyncRepository[User]):
    model_type = User


@get("/users", dto=UserDTO)
async def list_users(user_repo: UserRepository) -> list[User]:
    now = datetime.now()
    return await user_repo.list(
        BeforeAfter("created_at", now - timedelta(hours=3), now - timedelta(hours=1))
    )


async def provide_repo(db_session: AsyncSession) -> UserRepository:
    return UserRepository(session=db_session)


app = Litestar(route_handlers=[list_users], dependencies={"user_repo": Provide(provide_repo)})
```
### Steps to reproduce
### Screenshots
### Logs
### Litestar Version
2.0.0alpha7
### Platform
- [ ] Linux
- [X] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| I agree. Do you want to submit a PR?
Thanks for reporting this. I'll take a look today. | 2023-05-20T18:32:13 |
litestar-org/litestar | 1,704 | litestar-org__litestar-1704 | [
"1502",
"1234"
] | c0a98e51ee09efb15f5892e6143d0a6fa5fd7cd9 | diff --git a/litestar/data_extractors.py b/litestar/data_extractors.py
--- a/litestar/data_extractors.py
+++ b/litestar/data_extractors.py
@@ -7,7 +7,14 @@
from litestar.datastructures.upload_file import UploadFile
from litestar.enums import HttpMethod, RequestEncodingType
-__all__ = ("ConnectionDataExtractor", "ExtractedRequestData", "ExtractedResponseData", "ResponseDataExtractor")
+__all__ = (
+ "ConnectionDataExtractor",
+ "ExtractedRequestData",
+ "ExtractedResponseData",
+ "ResponseDataExtractor",
+ "RequestExtractorField",
+ "ResponseExtractorField",
+)
if TYPE_CHECKING:
diff --git a/litestar/middleware/exceptions/middleware.py b/litestar/middleware/exceptions/middleware.py
--- a/litestar/middleware/exceptions/middleware.py
+++ b/litestar/middleware/exceptions/middleware.py
@@ -15,7 +15,7 @@
from litestar.middleware.exceptions._debug_response import create_debug_response
from litestar.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
-__all__ = ("ExceptionHandlerMiddleware",)
+__all__ = ("ExceptionHandlerMiddleware", "ExceptionResponseContent", "create_exception_response")
if TYPE_CHECKING:
| Bug: Missing declarations from `__all__`
### Description
Some public items are missing from their modules' `__all__` declarations and need to be added.
While there are some listed in the `MCVE` below, there may be more.
### URL to code causing the issue
https://github.com/litestar-org/litestar/
### MCVE
```python
from litestar.middleware.exceptions.middleware import ExceptionResponseContent
from litestar.middleware.exceptions.middleware import create_exception_response
from litestar.serialization import DEFAULT_TYPE_ENCODERS
from litestar.data_extractors import RequestExtractorField
from litestar.data_extractors import ResponseExtractorField
```
### Steps to reproduce
```bash
1. Try to import one of the above items in the MCVE.
2. See your IDE yell at you for missing `__all__` declarations.
3. Contemplate, yet again, going to drive trains for a living.
```
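One quick way to surface such gaps is a throwaway check like the one below (a sketch, not part of the repo's test suite; the output is noisy because plain re-imports show up alongside genuinely missing names):
```python
import litestar.data_extractors as module

# names that look public but are absent from __all__; needs a human eye,
# since re-imported names are listed too
public = {name for name in vars(module) if not name.startswith("_")}
print(sorted(public - set(module.__all__)))
```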
### Screenshots
```bash
N/A
```
### Logs
```bash
N/A
```
### Litestar Version
2.0.0alpha3
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [x] Other (Please specify in the description above)
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
|
I agree. Do you want to submit a PR? | 2023-05-21T17:56:25 |
|
litestar-org/litestar | 1,718 | litestar-org__litestar-1718 | [
"1699",
"4321"
] | b1642c0ccd55dfb24c818f84cb7a64ea5c0e316f | diff --git a/litestar/contrib/mako.py b/litestar/contrib/mako.py
--- a/litestar/contrib/mako.py
+++ b/litestar/contrib/mako.py
@@ -70,7 +70,9 @@ def __init__(self, directory: DirectoryPath | list[DirectoryPath]) -> None:
directory: Direct path or list of directory paths from which to serve templates.
"""
super().__init__(directory=directory)
- self.engine = TemplateLookup(directories=directory if isinstance(directory, (list, tuple)) else [directory])
+ self.engine = TemplateLookup(
+ directories=directory if isinstance(directory, (list, tuple)) else [directory], default_filters=["h"]
+ )
self._template_callables: list[tuple[str, Callable[[dict[str, Any]], Any]]] = []
self.register_template_callable(key="url_for_static_asset", template_callable=url_for_static_asset) # type: ignore
self.register_template_callable(key="csrf_token", template_callable=csrf_token) # type: ignore
| diff --git a/tests/template/test_template.py b/tests/template/test_template.py
--- a/tests/template/test_template.py
+++ b/tests/template/test_template.py
@@ -51,6 +51,12 @@ def callback(engine: JinjaTemplateEngine) -> None:
@pytest.mark.parametrize("engine", (JinjaTemplateEngine, MakoTemplateEngine))
def test_engine_instance(engine: Type["TemplateEngineProtocol"], template_dir: "Path") -> None:
engine_instance = engine(template_dir)
+ if isinstance(engine_instance, JinjaTemplateEngine):
+ assert engine_instance.engine.autoescape is True
+
+ if isinstance(engine_instance, MakoTemplateEngine):
+ assert engine_instance.engine.template_args["default_filters"] == ["h"]
+
config = TemplateConfig(engine=engine_instance)
assert config.engine_instance is engine_instance
| Docs: template autoescaping behavior
### Summary
I appreciate this framework having a built-in choice between Jinja and Mako. However, the documentation makes no mention of a significant difference in Litestar's behavior between the two: using the Jinja engine will autoescape for you, whereas Mako will not.
| Or just make the functionality match and autoescape Mako by default, which is the expected behavior.
The instantiation here:
https://github.com/litestar-org/litestar/blob/4c1c9c486d4f50027faadec35728e5c51edf1808/litestar/contrib/mako.py#L73
Adding the following kwarg:
```python
default_filters=['h']
```
Jinja instantiation:
https://github.com/litestar-org/litestar/blob/4c1c9c486d4f50027faadec35728e5c51edf1808/litestar/contrib/jinja.py#L40
Relevant Mako docs:
https://docs.makotemplates.org/en/latest/filtering.html#filtering-default-filters
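A minimal standalone sketch of what that kwarg buys you, assuming only the `mako` package (the `| n` filter opts a single expression back out of the defaults):
```python
from mako.template import Template

# with default_filters=["h"], every ${...} expression is HTML-escaped
escaped = Template("<p>${payload}</p>", default_filters=["h"])
print(escaped.render(payload="<script>alert(1)</script>"))
# -> <p>&lt;script&gt;alert(1)&lt;/script&gt;</p>

# `| n` disables the default filters for one expression (for trusted markup)
raw = Template("<p>${payload | n}</p>", default_filters=["h"])
print(raw.render(payload="<b>trusted</b>"))
# -> <p><b>trusted</b></p>
```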
| 2023-05-23T04:16:02 |
litestar-org/litestar | 1,719 | litestar-org__litestar-1719 | [
"1691",
"4321"
] | b1642c0ccd55dfb24c818f84cb7a64ea5c0e316f | diff --git a/litestar/channels/plugin.py b/litestar/channels/plugin.py
--- a/litestar/channels/plugin.py
+++ b/litestar/channels/plugin.py
@@ -111,6 +111,7 @@ def on_app_init(self, app_config: AppConfig) -> AppConfig:
"""Plugin hook. Set up a ``channels`` dependency, add route handlers and register application hooks"""
app_config.dependencies["channels"] = Provide(lambda: self, use_cache=True, sync_to_thread=False)
app_config.lifespan.append(self)
+ app_config.signature_namespace.update(ChannelsPlugin=ChannelsPlugin)
if self._create_route_handlers:
if self._arbitrary_channels_allowed:
| diff --git a/tests/channels/test_plugin.py b/tests/channels/test_plugin.py
--- a/tests/channels/test_plugin.py
+++ b/tests/channels/test_plugin.py
@@ -69,6 +69,12 @@ def handler(channels: ChannelsPlugin) -> None:
assert mock.call_args[0][0] is channels_plugin
+def test_plugin_dependency_signature_namespace(memory_backend: MemoryChannelsBackend) -> None:
+ channels_plugin = ChannelsPlugin(backend=memory_backend, arbitrary_channels_allowed=True)
+ app = Litestar(plugins=[channels_plugin])
+ assert app.signature_namespace["ChannelsPlugin"] is ChannelsPlugin
+
+
@pytest.mark.flaky(reruns=5)
async def test_pub_sub_wait_published(channels_backend: ChannelsBackend) -> None:
async with ChannelsPlugin(backend=channels_backend, channels=["something"]) as plugin:
| Bug: Issue with ChannelsPlugin and __future__.annotations
### Description
While working with the new Channels feature, I found a bug that affects Litestar's ability to generate the route argument signature when passing `channels: ChannelsPlugin` to a route handler while `from __future__ import annotations` is active.
### URL to code causing the issue
_No response_
### MCVE
_No response_
### Steps to reproduce
```bash
1. Copy script from https://blog.litestar.dev/creating-a-websockets-chat-in-just-30-lines-with-litestar-2ca0f3767a47
2. Add `from __future__ import annotations` to the top
3. Start the app
```
### Screenshots
_No response_
### Logs
```bash
Traceback (most recent call last):
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/multiprocessing/process.py", line 315, in _bootstrap
    self.run()
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/uvicorn/_subprocess.py", line 76, in subprocess_started
    target(sockets=sockets)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/uvicorn/server.py", line 61, in run
    return asyncio.run(self.serve(sockets=sockets))
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "uvloop/loop.pyx", line 1517, in uvloop.loop.Loop.run_until_complete
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/uvicorn/server.py", line 68, in serve
    config.load()
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/uvicorn/config.py", line 473, in load
    self.loaded_app = import_from_string(self.app)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/uvicorn/importer.py", line 21, in import_from_string
    module = importlib.import_module(module_str)
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/importlib/__init__.py", line 127, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
  File "<frozen importlib._bootstrap>", line 991, in _find_and_load
  File "<frozen importlib._bootstrap>", line 975, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 671, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 843, in exec_module
  File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
  File "/tmp/litestar-bug/app.py", line 28, in <module>
    app = Litestar(
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/app.py", line 415, in __init__
    self.register(route_handler)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/app.py", line 553, in register
    route_handler.on_registration(self)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/handlers/websocket_handlers/listener.py", line 274, in on_registration
    super().on_registration(app)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/handlers/base.py", line 396, in on_registration
    self._create_signature_model(app)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/handlers/websocket_handlers/listener.py", line 286, in _create_signature_model
    parsed_signature=ParsedSignature.from_signature(new_signature, self.resolve_signature_namespace()),
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/utils/signature.py", line 369, in from_signature
    return cls.from_fn(fn, signature_namespace)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/utils/signature.py", line 336, in from_fn
    fn_type_hints = get_fn_type_hints(fn, namespace=signature_namespace)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/litestar/utils/signature.py", line 78, in get_fn_type_hints
    return get_type_hints(fn_to_inspect, globalns=namespace, include_extras=True)
  File "/tmp/litestar-bug/.venv/lib/python3.8/site-packages/typing_extensions.py", line 940, in get_type_hints
    hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/typing.py", line 1264, in get_type_hints
    value = _eval_type(value, globalns, localns)
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/typing.py", line 270, in _eval_type
    return t._evaluate(globalns, localns)
  File "/home/ksmith/.pyenv/versions/3.8.16/lib/python3.8/typing.py", line 518, in _evaluate
    eval(self.__forward_code__, globalns, localns),
  File "<string>", line 1, in <module>
NameError: name 'ChannelsPlugin' is not defined
```
### Litestar Version
2.0.0a7
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| This is because when the annotation is resolved, the object referred to by `ChannelsPlugin` isn't in the namespace. We need to explicitly add it to the signature namespace.
As a workaround, you can add it to the signature namespace manually by adding `signature_namespace={"ChannelsPlugin": ChannelsPlugin}` on any layer above the handler (or on the handler itself).
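A minimal sketch of that workaround applied on the handler itself (assumes the stock memory backend; any layer's `signature_namespace` works the same way):
```python
from __future__ import annotations

from litestar import Litestar, websocket_listener
from litestar.channels import ChannelsPlugin
from litestar.channels.backends.memory import MemoryChannelsBackend


# the namespace entry lets the stringified annotation resolve at registration
@websocket_listener("/ws", signature_namespace={"ChannelsPlugin": ChannelsPlugin})
async def handler(data: str, channels: ChannelsPlugin) -> None:
    channels.publish(data, "general")


app = Litestar(
    route_handlers=[handler],
    plugins=[ChannelsPlugin(backend=MemoryChannelsBackend(), channels=["general"])],
)
```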
The workaround works great!
I did attempt to add `ChannelsPlugin` to `litestar/utils/signature.py` and add it to [_GLOBAL_NAMES](https://github.com/litestar-org/litestar/blob/1cb42dc491a644e763f095eda723e75212abdca3/litestar/utils/signature.py#L21), but importing it there triggers a circular import error. I have not had time to spend any more time on it.
Thanks!
Kyle | 2023-05-23T05:46:50 |
litestar-org/litestar | 1,723 | litestar-org__litestar-1723 | [
"1681",
"4321"
] | 9aff0990a925623d8b4acb8ee33d86ed82fb8b28 | diff --git a/litestar/middleware/compression.py b/litestar/middleware/compression.py
--- a/litestar/middleware/compression.py
+++ b/litestar/middleware/compression.py
@@ -85,6 +85,7 @@ def write(self, body: bytes) -> None:
self.buffer.write(self.compressor.process(body) + self.compressor.flush()) # type: ignore
else:
self.compressor.write(body)
+ self.compressor.flush()
def close(self) -> None:
"""Close the compression stream.
| diff --git a/litestar/contrib/repository/testing/generic_mock_repository.py b/litestar/contrib/repository/testing/generic_mock_repository.py
--- a/litestar/contrib/repository/testing/generic_mock_repository.py
+++ b/litestar/contrib/repository/testing/generic_mock_repository.py
@@ -728,5 +728,5 @@ def clear_collection(cls) -> None:
cls.collection = {}
-def model_items(model: ModelT) -> list[tuple[str, Any]]:
+def model_items(model: Any) -> list[tuple[str, Any]]:
return [(k, v) for k, v in model.__dict__.items() if not k.startswith("_")]
diff --git a/tests/conftest.py b/tests/conftest.py
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -64,6 +64,7 @@
from litestar import Litestar
from litestar.types import (
AnyIOBackend,
+ ASGIApp,
ASGIVersion,
Receive,
RouteHandlerType,
@@ -88,8 +89,12 @@ def anyio_backend(request: pytest.FixtureRequest) -> str:
return request.param # type: ignore[no-any-return]
-async def mock_asgi_app(scope: Scope, receive: Receive, send: Send) -> None:
- pass
[email protected]()
+def mock_asgi_app() -> ASGIApp:
+ async def asgi_app(scope: Scope, receive: Receive, send: Send) -> None:
+ ...
+
+ return asgi_app
@pytest.fixture()
@@ -163,13 +168,13 @@ def session_backend_config_memory(memory_store: MemoryStore) -> ServerSideSessio
@pytest.fixture
-def session_middleware(session_backend: BaseSessionBackend) -> SessionMiddleware[Any]:
+def session_middleware(session_backend: BaseSessionBackend, mock_asgi_app: ASGIApp) -> SessionMiddleware[Any]:
return SessionMiddleware(app=mock_asgi_app, backend=session_backend)
@pytest.fixture
def cookie_session_middleware(
- cookie_session_backend: ClientSideSessionBackend,
+ cookie_session_backend: ClientSideSessionBackend, mock_asgi_app: ASGIApp
) -> SessionMiddleware[ClientSideSessionBackend]:
return SessionMiddleware(app=mock_asgi_app, backend=cookie_session_backend)
diff --git a/tests/middleware/test_compression_middleware.py b/tests/middleware/test_compression_middleware.py
--- a/tests/middleware/test_compression_middleware.py
+++ b/tests/middleware/test_compression_middleware.py
@@ -1,4 +1,5 @@
-from typing import AsyncIterator, Literal
+from typing import AsyncIterator, Callable, Literal
+from unittest.mock import MagicMock
import pytest
@@ -7,9 +8,11 @@
from litestar.enums import CompressionEncoding
from litestar.exceptions import ImproperlyConfiguredException
from litestar.handlers import HTTPRouteHandler
+from litestar.middleware.compression import CompressionMiddleware
from litestar.response_containers import Stream
from litestar.status_codes import HTTP_200_OK
from litestar.testing import create_test_client
+from litestar.types.asgi_types import ASGIApp, HTTPResponseBodyEvent, HTTPResponseStartEvent, Message, Scope
BrotliMode = Literal["text", "generic", "font"]
@@ -164,3 +167,29 @@ def test_config_brotli_lgwin_validation(brotli_lgwin: int, should_raise: bool) -
CompressionConfig(backend="brotli", brotli_gzip_fallback=False, brotli_lgwin=brotli_lgwin)
else:
CompressionConfig(backend="brotli", brotli_gzip_fallback=False, brotli_lgwin=brotli_lgwin)
+
+
[email protected](
+ "backend, compression_encoding", (("brotli", CompressionEncoding.BROTLI), ("gzip", CompressionEncoding.GZIP))
+)
+async def test_compression_streaming_response_emitted_messages(
+ backend: Literal["gzip", "brotli"],
+ compression_encoding: Literal[CompressionEncoding.BROTLI, CompressionEncoding.GZIP],
+ create_scope: Callable[..., Scope],
+ mock_asgi_app: ASGIApp,
+) -> None:
+ mock = MagicMock()
+
+ async def fake_send(message: Message) -> None:
+ mock(message)
+
+ wrapped_send = CompressionMiddleware(
+ mock_asgi_app, CompressionConfig(backend=backend)
+ ).create_compression_send_wrapper(fake_send, compression_encoding, create_scope())
+
+ await wrapped_send(HTTPResponseStartEvent(type="http.response.start", status=200, headers={}))
+ # first body message always has compression headers (at least for gzip)
+ await wrapped_send(HTTPResponseBodyEvent(type="http.response.body", body=b"abc", more_body=True))
+ # second body message with more_body=True will be empty if zlib buffers output and is not flushed
+ await wrapped_send(HTTPResponseBodyEvent(type="http.response.body", body=b"abc", more_body=True))
+ assert mock.mock_calls[-1].args[0]["body"]
| Bug: Enabling gzip only sends large streaming responses
### Description
Enabling gzip compression causes small streaming responses to be completely ignored and never sent to the client browser.
I have verified that the request headers specify, at the very least:
`Accept-Encoding: gzip`
as can be seen in the logs below.
### URL to code causing the issue
https://github.com/LeckerenSirupwaffeln/Litestar_StreamingResponseGzip_Bug
### MCVE
```python
https://github.com/LeckerenSirupwaffeln/Litestar_StreamingResponseGzip_Bug
```
### Steps to reproduce
```bash
1. git clone https://github.com/LeckerenSirupwaffeln/Litestar_StreamingResponseGzip_Bug
2. cd Litestar_StreamingResponseGzip_Bug
3. docker compose up
4. Open your browser
5. Navigate to http://127.0.0.1:8000/event
6. Open developer tools, go to Console
7. Click "Start retrieving data"
8. Observe that only the large arrays of data get received by the client browser. These arrays have a size of 1000000, while the arrays of size 500 never get printed to the console and thus are never received.
Alternatively:
Disable gzip compression in the app construction and observe that the arrays of size 500 do get printed to the console, as do the arrays of size 1000000.
```
### Screenshots
_No response_
### Logs
```bash
Request headers:
POST /event HTTP/1.1
Accept: application/json
Accept-Encoding: gzip, deflate, br
Accept-Language: en-US,en;q=0.9,nl-NL;q=0.8,nl;q=0.7
Connection: keep-alive
Content-Length: 2
Content-Type: application/json
DNT: 1
Host: 127.0.0.1:8000
Origin: http://127.0.0.1:8000
Referer: http://127.0.0.1:8000/event
Sec-Fetch-Dest: empty
Sec-Fetch-Mode: cors
Sec-Fetch-Site: same-origin
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36
sec-ch-ua: "Google Chrome";v="113", "Chromium";v="113", "Not-A.Brand";v="24"
sec-ch-ua-mobile: ?0
sec-ch-ua-platform: "Windows"
```
### Litestar Version
2.0.0alpha3
### Platform
- [X] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| Thanks for the report @LeckerenSirupwaffeln
When I run `docker compose up` on your example repo I get:
```
failed to solve: process "/bin/sh -c apt-get update && apt-get upgrade && apt-get install -y --no-install-recommends sudo && apt-get install -y --no-install-recommends bash && apt-get install -y --no-install-recommends git && apt-get install -y --no-install-recommends vim && apt-get purge -y --auto-remove && rm -rf /var/lib/apt/lists/*" did not complete successfully: exit code: 1
```
Any tips on what I might be doing wrong?
Hi, peterschutt
1) It is possible that `apt-get update && apt-get upgrade` prompted you for confirmation, so I made a small commit to include the `-y` flag this time. Please try again with the new commit.
2) You could have CRLF line-endings due to git. Verify that the line-endings in the file are all LF.
3) Beyond that, I am not sure why this error would occur; I do not get it on my system.
@LeckerenSirupwaffeln do you think it would be possible to create a self-contained MCVE that does not require Docker etc.?
That would make testing on our end a lot easier.
I cannot reproduce this with:
```python
from litestar import get, Litestar
from litestar.config.compression import CompressionConfig
from litestar.testing import TestClient


@get(path="/", sync_to_thread=False)
def handler() -> str:
    return "foo"


app = Litestar(
    route_handlers=[handler],
    compression_config=CompressionConfig(
        backend="gzip",
        minimum_size=1,
    ),
)

with TestClient(app=app) as client:
    res = client.get("/")
    assert res.headers.get("content-encoding") == "gzip"
```
Are you sure you have set `minimum_size` to something other than the default value?
Hi provinzkraut,
1) I have indeed tried setting a different `minimum_size`; I also do not think the default `minimum_size` should cause this bug.
2) Sorry for the non-working Docker setup. Here is the MCVE; it took me a while to make because I could not replicate the bug exactly at first, but eventually I did:
```python
from asyncio import sleep

from litestar import Litestar, MediaType, post
from litestar.config.compression import CompressionConfig
from litestar.response_containers import Stream
from litestar.serialization import encode_json

GLOBAL_SMALL_DICT = {}
for i in range(500):
    GLOBAL_SMALL_DICT[str(i)] = i

GLOBAL_LARGE_DICT = {}
for i in range(1000000):
    GLOBAL_LARGE_DICT[str(i)] = i


class SmallDataStreamer:
    def __aiter__(self) -> "SmallDataStreamer":
        return self

    async def __anext__(self) -> bytes:
        await sleep(1)
        return encode_json(GLOBAL_SMALL_DICT)


class LargeDataStreamer:
    def __aiter__(self) -> "LargeDataStreamer":
        return self

    async def __anext__(self) -> bytes:
        await sleep(1)
        return encode_json(GLOBAL_LARGE_DICT)


@post("/raw_small_example", media_type=MediaType.JSON)  # Always works.
async def raw_small_example(data: dict[str, str]) -> dict[str, int]:
    return GLOBAL_SMALL_DICT


@post("/raw_large_example", media_type=MediaType.JSON)  # Always works.
async def raw_large_example(data: dict[str, str]) -> dict[str, int]:
    return GLOBAL_LARGE_DICT


@post("/stream_small_example", media_type=MediaType.JSON)  # Only works when gzip is disabled.
async def stream_small_example(data: dict[str, str]) -> Stream:
    small_data_streamer = SmallDataStreamer()
    return Stream(iterator=small_data_streamer)


@post("/stream_large_example", media_type=MediaType.JSON)  # Always works.
async def stream_large_example(data: dict[str, str]) -> Stream:
    large_data_streamer = LargeDataStreamer()
    return Stream(iterator=large_data_streamer)


app = Litestar(
    route_handlers=[raw_small_example, raw_large_example, stream_small_example, stream_large_example],
    compression_config=CompressionConfig(backend="gzip", gzip_compress_level=9),
)
```
How to use this MCVE:
1. Set the app up and run it locally.
2. `curl -X POST -H 'Accept-encoding: gzip' --data '{"data": ""}' 127.0.0.1:8000/raw_large_example --compressed`
3. `curl -X POST -H 'Accept-encoding: gzip' --data '{"data": ""}' 127.0.0.1:8000/stream_large_example --compressed`
4. After getting the data once: CTRL+C
5. `curl -X POST -H 'Accept-encoding: gzip' --data '{"data": ""}' 127.0.0.1:8000/raw_small_example --compressed`
6. `curl -X POST -H 'Accept-encoding: gzip' --data '{"data": ""}' 127.0.0.1:8000/stream_small_example --compressed`
7. Notice you never get the data within a reasonable time after executing step 6.
8. Now comment out gzip compression and repeat steps 1 to 7; this time you do get the data within a reasonable time after step 6.
The bug seems to be an interaction between `Stream` and gzip compression: when gzip compression is enabled, only a very large `StreamingResponse` gets sent.
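That interaction is consistent with plain `GzipFile` buffering; a standard-library-only sketch (nothing Litestar-specific) showing that a small write stays in zlib's buffer until a flush:
```python
import gzip
import io

buffer = io.BytesIO()
stream = gzip.GzipFile(mode="wb", fileobj=buffer)

stream.write(b'{"0": 0, "1": 1}')  # small chunk: held in zlib's internal buffer
print(len(buffer.getvalue()))      # roughly 10 bytes: just the gzip header so far

stream.flush()                     # Z_SYNC_FLUSH forces the pending block out
print(len(buffer.getvalue()))      # now the compressed chunk is in the buffer
```
The one-line fix in this PR's patch (calling `self.compressor.flush()` after each `write()`) makes the middleware emit each streamed chunk immediately.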
Interestingly, when first sending a small `StreamingResponse` followed a second later by a large `StreamingResponse`, you will receive both at once in-order. | 2023-05-24T01:39:58 |
litestar-org/litestar | 1,731 | litestar-org__litestar-1731 | [
"1726",
"4321"
] | d074a3ede1c665fbaa87e382410d2d94b4045d08 | diff --git a/litestar/dto/factory/_backends/utils.py b/litestar/dto/factory/_backends/utils.py
--- a/litestar/dto/factory/_backends/utils.py
+++ b/litestar/dto/factory/_backends/utils.py
@@ -171,13 +171,23 @@ def filter_missing(value: Any) -> bool:
source_value = source_instance[source_name] if source_is_mapping else getattr(source_instance, source_name)
if field_definition.is_partial and dto_for == "data" and filter_missing(source_value):
continue
- unstructured_data[destination_name] = transfer_type_data(source_value, transfer_type, dto_for)
+ unstructured_data[destination_name] = transfer_type_data(
+ source_value, transfer_type, dto_for, nested_as_dict=destination_type is dict
+ )
return destination_type(**unstructured_data)
-def transfer_type_data(source_value: Any, transfer_type: TransferType, dto_for: ForType) -> Any:
+def transfer_type_data(
+ source_value: Any, transfer_type: TransferType, dto_for: ForType, nested_as_dict: bool = False
+) -> Any:
if isinstance(transfer_type, SimpleType) and transfer_type.nested_field_info:
- dest_type = transfer_type.parsed_type.annotation if dto_for == "data" else transfer_type.nested_field_info.model
+ if nested_as_dict:
+ dest_type = dict
+ else:
+ dest_type = (
+ transfer_type.parsed_type.annotation if dto_for == "data" else transfer_type.nested_field_info.model
+ )
+
return transfer_nested_simple_type_data(dest_type, transfer_type.nested_field_info, dto_for, source_value)
if isinstance(transfer_type, UnionType) and transfer_type.has_nested:
return transfer_nested_union_type_data(transfer_type, dto_for, source_value)
| diff --git a/tests/dto/factory/test_integration.py b/tests/dto/factory/test_integration.py
--- a/tests/dto/factory/test_integration.py
+++ b/tests/dto/factory/test_integration.py
@@ -2,7 +2,7 @@
from __future__ import annotations
from dataclasses import dataclass, field
-from typing import Optional
+from typing import TYPE_CHECKING, Optional
import pytest
from typing_extensions import Annotated
@@ -16,6 +16,10 @@
from litestar.params import Body
from litestar.testing import create_test_client
+if TYPE_CHECKING:
+ from types import ModuleType
+ from typing import Callable
+
def test_url_encoded_form_data() -> None:
@dataclass
@@ -139,6 +143,43 @@ def handler(data: DTOData[Foo]) -> Foo:
assert response.json() == {"bar": "hello"}
+def test_dto_data_injection_with_nested_model(create_module: Callable[[str], ModuleType]) -> None:
+ module = create_module(
+ """
+from dataclasses import dataclass
+from typing import Any, Dict
+
+from typing_extensions import Annotated
+
+from litestar import post
+from litestar.dto.factory import DTOConfig, DTOData
+from litestar.dto.factory.stdlib import DataclassDTO
+
+@dataclass
+class Foo:
+ bar: str
+ baz: str
+
+@dataclass
+class Bar:
+ foo: Foo
+
+config = DTOConfig(exclude={"foo.baz"})
+dto = DataclassDTO[Annotated[Bar, config]]
+
+@post(dto=dto, return_dto=None)
+def handler(data: DTOData[Bar]) -> Dict[str, Any]:
+ assert isinstance(data, DTOData)
+ return data.as_builtins()
+"""
+ )
+
+ with create_test_client(route_handlers=[module.handler], debug=True) as client:
+ resp = client.post("/", json={"foo": {"bar": "hello"}})
+ assert resp.status_code == 201
+ assert resp.json() == {"foo": {"bar": "hello"}}
+
+
def test_dto_data_with_url_encoded_form_data() -> None:
@dataclass
class User:
| Bug: Premature instantiation of nested models
### Description
Assume two dataclasses, `Person` and `Address`, where `Person` contains an `address` (see the MCVE).
Using `DTOData[Person]` in a route handler results in a `TypeError("Address.__init__() missing 1 required positional argument: 'id'")`, because the DTO backend tries to instantiate the nested `Address`, which fails since `address.id` is excluded from the `WriteDTO`.
### URL to code causing the issue
https://discord.com/channels/919193495116337154/1110854577575698463
### MCVE
```python
from dataclasses import dataclass
import uuid
from litestar import Litestar, post
from litestar.dto.factory import DTOConfig, DTOData
from litestar.dto.factory.stdlib import DataclassDTO
@dataclass
class Address:
id: uuid.UUID
street: str
city: str
@dataclass
class Person:
id: uuid.UUID
name: str
email: str
address: Address
class WriteDTO(DataclassDTO[Person]):
config = DTOConfig(exclude={"id", "address.id"})
@post("/person", dto=WriteDTO, return_dto=None, sync_to_thread=False)
def create_person(data: DTOData[Person]) -> str:
return "Success"
app = Litestar([create_person])
```
### Steps to reproduce
```bash
curl -X 'POST' \
'http://127.0.0.1:8000/person' \
-H 'accept: text/plain' \
-H 'Content-Type: application/json' \
-d '{
"name": "test",
"email": "test",
"address": {
"street": "test",
"city": "test"
}
}'
```
### Screenshots
_No response_
### Logs
```bash
INFO: 127.0.0.1:36960 - "POST /person HTTP/1.1" 500 Internal Server Error
```
### Litestar Version
2.0.0a7
### Platform
- [X] Linux
- [X] Mac
- [X] Windows
- [ ] Other (Please specify in the description above)
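For reference, the failure reproduces without Litestar at all. Excluding `address.id` means the inbound payload carries no `id`, yet the nested `Address` is still instantiated eagerly; a standalone sketch of what the DTO backend effectively does:

```python
from dataclasses import dataclass
import uuid


@dataclass
class Address:
    id: uuid.UUID
    street: str
    city: str


payload = {"street": "test", "city": "test"}  # "id" was excluded by the DTO config

try:
    Address(**payload)  # eager instantiation of the nested model
except TypeError as exc:
    print(exc)  # Address.__init__() missing 1 required positional argument: 'id'
```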
| Thanks @MoBoo - confirmed. | 2023-05-25T07:38:42 |
litestar-org/litestar | 1,748 | litestar-org__litestar-1748 | [
"4321",
"1234"
] | 649ca234fc921f0de7de0fcef1ae43111247dfb8 | diff --git a/litestar/contrib/pydantic.py b/litestar/contrib/pydantic.py
--- a/litestar/contrib/pydantic.py
+++ b/litestar/contrib/pydantic.py
@@ -32,9 +32,10 @@ class PydanticDTO(AbstractDTOFactory[T], Generic[T]):
@classmethod
def generate_field_definitions(cls, model_type: type[BaseModel]) -> Generator[FieldDefinition, None, None]:
- for key, parsed_type in get_model_type_hints(model_type).items():
+ model_parsed_types = get_model_type_hints(model_type)
+ for key, model_field in model_type.__fields__.items():
+ parsed_type = model_parsed_types[key]
model_field = model_type.__fields__[key]
-
dto_field: DTOField | None = model_field.field_info.extra.get(DTO_FIELD_META_KEY)
def determine_default(_parsed_type: ParsedType, _model_field: ModelField) -> Any:
| diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py
--- a/tests/contrib/test_pydantic.py
+++ b/tests/contrib/test_pydantic.py
@@ -11,6 +11,7 @@
from litestar.typing import ParsedType
if TYPE_CHECKING:
+ from types import ModuleType
from typing import Callable
@@ -42,3 +43,30 @@ class NotModel:
assert PydanticDTO.detect_nested_field(ParsedType(TestModel)) is True
assert PydanticDTO.detect_nested_field(ParsedType(NotModel)) is False
+
+
+def test_generate_field_definitions_from_beanie_models(create_module: Callable[[str], ModuleType]) -> None:
+ module = create_module(
+ """
+from typing import Optional
+
+import pymongo
+from pydantic import BaseModel
+
+from beanie import Document
+
+
+class Category(BaseModel):
+ name: str
+ description: str
+
+
+class Product(Document): # This is the model
+ name: str
+ description: Optional[str] = None
+ price: float
+ category: Category
+"""
+ )
+ field_names = [field.name for field in PydanticDTO.generate_field_definitions(module.Product)]
+ assert field_names == ["id", "revision_id", "name", "description", "price", "category"]
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
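A minimal sketch of the kind of filesystem this would unlock, built on `importlib.resources` (the `open` method name below is an assumption standing in for whatever `FileSystemProtocol` actually requires, not its exact signature):

```python
from importlib import resources


class PackageFileSystem:
    """Read-only filesystem over package data; the package may live inside a zip."""

    def __init__(self, package: str) -> None:
        self._root = resources.files(package)  # a Traversable, not a real directory

    def open(self, path: str, mode: str = "rb"):  # assumed protocol method name
        return (self._root / path).open(mode)
```

No real directory ever exists on disk for such a filesystem, so a `DirectoryPath` check can never pass for it.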
| I agree. Do you want to submit a PR? | 2023-05-28T00:41:19 |
litestar-org/litestar | 1,754 | litestar-org__litestar-1754 | [
"1234",
"4321"
] | a7f33eb69df198f981c57a98ce8237602a38a991 | diff --git a/litestar/contrib/sqlalchemy/dto.py b/litestar/contrib/sqlalchemy/dto.py
--- a/litestar/contrib/sqlalchemy/dto.py
+++ b/litestar/contrib/sqlalchemy/dto.py
@@ -1,22 +1,32 @@
from __future__ import annotations
+from functools import singledispatchmethod
from typing import TYPE_CHECKING, Generic, TypeVar
-from sqlalchemy import inspect, orm, sql
-from sqlalchemy.orm import DeclarativeBase, Mapped
+from sqlalchemy import Column, inspect, orm, sql
+from sqlalchemy.ext.associationproxy import AssociationProxy, AssociationProxyExtensionType
+from sqlalchemy.ext.hybrid import HybridExtensionType, hybrid_property
+from sqlalchemy.orm import (
+ ColumnProperty,
+ DeclarativeBase,
+ InspectionAttr,
+ Mapped,
+ NotExtension,
+ QueryableAttribute,
+ RelationshipProperty,
+)
from litestar.dto.factory.abc import AbstractDTOFactory
-from litestar.dto.factory.field import DTO_FIELD_META_KEY
+from litestar.dto.factory.field import DTO_FIELD_META_KEY, DTOField, Mark
from litestar.dto.factory.types import FieldDefinition
from litestar.dto.factory.utils import get_model_type_hints
from litestar.types.empty import Empty
from litestar.utils.helpers import get_fully_qualified_class_name
+from litestar.utils.signature import ParsedSignature
if TYPE_CHECKING:
from typing import Any, ClassVar, Collection, Generator
- from sqlalchemy import Column
- from sqlalchemy.orm import RelationshipProperty
from typing_extensions import TypeAlias
from litestar.typing import ParsedType
@@ -26,6 +36,8 @@
T = TypeVar("T", bound="DeclarativeBase | Collection[DeclarativeBase]")
ElementType: TypeAlias = "Column[Any] | RelationshipProperty[Any]"
+SQLA_NS = {**vars(orm), **vars(sql)}
+
class SQLAlchemyDTO(AbstractDTOFactory[T], Generic[T]):
"""Support for domain modelling with SQLAlchemy."""
@@ -34,40 +46,122 @@ class SQLAlchemyDTO(AbstractDTOFactory[T], Generic[T]):
model_type: ClassVar[type[DeclarativeBase]]
+ @singledispatchmethod
+ @classmethod
+ def handle_orm_descriptor(
+ cls,
+ extension_type: NotExtension | AssociationProxyExtensionType | HybridExtensionType,
+ orm_descriptor: InspectionAttr,
+ key: str,
+ model_type_hints: dict[str, ParsedType],
+ model_name: str,
+ ) -> FieldDefinition:
+ raise NotImplementedError(f"Unsupported extension type: {extension_type}")
+
+ @handle_orm_descriptor.register(NotExtension)
+ @classmethod
+ def _(
+ cls,
+ extension_type: NotExtension,
+ key: str,
+ orm_descriptor: InspectionAttr,
+ model_type_hints: dict[str, ParsedType],
+ model_name: str,
+ ) -> FieldDefinition:
+ if not isinstance(orm_descriptor, QueryableAttribute):
+ raise NotImplementedError(f"Unexpected descriptor type for '{extension_type}': '{orm_descriptor}'")
+
+ elem: ElementType
+ if isinstance(orm_descriptor.property, ColumnProperty):
+ if not isinstance(orm_descriptor.property.expression, Column):
+ raise NotImplementedError(f"Expected 'Column', got: '{orm_descriptor.property.expression}'")
+ elem = orm_descriptor.property.expression
+ elif isinstance(orm_descriptor.property, RelationshipProperty):
+ elem = orm_descriptor.property
+ else:
+ raise NotImplementedError(f"Unhandled property type: '{orm_descriptor.property}'")
+
+ default, default_factory = _detect_defaults(elem)
+
+ if (parsed_type := model_type_hints[key]).origin is Mapped:
+ (parsed_type,) = parsed_type.inner_types
+ else:
+ raise NotImplementedError(f"Expected 'Mapped' origin, got: '{parsed_type.origin}'")
+
+ return FieldDefinition(
+ name=key,
+ default=default,
+ parsed_type=parsed_type,
+ default_factory=default_factory,
+ dto_field=elem.info.get(DTO_FIELD_META_KEY),
+ unique_model_name=model_name,
+ )
+
+ @handle_orm_descriptor.register(AssociationProxyExtensionType)
+ @classmethod
+ def _(
+ cls,
+ extension_type: AssociationProxyExtensionType,
+ key: str,
+ orm_descriptor: InspectionAttr,
+ model_type_hints: dict[str, ParsedType],
+ model_name: str,
+ ) -> FieldDefinition:
+ if not isinstance(orm_descriptor, AssociationProxy):
+ raise NotImplementedError(f"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'")
+
+ if (parsed_type := model_type_hints[key]).origin is AssociationProxy:
+ (parsed_type,) = parsed_type.inner_types
+ else:
+ raise NotImplementedError(f"Expected 'AssociationProxy' origin, got: '{parsed_type.origin}'")
+
+ return FieldDefinition(
+ name=key,
+ default=Empty,
+ parsed_type=parsed_type,
+ default_factory=None,
+ dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),
+ unique_model_name=model_name,
+ )
+
+ @handle_orm_descriptor.register(HybridExtensionType)
+ @classmethod
+ def _(
+ cls,
+ extension_type: HybridExtensionType,
+ key: str,
+ orm_descriptor: InspectionAttr,
+ model_type_hints: dict[str, ParsedType],
+ model_name: str,
+ ) -> FieldDefinition:
+ if not isinstance(orm_descriptor, hybrid_property):
+ raise NotImplementedError(f"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'")
+
+ getter_sig = ParsedSignature.from_fn(orm_descriptor.fget, {})
+
+ return FieldDefinition(
+ name=key,
+ default=Empty,
+ parsed_type=getter_sig.return_type,
+ default_factory=None,
+ dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)),
+ unique_model_name=model_name,
+ )
+
@classmethod
def generate_field_definitions(cls, model_type: type[DeclarativeBase]) -> Generator[FieldDefinition, None, None]:
if (mapper := inspect(model_type)) is None: # pragma: no cover
raise RuntimeError("Unexpected `None` value for mapper.")
- columns = mapper.columns
- relationships = mapper.relationships
-
# includes SQLAlchemy names and other mapped class names in the forward reference resolution namespace
- namespace = dict(vars(orm))
- namespace.update(vars(sql))
- namespace.update({m.class_.__name__: m.class_ for m in mapper.registry.mappers if m is not mapper})
-
- for key, parsed_type in get_model_type_hints(model_type, namespace=namespace).items():
- elem: ElementType | None
- elem = columns.get(key, relationships.get(key)) # pyright:ignore
- if elem is None:
- continue
-
- if parsed_type.origin is Mapped:
- (parsed_type,) = parsed_type.inner_types
-
- default, default_factory = _detect_defaults(elem)
-
- field_def = FieldDefinition(
- name=key,
- default=default,
- parsed_type=parsed_type,
- default_factory=default_factory,
- dto_field=elem.info.get(DTO_FIELD_META_KEY),
- unique_model_name=get_fully_qualified_class_name(model_type),
- )
+ namespace = {**SQLA_NS, **{m.class_.__name__: m.class_ for m in mapper.registry.mappers if m is not mapper}}
+ model_type_hints = get_model_type_hints(model_type, namespace=namespace)
+ model_name = get_fully_qualified_class_name(model_type)
- yield field_def
+ for key, orm_descriptor in mapper.all_orm_descriptors.items():
+ yield cls.handle_orm_descriptor(
+ orm_descriptor.extension_type, key, orm_descriptor, model_type_hints, model_name
+ )
@classmethod
def detect_nested_field(cls, parsed_type: ParsedType) -> bool:
diff --git a/litestar/dto/factory/_backends/utils.py b/litestar/dto/factory/_backends/utils.py
--- a/litestar/dto/factory/_backends/utils.py
+++ b/litestar/dto/factory/_backends/utils.py
@@ -232,10 +232,12 @@ def transfer_type_data(
return transfer_nested_simple_type_data(dest_type, transfer_type.nested_field_info, dto_for, source_value)
if isinstance(transfer_type, UnionType) and transfer_type.has_nested:
return transfer_nested_union_type_data(transfer_type, dto_for, source_value)
- if isinstance(transfer_type, CollectionType) and transfer_type.has_nested:
- return transfer_nested_collection_type_data(
- transfer_type.parsed_type.origin, transfer_type, dto_for, source_value
- )
+ if isinstance(transfer_type, CollectionType):
+ if transfer_type.has_nested:
+ return transfer_nested_collection_type_data(
+ transfer_type.parsed_type.origin, transfer_type, dto_for, source_value
+ )
+ return transfer_type.parsed_type.origin(source_value)
return source_value
| diff --git a/tests/contrib/sqlalchemy/test_dto_integration.py b/tests/contrib/sqlalchemy/test_dto_integration.py
--- a/tests/contrib/sqlalchemy/test_dto_integration.py
+++ b/tests/contrib/sqlalchemy/test_dto_integration.py
@@ -1,4 +1,5 @@
from dataclasses import dataclass
+from types import ModuleType
from typing import Any, Callable, Dict, List, Tuple
import pytest
@@ -134,3 +135,101 @@ def get_handler() -> Book:
response_callback = client.post("/", json=json_data)
assert response_callback.json() == json_data
+
+
+def test_dto_with_association_proxy(create_module: Callable[[str], ModuleType]) -> None:
+ module = create_module(
+ """
+from __future__ import annotations
+
+from typing import Final, List
+
+from sqlalchemy import Column
+from sqlalchemy import ForeignKey
+from sqlalchemy import Integer
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.associationproxy import AssociationProxy
+
+from litestar import get
+from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
+from litestar.dto.factory import dto_field
+
+class Base(DeclarativeBase):
+ pass
+
+class User(Base):
+ __tablename__ = "user"
+ id: Mapped[int] = mapped_column(primary_key=True)
+ kw: Mapped[List[Keyword]] = relationship(secondary=lambda: user_keyword_table, info=dto_field("private"))
+ # proxy the 'keyword' attribute from the 'kw' relationship
+ keywords: AssociationProxy[List[str]] = association_proxy("kw", "keyword")
+
+class Keyword(Base):
+ __tablename__ = "keyword"
+ id: Mapped[int] = mapped_column(primary_key=True)
+ keyword: Mapped[str] = mapped_column(String(64))
+
+user_keyword_table: Final[Table] = Table(
+ "user_keyword",
+ Base.metadata,
+ Column("user_id", Integer, ForeignKey("user.id"), primary_key=True),
+ Column("keyword_id", Integer, ForeignKey("keyword.id"), primary_key=True),
+)
+
+dto = SQLAlchemyDTO[User]
+
+@get("/", return_dto=dto)
+def get_handler() -> User:
+ return User(id=1, kw=[Keyword(keyword="bar"), Keyword(keyword="baz")])
+"""
+ )
+
+ with create_test_client(route_handlers=[module.get_handler], debug=True) as client:
+ response = client.get("/")
+ assert response.json() == {"id": 1, "keywords": ["bar", "baz"]}
+
+
+def test_dto_with_hybrid_property(create_module: Callable[[str], ModuleType]) -> None:
+ module = create_module(
+ """
+from __future__ import annotations
+
+from sqlalchemy.ext.hybrid import hybrid_property
+from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+
+from litestar import get
+from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
+
+class Base(DeclarativeBase):
+ pass
+
+class Interval(Base):
+ __tablename__ = 'interval'
+
+ id: Mapped[int] = mapped_column(primary_key=True)
+ start: Mapped[int]
+ end: Mapped[int]
+
+ @hybrid_property
+ def length(self) -> int:
+ return self.end - self.start
+
+dto = SQLAlchemyDTO[Interval]
+
+@get("/", return_dto=dto)
+def get_handler() -> Interval:
+ return Interval(id=1, start=1, end=3)
+"""
+ )
+
+ with create_test_client(route_handlers=[module.get_handler], debug=True) as client:
+ response = client.get("/")
+ assert response.json() == {"id": 1, "start": 1, "end": 3, "length": 2}
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-05-29T01:23:46 |
litestar-org/litestar | 1,764 | litestar-org__litestar-1764 | [
"1763",
"4321"
] | ccc9b33831620ea20a4dffa0d0a76501f2a785bb | diff --git a/litestar/dto/factory/_backends/utils.py b/litestar/dto/factory/_backends/utils.py
--- a/litestar/dto/factory/_backends/utils.py
+++ b/litestar/dto/factory/_backends/utils.py
@@ -141,9 +141,9 @@ def transfer_data(
Data parsed into ``destination_type``.
"""
if not parsed_type.is_subclass_of(str) and not parsed_type.is_mapping and parsed_type.is_collection:
- origin = parsed_type.origin
- if not issubclass(origin, InstantiableCollection):
- raise RuntimeError(f"Unexpected origin type '{parsed_type.origin}', expected collection type")
+ origin = parsed_type.instantiable_origin
+ if not issubclass(origin, InstantiableCollection): # pragma: no cover
+ raise RuntimeError(f"Unexpected origin type '{parsed_type.instantiable_origin}', expected collection type")
return origin( # type:ignore[no-any-return]
transfer_data(destination_type, item, field_definitions, dto_for, parsed_type.inner_types[0])
diff --git a/litestar/typing.py b/litestar/typing.py
--- a/litestar/typing.py
+++ b/litestar/typing.py
@@ -6,7 +6,7 @@
from typing_extensions import Annotated, NotRequired, Required, get_args, get_origin
from litestar.types.builtin_types import UNION_TYPES, NoneType
-from litestar.utils.typing import get_safe_generic_origin, unwrap_annotation
+from litestar.utils.typing import get_safe_generic_origin, instantiable_type_mapping, unwrap_annotation
__all__ = ("ParsedType",)
@@ -21,6 +21,7 @@ class ParsedType:
"origin",
"args",
"metadata",
+ "instantiable_origin",
"is_annotated",
"is_required",
"is_not_required",
@@ -38,6 +39,8 @@ class ParsedType:
"""The result of calling ``get_args(annotation)`` after unwrapping Annotated, e.g. (int,)."""
metadata: tuple[Any, ...]
"""Any metadata associated with the annotation via ``Annotated``."""
+ instantiable_origin: Any
+ """An equivalent type to ``origin`` that can be safely instantiated. E.g., ``Sequence`` -> ``list``."""
is_annotated: bool
"""Whether the annotation included ``Annotated`` or not."""
is_required: bool
@@ -71,6 +74,7 @@ def __init__(self, annotation: Any) -> None:
object.__setattr__(self, "origin", origin)
object.__setattr__(self, "args", args)
object.__setattr__(self, "metadata", metadata)
+ object.__setattr__(self, "instantiable_origin", instantiable_type_mapping.get(origin, origin))
object.__setattr__(self, "is_annotated", Annotated in wrappers)
object.__setattr__(self, "is_required", Required in wrappers)
object.__setattr__(self, "is_not_required", NotRequired in wrappers)
diff --git a/litestar/utils/typing.py b/litestar/utils/typing.py
--- a/litestar/utils/typing.py
+++ b/litestar/utils/typing.py
@@ -33,6 +33,7 @@
"annotation_is_iterable_of_type",
"get_origin_or_inner_type",
"get_safe_generic_origin",
+ "instantiable_type_mapping",
"make_non_optional_union",
"unwrap_annotation",
)
@@ -48,7 +49,7 @@
)
)
-types_mapping = {
+instantiable_type_mapping = {
AbstractSet: set,
DefaultDict: defaultdict,
Deque: deque,
@@ -62,6 +63,19 @@
Sequence: list,
Set: set,
Tuple: tuple,
+ abc.Mapping: dict,
+ abc.MutableMapping: dict,
+ abc.MutableSequence: list,
+ abc.MutableSet: set,
+ abc.Sequence: list,
+ abc.Set: set,
+ defaultdict: defaultdict,
+ deque: deque,
+ dict: dict,
+ frozenset: frozenset,
+ list: list,
+ set: set,
+ tuple: tuple,
}
_safe_generic_origin_map = {
@@ -107,7 +121,7 @@
def normalize_type_annotation(annotation: Any) -> Any:
"""Normalize a type annotation to a standard form."""
- return types_mapping.get(annotation, annotation)
+ return instantiable_type_mapping.get(annotation, annotation)
def annotation_is_iterable_of_type(
@@ -208,7 +222,7 @@ def get_origin_or_inner_type(annotation: Any) -> Any:
# we need to recursively call here 'get_origin_or_inner_type' because we might be dealing with a generic type alias
# e.g. Annotated[dict[str, list[int]]
origin = get_origin_or_inner_type(inner)
- return types_mapping.get(origin, origin)
+ return instantiable_type_mapping.get(origin, origin)
def get_safe_generic_origin(origin_type: Any) -> Any:
| diff --git a/tests/dto/factory/test_integration.py b/tests/dto/factory/test_integration.py
--- a/tests/dto/factory/test_integration.py
+++ b/tests/dto/factory/test_integration.py
@@ -2,7 +2,7 @@
from __future__ import annotations
from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Dict, Optional
+from typing import TYPE_CHECKING, Dict, Optional, Sequence
import pytest
from typing_extensions import Annotated
@@ -302,3 +302,18 @@ def handler(data: DTOData[User] = Body(media_type=RequestEncodingType.URL_ENCODE
headers={"Content-Type": "application/x-www-form-urlencoded"},
)
assert response.json() == {"name": "John"}
+
+
+def test_dto_with_generic_sequence_annotations() -> None:
+ @dataclass
+ class User:
+ name: str
+ age: int
+
+ @post(dto=DataclassDTO[User], signature_namespace={"User": User})
+ def handler(data: Sequence[User]) -> Sequence[User]:
+ return data
+
+ with create_test_client(route_handlers=[handler], debug=True) as client:
+ response = client.post("/", json=[{"name": "John", "age": 42}])
+ assert response.json() == [{"name": "John", "age": 42}]
| DTO: fix data transfer when annotations are generic sequences
In this repo, a handler that returns a `list` is annotated to return a `Sequence`, which causes an error when we try to instantiate the return annotation type.
```py
Traceback (most recent call last):
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/middleware/exceptions/middleware.py", line 150, in __call__
await self.app(scope, receive, send)
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/routes/http.py", line 77, in handle
response = await self._get_response_for_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/routes/http.py", line 129, in _get_response_for_request
response = await self._call_handler_function(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/routes/http.py", line 162, in _call_handler_function
response: ASGIApp = await route_handler.to_response(app=scope["app"], data=response_data, request=request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/handlers/http_handlers/base.py", line 469, in to_response
return await response_handler(app=app, data=data, request=request, return_dto=self.resolve_return_dto()) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/handlers/http_handlers/_utils.py", line 99, in handler
data = return_dto(ctx).data_to_encodable_type(data)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/dto/factory/abc.py", line 103, in data_to_encodable_type
return backend.encode_data(data, self.connection_context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/dto/factory/_backends/abc.py", line 300, in encode_data
return transfer_data(
^^^^^^^^^^^^^^
File "/workspace/app/.venv/lib/python3.11/site-packages/litestar/dto/factory/_backends/utils.py", line 148, in transfer_data
return origin( # type:ignore[no-any-return]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: Sequence() takes no arguments
```
https://github.com/v3ss0n/litestar-fullstack-error-reproduce
_Originally posted by @v3ss0n in https://github.com/litestar-org/litestar/issues/1761#issuecomment-1568814594_
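The `TypeError` is easy to reproduce in isolation, since `collections.abc.Sequence` is an abstract origin that cannot be called with data (a standalone sketch, not Litestar code):

```python
from collections.abc import Sequence

items = [{"name": "John", "age": 42}]

try:
    Sequence(items)  # what transfer_data effectively did with the raw origin
except TypeError as exc:
    print(exc)  # Sequence() takes no arguments

# the fix above maps abstract origins to instantiable ones before calling them
instantiable_origin = {Sequence: list}.get(Sequence, Sequence)
print(instantiable_origin(items))  # [{'name': 'John', 'age': 42}]
```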
| 2023-05-30T20:57:01 |
|
litestar-org/litestar | 1,771 | litestar-org__litestar-1771 | [
"4321",
"1234"
] | f569ca1b710bafdafd973594ca1309d082b22607 | diff --git a/litestar/contrib/repository/filters.py b/litestar/contrib/repository/filters.py
--- a/litestar/contrib/repository/filters.py
+++ b/litestar/contrib/repository/filters.py
@@ -8,7 +8,13 @@
T = TypeVar("T")
-__all__ = ["BeforeAfter", "CollectionFilter", "LimitOffset", "OrderBy", "SearchFilter"]
+__all__ = (
+ "BeforeAfter",
+ "CollectionFilter",
+ "LimitOffset",
+ "OrderBy",
+ "SearchFilter",
+)
@dataclass
diff --git a/litestar/dto/exceptions.py b/litestar/dto/exceptions.py
--- a/litestar/dto/exceptions.py
+++ b/litestar/dto/exceptions.py
@@ -2,7 +2,7 @@
from litestar.exceptions import ImproperlyConfiguredException
-__all__ = ["DTOException", "UnsupportedType"]
+__all__ = ("DTOException", "UnsupportedType")
class DTOException(ImproperlyConfiguredException):
diff --git a/litestar/dto/factory/abc.py b/litestar/dto/factory/abc.py
--- a/litestar/dto/factory/abc.py
+++ b/litestar/dto/factory/abc.py
@@ -27,7 +27,7 @@
from ._backends import AbstractDTOBackend
from .types import FieldDefinition
-__all__ = ["AbstractDTOFactory"]
+__all__ = ("AbstractDTOFactory",)
T = TypeVar("T")
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-06-01T23:46:40 |
|
litestar-org/litestar | 1,773 | litestar-org__litestar-1773 | [
"4321",
"1234"
] | f68b2989f2f9f018f0c3fa22aa357a63a6dadf5f | diff --git a/litestar/dto/exceptions.py b/litestar/dto/exceptions.py
deleted file mode 100644
--- a/litestar/dto/exceptions.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from __future__ import annotations
-
-from litestar.exceptions import ImproperlyConfiguredException
-
-__all__ = ("DTOException", "UnsupportedType")
-
-
-class DTOException(ImproperlyConfiguredException):
- """Base exception for DTO errors."""
-
-
-class UnsupportedType(DTOException):
- """Raised when a type is not supported by Litestar."""
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-06-02T00:03:32 |
|
litestar-org/litestar | 1,780 | litestar-org__litestar-1780 | [
"4321",
"1234"
] | 83269ae38604b9f62a504f4a0f976b85ee7ca13c | diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py
--- a/litestar/contrib/sqlalchemy/types.py
+++ b/litestar/contrib/sqlalchemy/types.py
@@ -4,11 +4,12 @@
from base64 import b64decode
from typing import TYPE_CHECKING, Any, cast
+from sqlalchemy import text, util
from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB
from sqlalchemy.dialects.oracle import RAW as ORA_RAW
from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
-from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, TypeDecorator
+from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, SchemaType, TypeDecorator
from sqlalchemy.types import JSON as _JSON
if TYPE_CHECKING:
@@ -80,7 +81,7 @@ def to_uuid(value: Any) -> uuid.UUID | None:
return cast("uuid.UUID | None", value)
-class JSON(TypeDecorator):
+class JSON(TypeDecorator, SchemaType): # type: ignore
"""Platform-independent JSON type.
Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type.
@@ -98,6 +99,8 @@ def python_type(self) -> type[dict]:
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Initialize JSON type"""
+ self.name = kwargs.pop("name", None)
+ self.oracle_strict = kwargs.pop("oracle_strict", True)
def load_dialect_impl(self, dialect: Dialect) -> Any:
if dialect.name == "postgresql":
@@ -105,3 +108,31 @@ def load_dialect_impl(self, dialect: Dialect) -> Any:
if dialect.name == "oracle":
return dialect.type_descriptor(ORA_BLOB())
return dialect.type_descriptor(_JSON())
+
+ def _should_create_constraint(self, compiler: Any, **kw: Any) -> bool:
+ return bool(compiler.dialect.name == "oracle")
+
+ def _variant_mapping_for_set_table(self, column: Any) -> dict | None:
+ if column.type._variant_mapping:
+ variant_mapping = dict(column.type._variant_mapping)
+ variant_mapping["_default"] = column.type
+ else:
+ variant_mapping = None
+ return variant_mapping
+
+ @util.preload_module("sqlalchemy.sql.schema")
+ def _set_table(self, column: Any, table: Any) -> None:
+ schema = util.preloaded.sql_schema
+ variant_mapping = self._variant_mapping_for_set_table(column)
+ constraint_options = "(strict)" if self.oracle_strict else ""
+ sqltext = text(f"{column.name} is json {constraint_options}")
+ e = schema.CheckConstraint(
+ sqltext,
+ name=f"{column.name}_is_json",
+ _create_rule=util.portable_instancemethod( # type: ignore[no-untyped-call]
+ self._should_create_constraint,
+ {"variant_mapping": variant_mapping},
+ ),
+ _type_bound=True,
+ )
+ table.append_constraint(e)
| diff --git a/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_json.py b/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_json.py
--- a/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_json.py
+++ b/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_json.py
@@ -7,6 +7,7 @@
import pytest
from sqlalchemy import Engine, NullPool, create_engine
from sqlalchemy.dialects import oracle
+from sqlalchemy.schema import CreateTable
from tests.contrib.sqlalchemy.models_uuid import (
UUIDEventLog,
@@ -47,7 +48,9 @@ def fx_engine(docker_ip: str) -> Engine:
)
[email protected]
def test_json_constraint_generation(engine: Engine) -> None:
- _ddl = UUIDEventLog.__table__.compile(engine, dialect=oracle.dialect()) # type: ignore
- assert "BLOB" in str(_ddl)
+ ddl = str(CreateTable(UUIDEventLog.__table__).compile(engine, dialect=oracle.dialect())) # type: ignore
+ assert "BLOB" in ddl.upper()
+ assert "JSON" in ddl.upper()
+ with engine.begin() as conn:
+ UUIDEventLog.metadata.create_all(conn)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-06-04T00:11:37 |
litestar-org/litestar | 1,783 | litestar-org__litestar-1783 | [
"4321",
"1234"
] | b97d1f872ab94262d4d9ef293d7e516ddbc24a27 | diff --git a/litestar/contrib/sqlalchemy/repository.py b/litestar/contrib/sqlalchemy/repository.py
--- a/litestar/contrib/sqlalchemy/repository.py
+++ b/litestar/contrib/sqlalchemy/repository.py
@@ -278,7 +278,9 @@ async def get_or_create(
field = getattr(existing, field_name, None)
if field and field != new_field_value:
setattr(existing, field_name, new_field_value)
- return (await self.update(existing)), False
+ existing = await self._attach_to_session(existing, strategy="merge")
+ await self.session.flush()
+ self.session.expunge(existing)
return existing, False
async def count(self, *filters: FilterTypes, **kwargs: Any) -> int:
@@ -315,7 +317,11 @@ async def update(self, data: ModelT) -> ModelT:
NotFoundError: If no instance found with same identifier as `data`.
"""
with wrap_sqlalchemy_exception():
- instance = await self._attach_to_session(data)
+ item_id = self.get_id_attribute_value(data)
+ # this will raise for not found, and will put the item in the session
+ await self.get(item_id)
+ # this will merge the inbound data to the instance we just put in the session
+ instance = await self._attach_to_session(data, strategy="merge")
await self.session.flush()
self.session.expunge(instance)
return instance
@@ -806,7 +812,9 @@ def get_or_create(
field = getattr(existing, field_name, None)
if field and field != new_field_value:
setattr(existing, field_name, new_field_value)
- return (self.update(existing)), False
+ existing = self._attach_to_session(existing, strategy="merge")
+ self.session.flush()
+ self.session.expunge(existing)
return existing, False
def count(self, *filters: FilterTypes, **kwargs: Any) -> int:
@@ -843,7 +851,11 @@ def update(self, data: ModelT) -> ModelT:
NotFoundError: If no instance found with same identifier as `data`.
"""
with wrap_sqlalchemy_exception():
- instance = self._attach_to_session(data)
+ item_id = self.get_id_attribute_value(data)
+ # this will raise for not found, and will put the item in the session
+ self.get(item_id)
+ # this will merge the inbound data to the instance we just put in the session
+ instance = self._attach_to_session(data, strategy="merge")
self.session.flush()
self.session.expunge(instance)
return instance
| diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -18,7 +18,7 @@ on:
jobs:
test:
runs-on: ${{ inputs.os }}
- timeout-minutes: 15
+ timeout-minutes: 20
defaults:
run:
shell: bash
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
@@ -254,12 +254,33 @@ async def test_sqlalchemy_repo_get_or_create_member_existing(
result_mock = MagicMock()
result_mock.scalar_one_or_none = MagicMock(return_value=mock_instance)
execute_mock = AsyncMock(return_value=result_mock)
+ attach_to_session_mock = AsyncMock(return_value=mock_instance)
monkeypatch.setattr(mock_repo, "_execute", execute_mock)
+ monkeypatch.setattr(mock_repo, "_attach_to_session", attach_to_session_mock)
instance, created = await mock_repo.get_or_create(id="instance-id")
assert instance is mock_instance
assert created is False
mock_repo.session.expunge.assert_called_with(mock_instance)
- mock_repo.session.add.assert_called_once()
+ mock_repo.session.merge.assert_not_called()
+
+
+async def test_sqlalchemy_repo_get_or_create_member_existing_upsert(
+ mock_repo: SQLAlchemyAsyncRepository, monkeypatch: MonkeyPatch
+) -> None:
+ """Test expected method calls for member get or create operation (existing)."""
+ mock_instance = MagicMock()
+ result_mock = MagicMock()
+ result_mock.scalar_one_or_none = MagicMock(return_value=mock_instance)
+ execute_mock = AsyncMock(return_value=result_mock)
+ attach_to_session_mock = AsyncMock(return_value=mock_instance)
+ monkeypatch.setattr(mock_repo, "_execute", execute_mock)
+ monkeypatch.setattr(mock_repo, "_attach_to_session", attach_to_session_mock)
+ instance, created = await mock_repo.get_or_create(id="instance-id", upsert=True, an_extra_attribute="yep")
+ assert instance is mock_instance
+ assert created is False
+ mock_repo.session.expunge.assert_called_with(mock_instance)
+ mock_repo._attach_to_session.assert_called_once()
+ mock_repo.session.flush.assert_called_once()
async def test_sqlalchemy_repo_get_or_create_member_existing_no_upsert(
@@ -441,7 +462,7 @@ async def test_sqlalchemy_repo_update(mock_repo: SQLAlchemyAsyncRepository, monk
mock_repo.session.merge.return_value = mock_instance
instance = await mock_repo.update(mock_instance)
assert instance is mock_instance
- mock_repo.session.add.assert_called_once_with(mock_instance)
+ mock_repo.session.merge.assert_called_once_with(mock_instance)
mock_repo.session.flush.assert_called_once()
mock_repo.session.expunge.assert_called_once_with(mock_instance)
mock_repo.session.commit.assert_not_called()
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
@@ -252,11 +252,33 @@ def test_sqlalchemy_repo_get_or_create_member_existing(
result_mock.scalar_one_or_none = MagicMock(return_value=mock_instance)
execute_mock = MagicMock(return_value=result_mock)
monkeypatch.setattr(mock_repo, "_execute", execute_mock)
+ attach_to_session_mock = MagicMock(return_value=mock_instance)
+ monkeypatch.setattr(mock_repo, "_execute", execute_mock)
+ monkeypatch.setattr(mock_repo, "_attach_to_session", attach_to_session_mock)
instance, created = mock_repo.get_or_create(id="instance-id")
assert instance is mock_instance
assert created is False
mock_repo.session.expunge.assert_called_with(mock_instance)
- mock_repo.session.add.assert_called_once()
+ mock_repo.session.merge.assert_not_called()
+
+
+def test_sqlalchemy_repo_get_or_create_member_existing_upsert(
+ mock_repo: SQLAlchemySyncRepository, monkeypatch: MonkeyPatch
+) -> None:
+ """Test expected method calls for member get or create operation (existing)."""
+ mock_instance = MagicMock()
+ result_mock = MagicMock()
+ result_mock.scalar_one_or_none = MagicMock(return_value=mock_instance)
+ execute_mock = MagicMock(return_value=result_mock)
+ attach_to_session_mock = MagicMock(return_value=mock_instance)
+ monkeypatch.setattr(mock_repo, "_execute", execute_mock)
+ monkeypatch.setattr(mock_repo, "_attach_to_session", attach_to_session_mock)
+ instance, created = mock_repo.get_or_create(id="instance-id", upsert=True, an_extra_attribute="yep")
+ assert instance is mock_instance
+ assert created is False
+ mock_repo.session.expunge.assert_called_with(mock_instance)
+ mock_repo._attach_to_session.assert_called_once()
+ mock_repo.session.flush.assert_called_once()
def test_sqlalchemy_repo_get_or_create_member_existing_no_upsert(
@@ -431,10 +453,10 @@ def test_sqlalchemy_repo_update(mock_repo: SQLAlchemySyncRepository, monkeypatch
monkeypatch.setattr(mock_repo, "get_id_attribute_value", get_id_value_mock)
get_mock = MagicMock()
monkeypatch.setattr(mock_repo, "get", get_mock)
- mock_repo.session.add.return_value = mock_instance
+ mock_repo.session.merge.return_value = mock_instance
instance = mock_repo.update(mock_instance)
assert instance is mock_instance
- mock_repo.session.add.assert_called_once_with(mock_instance)
+ mock_repo.session.merge.assert_called_once_with(mock_instance)
mock_repo.session.flush.assert_called_once()
mock_repo.session.expunge.assert_called_once_with(mock_instance)
mock_repo.session.commit.assert_not_called()
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-06-04T16:58:57 |
litestar-org/litestar | 1,791 | litestar-org__litestar-1791 | [
"1440",
"4321"
] | f81cbba577d29e1079dac524022b112b6da95687 | diff --git a/litestar/types/composite_types.py b/litestar/types/composite_types.py
--- a/litestar/types/composite_types.py
+++ b/litestar/types/composite_types.py
@@ -12,6 +12,7 @@
Iterator,
Literal,
Mapping,
+ MutableMapping,
Sequence,
Set,
Tuple,
@@ -45,7 +46,7 @@
Dependencies = Mapping[str, Union[Provide, AnyCallable]]
-ExceptionHandlersMap = Mapping[Union[int, Type[Exception]], ExceptionHandler]
+ExceptionHandlersMap = MutableMapping[Union[int, Type[Exception]], ExceptionHandler]
MaybePartial = Union[T, partial]
Middleware = Union[
Callable[..., ASGIApp], DefineMiddleware, Iterator[Tuple[ASGIApp, Dict[str, Any]]], Type[MiddlewareProtocol]
| Enhancement: Only run Sonar & Snyk on forks
### Summary
When forking the repo, the test workflows come along with it, but the Snyk & Sonar CI jobs fail because the required secrets are not set.
### Basic Example
Add ` && github.repository_owner == 'starlite-api'` to the if-check in `ci.yaml` to prevent these two CI items from running on non-upstream repos.
### Drawbacks and Impact
_No response_
### Unresolved questions
_No response_
| 2023-06-07T11:11:30 |
||
litestar-org/litestar | 1,794 | litestar-org__litestar-1794 | [
"4321",
"1234"
] | 3cc3120e9fe7f8f8411561207456593911990738 | diff --git a/litestar/contrib/sqlalchemy/base.py b/litestar/contrib/sqlalchemy/base.py
--- a/litestar/contrib/sqlalchemy/base.py
+++ b/litestar/contrib/sqlalchemy/base.py
@@ -19,7 +19,7 @@
registry,
)
-from .types import GUID, JSON, BigIntIdentity
+from .types import GUID, BigIntIdentity, JsonB
if TYPE_CHECKING:
from sqlalchemy.sql import FromClause
@@ -151,7 +151,7 @@ def create_registry() -> registry:
EmailStr: String,
AnyUrl: String,
AnyHttpUrl: String,
- dict: JSON,
+ dict: JsonB,
datetime: DateTime,
date: Date,
},
diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py
--- a/litestar/contrib/sqlalchemy/types.py
+++ b/litestar/contrib/sqlalchemy/types.py
@@ -9,14 +9,14 @@
from sqlalchemy.dialects.oracle import RAW as ORA_RAW
from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
-from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, SchemaType, TypeDecorator
+from sqlalchemy.types import BINARY, CHAR, BigInteger, Integer, SchemaType, TypeDecorator, TypeEngine
from sqlalchemy.types import JSON as _JSON
+from litestar.serialization import decode_json, encode_json
+
if TYPE_CHECKING:
from sqlalchemy.engine import Dialect
-BigIntIdentity = BigInteger().with_variant(Integer, "sqlite")
-
class GUID(TypeDecorator):
"""Platform-independent GUID type.
@@ -81,16 +81,14 @@ def to_uuid(value: Any) -> uuid.UUID | None:
return cast("uuid.UUID | None", value)
-class JSON(TypeDecorator, SchemaType): # type: ignore
- """Platform-independent JSON type.
-
- Uses JSONB type for postgres, BLOB for Oracle, otherwise uses the generic JSON data type.
+class ORA_JSONB(TypeDecorator, SchemaType): # type: ignore # noqa: N801
+ """Oracle Binary JSON type.
- JSON = _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_BLOB, "oracle")
+ JsonB = _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_JSONB, "oracle")
"""
- impl = _JSON
+ impl = ORA_BLOB
cache_ok = True
@property
@@ -102,12 +100,21 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
self.name = kwargs.pop("name", None)
self.oracle_strict = kwargs.pop("oracle_strict", True)
- def load_dialect_impl(self, dialect: Dialect) -> Any:
- if dialect.name == "postgresql":
- return dialect.type_descriptor(PG_JSONB()) # type: ignore
- if dialect.name == "oracle":
- return dialect.type_descriptor(ORA_BLOB())
- return dialect.type_descriptor(_JSON())
+ def coerce_compared_value(self, op: Any, value: Any) -> Any:
+ return self.impl.coerce_compared_value(op=op, value=value) # type: ignore
+
+ def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]:
+ return dialect.type_descriptor(ORA_BLOB())
+
+ def process_bind_param(self, value: Any, dialect: Dialect) -> Any | None:
+ if value is None:
+ return value
+ return encode_json(value)
+
+ def process_result_value(self, value: bytes | None, dialect: Dialect) -> Any | None:
+ if value is None:
+ return value
+ return decode_json(value)
def _should_create_constraint(self, compiler: Any, **kw: Any) -> bool:
return bool(compiler.dialect.name == "oracle")
@@ -136,3 +143,7 @@ def _set_table(self, column: Any, table: Any) -> None:
_type_bound=True,
)
table.append_constraint(e)
+
+
+BigIntIdentity = BigInteger().with_variant(Integer, "sqlite")
+JsonB = _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_JSONB, "oracle")
| diff --git a/tests/contrib/sqlalchemy/models_bigint.py b/tests/contrib/sqlalchemy/models_bigint.py
--- a/tests/contrib/sqlalchemy/models_bigint.py
+++ b/tests/contrib/sqlalchemy/models_bigint.py
@@ -32,6 +32,19 @@ class BigIntEventLog(BigIntAuditBase):
payload: Mapped[dict] = mapped_column(default=lambda: {}) # pyright: ignore
+class BigIntRule(BigIntAuditBase):
+ """The rule domain object."""
+
+ name: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore
+ config: Mapped[dict] = mapped_column(default=lambda: {}) # pyright: ignore
+
+
+class RuleAsyncRepository(SQLAlchemyAsyncRepository[BigIntRule]):
+ """Rule repository."""
+
+ model_type = BigIntRule
+
+
class AuthorAsyncRepository(SQLAlchemyAsyncRepository[BigIntAuthor]):
"""Author repository."""
@@ -66,3 +79,9 @@ class EventLogSyncRepository(SQLAlchemySyncRepository[BigIntEventLog]):
"""Event log repository."""
model_type = BigIntEventLog
+
+
+class RuleSyncRepository(SQLAlchemySyncRepository[BigIntRule]):
+ """Rule repository."""
+
+ model_type = BigIntRule
diff --git a/tests/contrib/sqlalchemy/models_uuid.py b/tests/contrib/sqlalchemy/models_uuid.py
--- a/tests/contrib/sqlalchemy/models_uuid.py
+++ b/tests/contrib/sqlalchemy/models_uuid.py
@@ -33,6 +33,19 @@ class UUIDEventLog(UUIDAuditBase):
payload: Mapped[dict] = mapped_column(default={}) # pyright: ignore
+class UUIDRule(UUIDAuditBase):
+ """The rule domain object."""
+
+ name: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore
+ config: Mapped[dict] = mapped_column(default=lambda: {}) # pyright: ignore
+
+
+class RuleAsyncRepository(SQLAlchemyAsyncRepository[UUIDRule]):
+ """Rule repository."""
+
+ model_type = UUIDRule
+
+
class AuthorAsyncRepository(SQLAlchemyAsyncRepository[UUIDAuthor]):
"""Author repository."""
@@ -67,3 +80,9 @@ class EventLogSyncRepository(SQLAlchemySyncRepository[UUIDEventLog]):
"""Event log repository."""
model_type = UUIDEventLog
+
+
+class RuleSyncRepository(SQLAlchemySyncRepository[UUIDRule]):
+ """Rule repository."""
+
+ model_type = UUIDRule
diff --git a/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_bigint.py b/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_bigint.py
--- a/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_bigint.py
+++ b/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_bigint.py
@@ -13,7 +13,11 @@
create_async_engine,
)
-from tests.contrib.sqlalchemy.models_bigint import AuthorAsyncRepository, BookAsyncRepository
+from tests.contrib.sqlalchemy.models_bigint import (
+ AuthorAsyncRepository,
+ BookAsyncRepository,
+ RuleAsyncRepository,
+)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_bigint_tests as st
pytestmark = pytest.mark.sqlalchemy_aiosqlite
@@ -41,10 +45,13 @@ async def fx_engine(tmp_path: Path) -> AsyncGenerator[AsyncEngine, None]:
name="session",
)
async def fx_session(
- engine: AsyncEngine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ await st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -62,6 +69,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -270,3 +282,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_uuid.py b/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_uuid.py
--- a/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/aiosqlite/test_sqlalchemy_aiosqlite_uuid.py
@@ -13,7 +13,11 @@
create_async_engine,
)
-from tests.contrib.sqlalchemy.models_uuid import AuthorAsyncRepository, BookAsyncRepository
+from tests.contrib.sqlalchemy.models_uuid import (
+ AuthorAsyncRepository,
+ BookAsyncRepository,
+ RuleAsyncRepository,
+)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_uuid_tests as st
pytestmark = pytest.mark.sqlalchemy_aiosqlite
@@ -41,10 +45,13 @@ async def fx_engine(tmp_path: Path) -> AsyncGenerator[AsyncEngine, None]:
name="session",
)
async def fx_session(
- engine: AsyncEngine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ await st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -62,6 +69,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -276,3 +288,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rules_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_bigint.py b/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_bigint.py
--- a/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_bigint.py
+++ b/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_bigint.py
@@ -14,7 +14,7 @@
create_async_engine,
)
-from tests.contrib.sqlalchemy.models_bigint import AuthorAsyncRepository, BookAsyncRepository
+from tests.contrib.sqlalchemy.models_bigint import AuthorAsyncRepository, BookAsyncRepository, RuleAsyncRepository
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_bigint_tests as st
pytestmark = [
@@ -52,10 +52,13 @@ async def fx_engine(docker_ip: str) -> AsyncEngine:
@pytest.fixture(name="session")
async def fx_session(
- engine: AsyncEngine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ await st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -73,6 +76,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -279,3 +287,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_uuid.py b/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_uuid.py
--- a/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/asyncmy/test_sqlalchemy_asyncmy_uuid.py
@@ -17,6 +17,7 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorAsyncRepository,
BookAsyncRepository,
+ RuleAsyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_uuid_tests as st
@@ -55,10 +56,13 @@ async def fx_engine(docker_ip: str) -> AsyncEngine:
@pytest.fixture(name="session")
async def fx_session(
- engine: AsyncEngine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ await st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -76,6 +80,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -280,3 +289,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rules_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_bigint.py b/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_bigint.py
--- a/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_bigint.py
+++ b/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_bigint.py
@@ -17,6 +17,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorAsyncRepository,
BookAsyncRepository,
+ RuleAsyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_bigint_tests as st
@@ -57,10 +58,13 @@ async def fx_engine(docker_ip: str) -> AsyncEngine:
name="session",
)
async def fx_session(
- engine: AsyncEngine, raw_authors_bigint: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_bigint, raw_books_uuid)
+ await st.seed_db(engine, raw_authors_bigint, raw_books_uuid, raw_rules_bigint)
try:
yield session
finally:
@@ -78,6 +82,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -284,3 +293,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_uuid.py b/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_uuid.py
--- a/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/asyncpg/test_sqlalchemy_asyncpg_uuid.py
@@ -17,6 +17,7 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorAsyncRepository,
BookAsyncRepository,
+ RuleAsyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_uuid_tests as st
@@ -60,9 +61,10 @@ async def fx_session(
engine: AsyncEngine,
raw_authors_uuid: list[dict[str, Any]],
raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ await st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -80,6 +82,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -284,3 +291,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rules_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/conftest.py b/tests/contrib/sqlalchemy/repository/conftest.py
--- a/tests/contrib/sqlalchemy/repository/conftest.py
+++ b/tests/contrib/sqlalchemy/repository/conftest.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import json
from datetime import datetime
from typing import TYPE_CHECKING, Any
from uuid import UUID
@@ -87,6 +88,27 @@ def fx_raw_log_events_uuid() -> list[dict[str, Any]]:
]
[email protected](name="raw_rules_uuid")
+def fx_raw_rules_uuid() -> list[dict[str, Any]]:
+ """Unstructured rules representations."""
+ return [
+ {
+ "id": "f34545b9-663c-4fce-915d-dd1ae9cea42a",
+ "name": "Initial loading rule.",
+ "config": json.dumps({"url": "https://litestar.dev", "setting_123": 1}),
+ "created": "2023-01-01T00:00:00",
+ "updated": "2023-02-01T00:00:00",
+ },
+ {
+ "id": "f34545b9-663c-4fce-915d-dd1ae9cea34b",
+ "name": "Secondary loading rule.",
+ "config": {"url": "https://litestar.dev", "bar": "foo", "setting_123": 4},
+ "created": "2023-02-01T00:00:00",
+ "updated": "2023-02-01T00:00:00",
+ },
+ ]
+
+
@pytest.fixture(name="raw_authors_bigint")
def fx_raw_authors_bigint() -> list[dict[str, Any]]:
"""Unstructured author representations."""
@@ -132,3 +154,24 @@ def fx_raw_log_events_bigint() -> list[dict[str, Any]]:
"updated": "0001-01-01T00:00:00",
},
]
+
+
[email protected](name="raw_rules_bigint")
+def fx_raw_rules_bigint() -> list[dict[str, Any]]:
+ """Unstructured rules representations."""
+ return [
+ {
+ "id": 2025,
+ "name": "Initial loading rule.",
+ "config": json.dumps({"url": "https://litestar.dev", "setting_123": 1}),
+ "created": "2023-01-01T00:00:00",
+ "updated": "2023-02-01T00:00:00",
+ },
+ {
+ "id": 2024,
+ "name": "Secondary loading rule.",
+ "config": {"url": "https://litestar.dev", "bar": "foo", "setting_123": 4},
+ "created": "2023-02-01T00:00:00",
+ "updated": "2023-02-01T00:00:00",
+ },
+ ]
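A detail worth flagging in the fixtures above: the first seeded rule's `config` arrives as a pre-serialized JSON string (via `json.dumps`) while the second is a plain dict, so the seeding and repository code paths get exercised with both input shapes.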
diff --git a/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_bigint.py b/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_bigint.py
--- a/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_bigint.py
+++ b/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_bigint.py
@@ -12,6 +12,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_bigint_tests as st
@@ -44,10 +45,13 @@ def fx_engine(tmp_path: Path) -> Generator[Engine, None, None]:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -65,6 +69,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -269,3 +278,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_uuid.py b/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_uuid.py
--- a/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/duckdb/test_sqlalchemy_duckdb_uuid.py
@@ -12,6 +12,7 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_uuid_tests as st
@@ -44,10 +45,13 @@ def fx_engine(tmp_path: Path) -> Generator[Engine, None, None]:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -65,6 +69,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -267,3 +276,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_bigint.py b/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_bigint.py
--- a/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_bigint.py
+++ b/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_bigint.py
@@ -9,7 +9,7 @@
from sqlalchemy import Engine, NullPool, create_engine
from sqlalchemy.orm import Session, sessionmaker
-from tests.contrib.sqlalchemy.models_bigint import AuthorSyncRepository, BookSyncRepository
+from tests.contrib.sqlalchemy.models_bigint import AuthorSyncRepository, BookSyncRepository, RuleSyncRepository
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_bigint_tests as st
pytestmark = [
@@ -52,10 +52,13 @@ def fx_engine(docker_ip: str) -> Engine:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -73,6 +76,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -278,3 +286,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_uuid.py b/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_uuid.py
--- a/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/oracledb/test_sqlalchemy_oracledb_uuid.py
@@ -12,6 +12,7 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_uuid_tests as st
@@ -55,10 +56,13 @@ def fx_engine(docker_ip: str) -> Engine:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -76,6 +80,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -278,3 +287,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_async.py b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_async.py
--- a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_async.py
+++ b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_async.py
@@ -17,6 +17,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorAsyncRepository,
BookAsyncRepository,
+ RuleAsyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_bigint_tests as st
@@ -57,10 +58,13 @@ async def fx_engine(docker_ip: str) -> AsyncEngine:
name="session",
)
async def fx_session(
- engine: AsyncEngine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ await st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -78,6 +82,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -284,3 +293,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_sync.py b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_sync.py
--- a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_sync.py
+++ b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_bigint_sync.py
@@ -12,6 +12,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_bigint_tests as st
@@ -52,10 +53,13 @@ def fx_engine(docker_ip: str) -> Engine:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -73,6 +77,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -277,3 +286,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_async.py b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_async.py
--- a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_async.py
+++ b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_async.py
@@ -17,6 +17,7 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorAsyncRepository,
BookAsyncRepository,
+ RuleAsyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_async_uuid_tests as st
@@ -57,10 +58,13 @@ async def fx_engine(docker_ip: str) -> AsyncEngine:
name="session",
)
async def fx_session(
- engine: AsyncEngine, raw_authors_uuid: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: AsyncEngine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> AsyncGenerator[AsyncSession, None]:
session = async_sessionmaker(bind=engine)()
- await st.seed_db(engine, raw_authors_uuid, raw_books_bigint)
+ await st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -78,6 +82,11 @@ def fx_book_repo(session: AsyncSession) -> BookAsyncRepository:
return BookAsyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: AsyncSession) -> RuleAsyncRepository:
+ return RuleAsyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -282,3 +291,16 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
author_repo (AuthorRepository): The author mock repository
"""
await st.test_repo_filter_collection(author_repo=author_repo)
+
+
+async def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rules_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ await st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_sync.py b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_sync.py
--- a/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_sync.py
+++ b/tests/contrib/sqlalchemy/repository/psycopg/test_sqlalchemy_psycopg_uuid_sync.py
@@ -12,6 +12,7 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_uuid_tests as st
@@ -52,10 +53,13 @@ def fx_engine(docker_ip: str) -> Engine:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -73,6 +77,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -278,3 +287,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/spanner/test_sqlalchemy_spanner_uuid.py b/tests/contrib/sqlalchemy/repository/spanner/test_sqlalchemy_spanner_uuid.py
--- a/tests/contrib/sqlalchemy/repository/spanner/test_sqlalchemy_spanner_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/spanner/test_sqlalchemy_spanner_uuid.py
@@ -13,7 +13,9 @@
from tests.contrib.sqlalchemy.models_uuid import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
UUIDAuthor,
+ UUIDRule,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_uuid_tests as st
@@ -21,10 +23,20 @@
pytest.mark.skipif(sys.platform != "linux", reason="docker not available on this platform"),
pytest.mark.usefixtures("spanner_service"),
pytest.mark.sqlalchemy_integration,
- pytest.mark.sqlalchemy_psycopg_sync,
+ pytest.mark.sqlalchemy_spanner,
]
[email protected]()
+def set_spanner_emulator_host(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setenv("SPANNER_EMULATOR_HOST", "localhost:9010")
+
+
[email protected]()
+def set_google_cloud_project(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setenv("GOOGLE_CLOUD_PROJECT", "emulator-test-project")
+
+
@pytest.fixture(name="engine")
def fx_engine(docker_ip: str) -> Engine:
"""Postgresql instance for end-to-end testing.
@@ -35,8 +47,6 @@ def fx_engine(docker_ip: str) -> Engine:
Returns:
Async SQLAlchemy engine instance.
"""
- os.environ["SPANNER_EMULATOR_HOST"] = "localhost:9010"
- os.environ["GOOGLE_CLOUD_PROJECT"] = "emulator-test-project"
return create_engine(
"spanner+spanner:///projects/emulator-test-project/instances/test-instance/databases/test-database",
echo=True,
@@ -45,12 +55,18 @@ def fx_engine(docker_ip: str) -> Engine:
@pytest.fixture(name="session")
def fx_session(
- engine: Engine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> Generator[Session, None, None]:
for raw_author in raw_authors_uuid:
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ for raw_rule in raw_rules_uuid:
+ raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
+ raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
with engine.begin() as txn:
objs = []
for tbl in UUIDAuthor.registry.metadata.sorted_tables:
@@ -60,9 +76,15 @@ def fx_session(
session = sessionmaker(bind=engine)()
try:
- repo = AuthorSyncRepository(session=session)
+ author_repo = AuthorSyncRepository(session=session)
for author in raw_authors_uuid:
- _ = repo.get_or_create("name", **author)
+ _ = author_repo.get_or_create(match_fields="name", **author)
+ if not bool(os.environ.get("SPANNER_EMULATOR_HOST")):
+ rule_repo = RuleSyncRepository(session=session)
+ for rule in raw_rules_uuid:
+ _ = rule_repo.add(
+ UUIDRule(**rule),
+ )
yield session
finally:
session.rollback()
@@ -81,6 +103,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -151,6 +178,7 @@ def test_repo_add_many_method(raw_authors_uuid: list[dict[str, Any]], author_rep
# there's an emulator bug that causes this one to fail.
[email protected](bool(os.environ.get("SPANNER_EMULATOR_HOST")), reason="Skipped on emulator")
@pytest.mark.xfail
def test_repo_update_many_method(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy Update Many.
@@ -285,3 +313,21 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+# there's an emulator bug that causes this one to fail.
+# The current google tests disable JSON tests when using the emulator.
+# https://github.com/googleapis/python-spanner-sqlalchemy/blob/main/test/test_suite_20.py#L2853
[email protected](bool(os.environ.get("SPANNER_EMULATOR_HOST")), reason="Skipped on emulator")
[email protected]
+def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_async_bigint_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_async_bigint_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_async_bigint_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_async_bigint_tests.py
@@ -11,13 +11,20 @@
from litestar.contrib.repository.exceptions import RepositoryError
from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, OrderBy, SearchFilter
from litestar.contrib.sqlalchemy import base
-from tests.contrib.sqlalchemy.models_bigint import AuthorAsyncRepository, BigIntAuthor, BookAsyncRepository
+from tests.contrib.sqlalchemy.models_bigint import (
+ AuthorAsyncRepository,
+ BigIntAuthor,
+ BigIntRule,
+ BookAsyncRepository,
+ RuleAsyncRepository,
+)
async def seed_db(
engine: AsyncEngine,
raw_authors_bigint: list[dict[str, Any]],
raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> None:
"""Populate test database with sample data.
@@ -29,10 +36,15 @@ async def seed_db(
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+    for raw_rule in raw_rules_bigint:
+        raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
+        raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
+
async with engine.begin() as conn:
await conn.run_sync(base.orm_registry.metadata.drop_all)
await conn.run_sync(base.orm_registry.metadata.create_all)
await conn.execute(insert(BigIntAuthor).values(raw_authors_bigint))
+ await conn.execute(insert(BigIntRule).values(raw_rules_bigint))
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
@@ -359,3 +371,43 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
existing_obj = await author_repo.list(CollectionFilter(field_name="id", values=[2024]))
assert existing_obj[0].name == "Leo Tolstoy"
+
+
+async def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy JSON.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (AuthorAsyncRepository): The rules mock repository
+ """
+ exp_count = len(raw_rules_bigint) + 1
+ new_rule = BigIntRule(name="Testing", config={"an": "object"})
+ obj = await rule_repo.add(new_rule)
+ count = await rule_repo.count()
+ assert exp_count == count
+ assert isinstance(obj, BigIntRule)
+ assert new_rule.name == obj.name
+ assert new_rule.config == obj.config
+ assert obj.id is not None
+ obj.config = {"the": "update"}
+ updated = await rule_repo.update(obj)
+ assert obj.config == updated.config
+
+ get_obj, get_created = await rule_repo.get_or_create(
+ match_fields=["name"],
+ name="Secondary loading rule.",
+ config={"another": "object"},
+ )
+ assert get_created is False
+ assert get_obj.id is not None
+ assert get_obj.config == {"another": "object"}
+
+ new_obj, new_created = await rule_repo.get_or_create(
+ match_fields=["name"], name="New rule.", config={"new": "object"}
+ )
+ assert new_created is True
+ assert new_obj.id is not None
+ assert new_obj.config == {"new": "object"}
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_async_uuid_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_async_uuid_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_async_uuid_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_async_uuid_tests.py
@@ -14,13 +14,20 @@
from litestar.contrib.repository.exceptions import RepositoryError
from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, OrderBy, SearchFilter
from litestar.contrib.sqlalchemy import base
-from tests.contrib.sqlalchemy.models_uuid import AuthorAsyncRepository, BookAsyncRepository, UUIDAuthor
+from tests.contrib.sqlalchemy.models_uuid import (
+ AuthorAsyncRepository,
+ BookAsyncRepository,
+ RuleAsyncRepository,
+ UUIDAuthor,
+ UUIDRule,
+)
async def seed_db(
engine: AsyncEngine,
raw_authors_uuid: list[dict[str, Any]],
raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> None:
"""Populate test database with sample data.
@@ -32,11 +39,15 @@ async def seed_db(
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+    for raw_rule in raw_rules_uuid:
+        raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
+        raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
async with engine.begin() as conn:
await conn.run_sync(base.orm_registry.metadata.drop_all)
await conn.run_sync(base.orm_registry.metadata.create_all)
await conn.execute(insert(UUIDAuthor).values(raw_authors_uuid))
+ await conn.execute(insert(UUIDRule).values(raw_rules_uuid))
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorAsyncRepository) -> None:
@@ -374,3 +385,41 @@ async def test_repo_filter_collection(author_repo: AuthorAsyncRepository) -> Non
CollectionFilter(field_name="id", values=[UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2")])
)
assert existing_obj[0].name == "Leo Tolstoy"
+
+
+async def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleAsyncRepository,
+) -> None:
+ """Test SQLALchemy JSON.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rules_repo (AuthorSyncRepository): The rules mock repository
+ """
+ exp_count = len(raw_rules_uuid) + 1
+ new_rule = UUIDRule(name="Testing", config={"an": "object"})
+ obj = await rule_repo.add(new_rule)
+ count = await rule_repo.count()
+ assert exp_count == count
+ assert isinstance(obj, UUIDRule)
+ assert new_rule.name == obj.name
+ assert new_rule.config == obj.config
+ assert obj.id is not None
+ obj.config = {"the": "update"}
+ updated = await rule_repo.update(obj)
+ assert obj.config == updated.config
+
+ get_obj, get_created = await rule_repo.get_or_create(
+ match_fields=["name"], name="Secondary loading rule.", config={"another": "object"}
+ )
+ assert get_created is False
+ assert get_obj.id is not None
+ assert get_obj.config == {"another": "object"}
+
+ new_obj, new_created = await rule_repo.get_or_create(
+ match_fields=["name"], name="New rule.", config={"new": "object"}
+ )
+ assert new_created is True
+ assert new_obj.id is not None
+ assert new_obj.config == {"new": "object"}
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_bigint_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_bigint_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_bigint_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_bigint_tests.py
@@ -10,13 +10,20 @@
from litestar.contrib.repository.exceptions import RepositoryError
from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, OrderBy, SearchFilter
from litestar.contrib.sqlalchemy import base
-from tests.contrib.sqlalchemy.models_bigint import AuthorSyncRepository, BigIntAuthor, BookSyncRepository
+from tests.contrib.sqlalchemy.models_bigint import (
+ AuthorSyncRepository,
+ BigIntAuthor,
+ BigIntRule,
+ BookSyncRepository,
+ RuleSyncRepository,
+)
def seed_db(
engine: Engine,
raw_authors_bigint: list[dict[str, Any]],
raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> None:
"""Populate test database with sample data.
@@ -28,12 +35,17 @@ def seed_db(
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ for raw_rule in raw_rules_bigint:
+ raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
+ raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
with engine.begin() as conn:
base.orm_registry.metadata.drop_all(conn)
base.orm_registry.metadata.create_all(conn)
for author in raw_authors_bigint:
conn.execute(insert(BigIntAuthor).values(author))
+ for rule in raw_rules_bigint:
+ conn.execute(insert(BigIntRule).values(rule))
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
@@ -358,3 +370,41 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
existing_obj = author_repo.list(CollectionFilter(field_name="id", values=[2024]))
assert existing_obj[0].name == "Leo Tolstoy"
+
+
+def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy JSON.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (AuthorSyncRepository): The rules mock repository
+ """
+ exp_count = len(raw_rules_bigint) + 1
+ new_rule = BigIntRule(name="Testing", config={"an": "object"})
+ obj = rule_repo.add(new_rule)
+ count = rule_repo.count()
+ assert exp_count == count
+ assert isinstance(obj, BigIntRule)
+ assert new_rule.name == obj.name
+ assert new_rule.config == obj.config
+ assert obj.id is not None
+ obj.config = {"the": "update"}
+ updated = rule_repo.update(obj)
+ assert obj.config == updated.config
+
+ get_obj, get_created = rule_repo.get_or_create(
+ match_fields=["name"],
+ name="Secondary loading rule.",
+ config={"another": "object"},
+ )
+ assert get_created is False
+ assert get_obj.id is not None
+ assert get_obj.config == {"another": "object"}
+
+ new_obj, new_created = rule_repo.get_or_create(match_fields=["name"], name="New rule.", config={"new": "object"})
+ assert new_created is True
+ assert new_obj.id is not None
+ assert new_obj.config == {"new": "object"}
diff --git a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_uuid_tests.py b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_uuid_tests.py
--- a/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_uuid_tests.py
+++ b/tests/contrib/sqlalchemy/repository/sqlalchemy_sync_uuid_tests.py
@@ -11,13 +11,20 @@
from litestar.contrib.repository.exceptions import RepositoryError
from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, OrderBy, SearchFilter
from litestar.contrib.sqlalchemy import base
-from tests.contrib.sqlalchemy.models_uuid import AuthorSyncRepository, BookSyncRepository, UUIDAuthor
+from tests.contrib.sqlalchemy.models_uuid import (
+ AuthorSyncRepository,
+ BookSyncRepository,
+ RuleSyncRepository,
+ UUIDAuthor,
+ UUIDRule,
+)
def seed_db(
engine: Engine,
raw_authors_uuid: list[dict[str, Any]],
raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> None:
"""Populate test database with sample data.
@@ -29,6 +36,9 @@ def seed_db(
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+    for raw_rule in raw_rules_uuid:
+        raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
+        raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
with engine.begin() as conn:
base.orm_registry.metadata.drop_all(conn)
@@ -36,6 +46,8 @@ def seed_db(
with engine.begin() as conn:
for author in raw_authors_uuid:
conn.execute(insert(UUIDAuthor).values(author))
+ for rule in raw_rules_uuid:
+ conn.execute(insert(UUIDRule).values(rule))
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
@@ -373,3 +385,40 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
CollectionFilter(field_name="id", values=[UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2")])
)
assert existing_obj[0].name == "Leo Tolstoy"
+
+
+def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy JSON.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rules_repo (AuthorSyncRepository): The rules mock repository
+ """
+ exp_count = len(raw_rules_uuid) + 1
+ new_rule = UUIDRule(name="Testing", config={"an": "object"})
+ obj = rule_repo.add(new_rule)
+ count = rule_repo.count()
+ assert exp_count == count
+ assert isinstance(obj, UUIDRule)
+ assert new_rule.name == obj.name
+ assert new_rule.config == obj.config
+ assert obj.id is not None
+
+ obj.config = {"the": "update"}
+ updated = rule_repo.update(obj)
+ assert obj.config == updated.config
+
+ get_obj, get_created = rule_repo.get_or_create(
+ match_fields=["name"], name="Secondary loading rule.", config={"another": "object"}
+ )
+ assert get_created is False
+ assert get_obj.id is not None
+ assert get_obj.config == {"another": "object"}
+
+ new_obj, new_created = rule_repo.get_or_create(match_fields=["name"], name="New rule.", config={"new": "object"})
+ assert new_created is True
+ assert new_obj.id is not None
+ assert new_obj.config == {"new": "object"}
diff --git a/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_bigint.py b/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_bigint.py
--- a/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_bigint.py
+++ b/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_bigint.py
@@ -11,6 +11,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorSyncRepository,
BookSyncRepository,
+ RuleSyncRepository,
)
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_bigint_tests as st
@@ -39,10 +40,13 @@ def fx_engine(tmp_path: Path) -> Generator[Engine, None, None]:
name="session",
)
def fx_session(
- engine: Engine, raw_authors_bigint: list[dict[str, Any]], raw_books_bigint: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_bigint: list[dict[str, Any]],
+ raw_books_bigint: list[dict[str, Any]],
+ raw_rules_bigint: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_bigint, raw_books_bigint)
+ st.seed_db(engine, raw_authors_bigint, raw_books_bigint, raw_rules_bigint)
try:
yield session
finally:
@@ -60,6 +64,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -265,3 +274,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_bigint: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_bigint (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_bigint=raw_rules_bigint, rule_repo=rule_repo)
diff --git a/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_uuid.py b/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_uuid.py
--- a/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_uuid.py
+++ b/tests/contrib/sqlalchemy/repository/sqlite/test_sqlalchemy_sqlite_uuid.py
@@ -8,7 +8,7 @@
from sqlalchemy import Engine, NullPool, create_engine
from sqlalchemy.orm import Session, sessionmaker
-from tests.contrib.sqlalchemy.models_uuid import AuthorSyncRepository, BookSyncRepository
+from tests.contrib.sqlalchemy.models_uuid import AuthorSyncRepository, BookSyncRepository, RuleSyncRepository
from tests.contrib.sqlalchemy.repository import sqlalchemy_sync_uuid_tests as st
pytestmark = [pytest.mark.sqlalchemy_sqlite]
@@ -34,10 +34,13 @@ def fx_engine(tmp_path: Path) -> Generator[Engine, None, None]:
@pytest.fixture(name="session")
def fx_session(
- engine: Engine, raw_authors_uuid: list[dict[str, Any]], raw_books_uuid: list[dict[str, Any]]
+ engine: Engine,
+ raw_authors_uuid: list[dict[str, Any]],
+ raw_books_uuid: list[dict[str, Any]],
+ raw_rules_uuid: list[dict[str, Any]],
) -> Generator[Session, None, None]:
session = sessionmaker(bind=engine)()
- st.seed_db(engine, raw_authors_uuid, raw_books_uuid)
+ st.seed_db(engine, raw_authors_uuid, raw_books_uuid, raw_rules_uuid)
try:
yield session
finally:
@@ -55,6 +58,11 @@ def fx_book_repo(session: Session) -> BookSyncRepository:
return BookSyncRepository(session=session)
[email protected](name="rule_repo")
+def fx_rule_repo(session: Session) -> RuleSyncRepository:
+ return RuleSyncRepository(session=session)
+
+
def test_filter_by_kwargs_with_incorrect_attribute_name(author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy filter by kwargs with invalid column name.
@@ -257,3 +265,16 @@ def test_repo_filter_collection(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorRepository): The author mock repository
"""
st.test_repo_filter_collection(author_repo=author_repo)
+
+
+def test_repo_json_methods(
+ raw_rules_uuid: list[dict[str, Any]],
+ rule_repo: RuleSyncRepository,
+) -> None:
+ """Test SQLALchemy Collection filter.
+
+ Args:
+ raw_rules_uuid (list[dict[str, Any]]): list of rules pre-seeded into the mock repository
+ rule_repo (RuleSyncRepository): The rules mock repository
+ """
+ st.test_repo_json_methods(raw_rules_uuid=raw_rules_uuid, rule_repo=rule_repo)
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks that each given directory exists on the local filesystem.
That assumption does not hold in general, especially for virtual filesystems (e.g. a zipped package), so I think this condition should be relaxed to support them.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
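For context, here is a minimal sketch of the kind of virtual filesystem the issue describes. All names are hypothetical, and it assumes the protocol only needs fsspec-style `info()`/`open()` methods; treat it as a sketch under those assumptions, not the library's actual implementation:

```py
from importlib.resources import files
from typing import IO, Any


class PackageFileSystem:
    """Hypothetical read-only filesystem backed by package data (sketch only)."""

    def __init__(self, package: str) -> None:
        # files() returns a Traversable that also works inside zipped packages.
        self._root = files(package)

    def info(self, path: str, **kwargs: Any) -> dict[str, Any]:
        target = self._root.joinpath(str(path))
        # The shape of this dict is an assumption; adjust it to whatever
        # fields the protocol's file-info type actually requires.
        return {
            "name": str(path),
            "type": "file" if target.is_file() else "directory",
            "size": len(target.read_bytes()) if target.is_file() else 0,
        }

    def open(self, file: str, mode: str = "rb", buffering: int = -1) -> IO[Any]:
        # buffering is accepted only for signature compatibility; unused here.
        return self._root.joinpath(str(file)).open(mode)
```

Nothing here ever touches a path on disk, which is exactly the case the ``DirectoryPath`` validation rules out.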
| I agree. Do you want to submit a PR? | 2023-06-07T13:53:16 |
litestar-org/litestar | 1,797 | litestar-org__litestar-1797 | [
"1639",
"4321"
] | 485f7929c5aed70956e6c05b1d22da3ecae96bfc | diff --git a/docs/examples/data_transfer_objects/factory/excluding_fields.py b/docs/examples/data_transfer_objects/factory/excluding_fields.py
--- a/docs/examples/data_transfer_objects/factory/excluding_fields.py
+++ b/docs/examples/data_transfer_objects/factory/excluding_fields.py
@@ -1,4 +1,5 @@
from datetime import datetime
+from typing import List
from uuid import UUID
from sqlalchemy import ForeignKey
@@ -19,16 +20,30 @@ class Address(Base):
zip: Mapped[str]
+class Pets(Base):
+ name: Mapped[str]
+ user_id: Mapped[UUID] = mapped_column(ForeignKey("user.id"))
+
+
class User(Base):
name: Mapped[str]
password: Mapped[str] = mapped_column(info=dto_field("private"))
created_at: Mapped[datetime] = mapped_column(info=dto_field("read-only"))
address_id: Mapped[UUID] = mapped_column(ForeignKey("address.id"), info=dto_field("private"))
address: Mapped[Address] = relationship(info=dto_field("read-only"))
+ pets: Mapped[List[Pets]] = relationship(info=dto_field("read-only"))
UserDTO = SQLAlchemyDTO[User]
-config = DTOConfig(exclude={"id", "address.id", "address.street"})
+config = DTOConfig(
+ exclude={
+ "id",
+ "address.id",
+ "address.street",
+ "pets.0.id",
+ "pets.0.user_id",
+ }
+)
ReadUserDTO = SQLAlchemyDTO[Annotated[User, config]]
@@ -36,6 +51,7 @@ class User(Base):
def create_user(data: User) -> User:
data.created_at = datetime.min
data.address = Address(street="123 Main St", city="Anytown", state="NY", zip="12345")
+ data.pets = [Pets(id=1, name="Fido"), Pets(id=2, name="Spot")]
return data
| diff --git a/docs/examples/tests/data_transfer_objects/factory/test_example_apps.py b/docs/examples/tests/data_transfer_objects/factory/test_example_apps.py
--- a/docs/examples/tests/data_transfer_objects/factory/test_example_apps.py
+++ b/docs/examples/tests/data_transfer_objects/factory/test_example_apps.py
@@ -58,3 +58,20 @@ def test_patch_requests_app() -> None:
"name": "Peter Pan",
"age": 40,
}
+
+
+def test_exclude_fields_app() -> None:
+ from docs.examples.data_transfer_objects.factory.excluding_fields import app
+
+ with TestClient(app) as client:
+ response = client.post(
+ "/users",
+ json={"name": "Litestar User", "password": "xyz", "created_at": "2023-04-24T00:00:00Z"},
+ )
+ assert response.status_code == 201
+ assert response.json() == {
+ "created_at": "0001-01-01T00:00:00",
+ "address": {"city": "Anytown", "state": "NY", "zip": "12345"},
+ "pets": [{"name": "Fido"}, {"name": "Spot"}],
+ "name": "Litestar User",
+ }
| Docs: DTO Factory exclude fields in nested complex types
### Summary
We need to document the procedure for excluding fields from nested complex types, e.g.:
```py
from dataclasses import dataclass
from typing import Union
from litestar.dto.factory import DTOConfig

@dataclass
class Model1:
    a: int
    b: str

@dataclass
class Model2:
    c: int
    d: str

@dataclass
class Model3:
    e: Union[Model1, Model2]

config = DTOConfig(exclude={"e.0.a", "e.1.c"})
...
```
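(A likely reading of the numeric segments: they index the union's members in declaration order, so `e.0.a` targets `Model1.a` and `e.1.c` targets `Model2.c`, mirroring the `pets.0.id` convention the docs example above uses for the element type of a collection.)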
| 2023-06-08T00:08:41 |
|
litestar-org/litestar | 1,802 | litestar-org__litestar-1802 | [
"4321",
"1234"
] | 79338bd5fbae51c6d82ba57e07609a7949504e7a | diff --git a/litestar/contrib/sqlalchemy/base.py b/litestar/contrib/sqlalchemy/base.py
--- a/litestar/contrib/sqlalchemy/base.py
+++ b/litestar/contrib/sqlalchemy/base.py
@@ -137,7 +137,7 @@ def to_dict(self, exclude: set[str] | None = None) -> dict[str, Any]:
Returns:
dict[str, Any]: A dict representation of the model
"""
- exclude = exclude.union("_sentinel") if exclude else {"_sentinel"}
+ exclude = {"_sentinel"}.union(self._sa_instance_state.unloaded).union(exclude or []) # type: ignore[attr-defined]
return {field.name: getattr(self, field.name) for field in self.__table__.columns if field.name not in exclude}
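Two things change in the rewritten line above. Columns SQLAlchemy reports as unloaded are now excluded, so `to_dict()` no longer touches attributes that were never populated. And building the set as `{"_sentinel"}.union(...)` avoids a subtlety in the replaced line: `set.union` iterates its argument, so the old `exclude.union("_sentinel")` branch merged the string's individual characters into the set whenever a caller supplied excludes. A quick self-contained illustration:

```py
# set.union iterates its argument, so a bare string is split into characters:
assert {"a"}.union("_sentinel") == {"a", "_", "s", "e", "n", "t", "i", "l"}

# Starting from a set literal keeps the sentinel name intact:
assert {"_sentinel"}.union({"a"}) == {"_sentinel", "a"}
```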
| diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
@@ -7,11 +7,12 @@
from uuid import uuid4
import pytest
+from sqlalchemy import String
from sqlalchemy.exc import IntegrityError, InvalidRequestError, SQLAlchemyError
from sqlalchemy.ext.asyncio import (
AsyncSession,
)
-from sqlalchemy.orm import MappedColumn
+from sqlalchemy.orm import Mapped, MappedColumn, mapped_column
from litestar.contrib.repository.exceptions import ConflictError, RepositoryError
from litestar.contrib.repository.filters import (
@@ -74,26 +75,35 @@ class AnotherModel(base.UUIDAuditBase):
"""Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
columns."""
- ...
+ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
class TheTestModel(base.UUIDBase):
"""Inheriting from DeclarativeBase gives the model 'id' columns."""
- ...
+ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
class TheBigIntModel(base.BigIntBase):
"""Inheriting from DeclarativeBase gives the model 'id' columns."""
- ...
+ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
+
+ unloaded_cols = {"the_extra_col"}
+ sa_instance_mock = MagicMock()
+ sa_instance_mock.unloaded = unloaded_cols
assert isinstance(AnotherModel._sentinel, MappedColumn)
assert isinstance(TheTestModel._sentinel, MappedColumn)
assert not hasattr(TheBigIntModel, "_sentinel")
model1, model2, model3 = AnotherModel(), TheTestModel(), TheBigIntModel()
+ monkeypatch.setattr(model1, "_sa_instance_state", sa_instance_mock)
+ monkeypatch.setattr(model2, "_sa_instance_state", sa_instance_mock)
+ monkeypatch.setattr(model3, "_sa_instance_state", sa_instance_mock)
assert "created" not in model1.to_dict(exclude={"created"}).keys()
+ assert "the_extra_col" not in model1.to_dict(exclude={"created"}).keys()
assert "_sentinel" not in model1.to_dict().keys()
assert "_sentinel" not in model2.to_dict().keys()
assert "_sentinel" not in model3.to_dict().keys()
+ assert "the_extra_col" not in model1.to_dict().keys()
def test_wrap_sqlalchemy_integrity_error() -> None:
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
@@ -7,8 +7,9 @@
from uuid import uuid4
import pytest
+from sqlalchemy import String
from sqlalchemy.exc import IntegrityError, InvalidRequestError, SQLAlchemyError
-from sqlalchemy.orm import MappedColumn, Session
+from sqlalchemy.orm import Mapped, MappedColumn, Session, mapped_column
from litestar.contrib.repository.exceptions import ConflictError, RepositoryError
from litestar.contrib.repository.filters import (
@@ -71,26 +72,34 @@ class AnotherModel(base.UUIDAuditBase):
"""Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
columns."""
- ...
+ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
class TheTestModel(base.UUIDBase):
"""Inheriting from DeclarativeBase gives the model 'id' columns."""
- ...
+ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
class TheBigIntModel(base.BigIntBase):
"""Inheriting from DeclarativeBase gives the model 'id' columns."""
- ...
+ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
+ unloaded_cols = {"the_extra_col"}
+ sa_instance_mock = MagicMock()
+ sa_instance_mock.unloaded = unloaded_cols
assert isinstance(AnotherModel._sentinel, MappedColumn)
assert isinstance(TheTestModel._sentinel, MappedColumn)
assert not hasattr(TheBigIntModel, "_sentinel")
model1, model2, model3 = AnotherModel(), TheTestModel(), TheBigIntModel()
+ monkeypatch.setattr(model1, "_sa_instance_state", sa_instance_mock)
+ monkeypatch.setattr(model2, "_sa_instance_state", sa_instance_mock)
+ monkeypatch.setattr(model3, "_sa_instance_state", sa_instance_mock)
assert "created" not in model1.to_dict(exclude={"created"}).keys()
+ assert "the_extra_col" not in model1.to_dict(exclude={"created"}).keys()
assert "_sentinel" not in model1.to_dict().keys()
assert "_sentinel" not in model2.to_dict().keys()
assert "_sentinel" not in model3.to_dict().keys()
+ assert "the_extra_col" not in model1.to_dict().keys()
def test_wrap_sqlalchemy_integrity_error() -> None:
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem.
This is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
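For illustration, the kind of access the reporter wants to support. `importlib.resources.files()` returns a `Traversable` that works inside zipped packages, exactly where a `DirectoryPath` check against the local filesystem fails (package and asset names here are made up):
```py
from importlib.resources import files


def read_static_asset(name: str) -> bytes:
    # "myapp" is a hypothetical package shipping a "static" directory as data
    return files("myapp").joinpath("static", name).read_bytes()
```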
| I agree. Do you want to submit a PR? | 2023-06-08T16:10:49 |
litestar-org/litestar | 1,807 | litestar-org__litestar-1807 | [
"1762",
"4321",
"1762"
] | 3297f6be1efefa0e1d74d5f2b99586e11267d448 | diff --git a/litestar/handlers/websocket_handlers/listener.py b/litestar/handlers/websocket_handlers/listener.py
--- a/litestar/handlers/websocket_handlers/listener.py
+++ b/litestar/handlers/websocket_handlers/listener.py
@@ -42,7 +42,7 @@
if TYPE_CHECKING:
from typing import Coroutine
- from litestar import Litestar
+ from litestar import Litestar, Router
from litestar.dto.interface import DTOInterface
from litestar.types.asgi_types import WebSocketMode
@@ -358,8 +358,16 @@ class WebsocketListener(ABC):
type_encoders: A mapping of types to callables that transform them into types supported for serialization.
"""
- def __init__(self) -> None:
- self._handler = websocket_listener(
+ def __init__(self, owner: Router) -> None:
+ """Initialize a WebsocketListener instance.
+
+ Args:
+ owner: The :class:`Router <.router.Router>` instance that owns this listener.
+ """
+ self._owner = owner
+
+ def to_handler(self) -> websocket_listener:
+ handler = websocket_listener(
dependencies=self.dependencies,
dto=self.dto,
exception_handlers=self.exception_handlers,
@@ -376,6 +384,8 @@ def __init__(self) -> None:
signature_namespace=self.signature_namespace,
type_encoders=self.type_encoders,
)(self.on_receive)
+ handler.owner = self._owner
+ return handler
@abstractmethod
def on_receive(self, *args: Any, **kwargs: Any) -> Any:
diff --git a/litestar/router.py b/litestar/router.py
--- a/litestar/router.py
+++ b/litestar/router.py
@@ -290,7 +290,7 @@ def _validate_registration_value(self, value: ControllerRouterHandler) -> Contro
# this narrows down to an ABC, but we assume a non-abstract subclass of the ABC superclass
if is_class_and_subclass(value, WebsocketListener): # type: ignore[type-abstract]
- return value()._handler # pyright: ignore
+ return value(owner=self).to_handler() # pyright: ignore
if isinstance(value, Router):
if value.owner:
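In short, registering a listener class is now a two-step operation; a small sketch that runs against the patched code above (`Echo` is a made-up listener):
```py
from litestar import Router
from litestar.handlers import WebsocketListener


class Echo(WebsocketListener):
    path = "/echo"

    def on_receive(self, data: str) -> str:
        return data


router = Router(path="/", route_handlers=[])
listener = Echo(owner=router)    # __init__ now takes the owning Router
handler = listener.to_handler()  # builds the underlying websocket_listener
assert handler.owner is router   # to_handler() binds the owner before returning
```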
| diff --git a/tests/handlers/websocket/test_listeners.py b/tests/handlers/websocket/test_listeners.py
--- a/tests/handlers/websocket/test_listeners.py
+++ b/tests/handlers/websocket/test_listeners.py
@@ -364,25 +364,72 @@ def test_hook_dependencies() -> None:
on_accept_mock = MagicMock()
on_disconnect_mock = MagicMock()
- def on_accept(name: str, state: State, query: dict) -> None:
- on_accept_mock(name=name, state=state, query=query)
+ def some_dependency() -> str:
+ return "hello"
- def on_disconnect(name: str, state: State, query: dict) -> None:
- on_disconnect_mock(name=name, state=state, query=query)
+ def on_accept(name: str, state: State, query: dict, some: str) -> None:
+ on_accept_mock(name=name, state=state, query=query, some=some)
+
+ def on_disconnect(name: str, state: State, query: dict, some: str) -> None:
+ on_disconnect_mock(name=name, state=state, query=query, some=some)
@websocket_listener("/{name: str}", on_accept=on_accept, on_disconnect=on_disconnect)
def handler(data: bytes) -> None:
pass
- with create_test_client([handler], debug=True) as client, client.websocket_connect("/foo") as ws:
+ with create_test_client(
+ [handler], debug=True, dependencies={"some": some_dependency}
+ ) as client, client.websocket_connect("/foo") as ws:
+ ws.send_text("")
+
+ on_accept_kwargs = on_accept_mock.call_args_list[0].kwargs
+ assert on_accept_kwargs["name"] == "foo"
+ assert on_accept_kwargs["some"] == "hello"
+ assert isinstance(on_accept_kwargs["state"], State)
+ assert isinstance(on_accept_kwargs["query"], dict)
+
+ on_disconnect_kwargs = on_disconnect_mock.call_args_list[0].kwargs
+ assert on_disconnect_kwargs["name"] == "foo"
+ assert on_disconnect_kwargs["some"] == "hello"
+ assert isinstance(on_disconnect_kwargs["state"], State)
+ assert isinstance(on_disconnect_kwargs["query"], dict)
+
+
+def test_websocket_listener_class_hook_dependencies() -> None:
+ on_accept_mock = MagicMock()
+ on_disconnect_mock = MagicMock()
+
+ def some_dependency() -> str:
+ return "hello"
+
+ class Listener(WebsocketListener):
+ path = "/{name: str}"
+
+ def on_accept(self, name: str, state: State, query: dict, some: str) -> None: # type: ignore[override]
+ on_accept_mock(name=name, state=state, query=query, some=some)
+
+ def on_disconnect(self, name: str, state: State, query: dict, some: str) -> None: # type: ignore[override]
+ on_disconnect_mock(name=name, state=state, query=query, some=some)
+
+ def on_receive(self, data: bytes) -> None:
+ pass
+
+ with create_test_client(
+ [Listener], debug=True, dependencies={"some": some_dependency}
+ ) as client, client.websocket_connect("/foo") as ws:
ws.send_text("")
- assert on_accept_mock.call_args_list[0].kwargs["name"] == "foo"
- assert isinstance(on_accept_mock.call_args_list[0].kwargs["state"], State)
- assert isinstance(on_accept_mock.call_args_list[0].kwargs["query"], dict)
- assert on_disconnect_mock.call_args_list[0].kwargs["name"] == "foo"
- assert isinstance(on_disconnect_mock.call_args_list[0].kwargs["state"], State)
- assert isinstance(on_disconnect_mock.call_args_list[0].kwargs["query"], dict)
+ on_accept_kwargs = on_accept_mock.call_args_list[0].kwargs
+ assert on_accept_kwargs["name"] == "foo"
+ assert on_accept_kwargs["some"] == "hello"
+ assert isinstance(on_accept_kwargs["state"], State)
+ assert isinstance(on_accept_kwargs["query"], dict)
+
+ on_disconnect_kwargs = on_disconnect_mock.call_args_list[0].kwargs
+ assert on_disconnect_kwargs["name"] == "foo"
+ assert on_disconnect_kwargs["some"] == "hello"
+ assert isinstance(on_disconnect_kwargs["state"], State)
+ assert isinstance(on_disconnect_kwargs["query"], dict)
@pytest.mark.parametrize("hook_name", ["on_accept", "on_disconnect", "connection_accept_handler"])
| Bug: Arbitrary dependencies in `WebSocketListener` fail
### Description
Arbitrary dependencies (that is, dependencies that are not "reserved" like `state`) cannot be successfully injected into a `WebSocketListener` subclass. The injection works as intended when using the `websocket_listener` decorator.
### MCVE
```python
from litestar import WebSocket, Litestar
from litestar.handlers import WebsocketListener
from litestar.testing import TestClient
class Listener(WebsocketListener):
    path = "/ws"

    def on_accept(self, socket: WebSocket, foo: str) -> None:
        pass

    async def on_receive(self, data: dict) -> dict:
        return data


app = Litestar(
    [Listener],
    dependencies={"foo": lambda: "bar"},
)

with TestClient(app) as client, client.websocket_connect("/ws") as ws:
    ws.send_json({"foo": "bar"})
```
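For comparison, a sketch of the decorator-based form that the description says injects correctly (same dependency mapping as the MCVE above):
```python
from litestar import Litestar
from litestar.handlers import websocket_listener


@websocket_listener("/ws")
async def handler(data: dict, foo: str) -> dict:
    # `foo` resolves to "bar" here, unlike in the class-based listener
    return data


app = Litestar([handler], dependencies={"foo": lambda: "bar"})
```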
### Litestar Version
2.0.0alpha7
| 2023-06-10T21:26:32 |
|
litestar-org/litestar | 1,816 | litestar-org__litestar-1816 | [
"4321",
"1234"
] | f1fc19677cc9115d3ff1b9b9d63cb3bd6ce5a0db | diff --git a/litestar/contrib/sqlalchemy/base.py b/litestar/contrib/sqlalchemy/base.py
--- a/litestar/contrib/sqlalchemy/base.py
+++ b/litestar/contrib/sqlalchemy/base.py
@@ -2,12 +2,12 @@
from __future__ import annotations
import re
-from datetime import date, datetime
+from datetime import date, datetime, timezone
from typing import TYPE_CHECKING, Any, ClassVar, Protocol, TypeVar, runtime_checkable
from uuid import UUID, uuid4
from pydantic import AnyHttpUrl, AnyUrl, EmailStr
-from sqlalchemy import Date, DateTime, MetaData, Sequence, String
+from sqlalchemy import Date, MetaData, Sequence, String
from sqlalchemy.event import listens_for
from sqlalchemy.orm import (
DeclarativeBase,
@@ -19,7 +19,7 @@
registry,
)
-from .types import GUID, BigIntIdentity, JsonB
+from .types import GUID, BigIntIdentity, DateTimeUTC, JsonB
if TYPE_CHECKING:
from sqlalchemy.sql import FromClause
@@ -65,8 +65,8 @@ def touch_updated_timestamp(session: Session, *_: Any) -> None:
session.
"""
for instance in session.dirty:
- if hasattr(instance, "updated"):
- instance.updated = datetime.now() # noqa: DTZ005
+ if hasattr(instance, "updated_at"):
+ instance.updated_at = datetime.now(timezone.utc)
@runtime_checkable
@@ -112,9 +112,15 @@ def id(cls) -> Mapped[int]:
class AuditColumns:
"""Created/Updated At Fields Mixin."""
- created: Mapped[datetime] = mapped_column(default=datetime.now) # pyright: ignore
+ created_at: Mapped[datetime] = mapped_column( # pyright: ignore
+ DateTimeUTC(timezone=True),
+ default=datetime.now(timezone.utc),
+ )
"""Date/time of instance creation."""
- updated: Mapped[datetime] = mapped_column(default=datetime.now) # pyright: ignore
+ updated_at: Mapped[datetime] = mapped_column( # pyright: ignore
+ DateTimeUTC(timezone=True),
+ default=datetime.now(timezone.utc),
+ )
"""Date/time of instance last update."""
@@ -152,7 +158,7 @@ def create_registry() -> registry:
AnyUrl: String,
AnyHttpUrl: String,
dict: JsonB,
- datetime: DateTime,
+ datetime: DateTimeUTC,
date: Date,
},
)
diff --git a/litestar/contrib/sqlalchemy/types.py b/litestar/contrib/sqlalchemy/types.py
--- a/litestar/contrib/sqlalchemy/types.py
+++ b/litestar/contrib/sqlalchemy/types.py
@@ -1,10 +1,11 @@
from __future__ import annotations
+import datetime
import uuid
from base64 import b64decode
from typing import TYPE_CHECKING, Any, cast
-from sqlalchemy import text, util
+from sqlalchemy import DateTime, text, util
from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB
from sqlalchemy.dialects.oracle import RAW as ORA_RAW
from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB
@@ -145,6 +146,30 @@ def _set_table(self, column: Any, table: Any) -> None:
table.append_constraint(e)
+class DateTimeUTC(TypeDecorator):
+ """Timezone Aware DateTime.
+
+ Ensure UTC is stored in the database and that TZ aware dates are returned for all dialects.
+ """
+
+ impl = DateTime(timezone=True)
+ cache_ok = True
+
+ def process_bind_param(self, value: datetime.datetime | None, dialect: Dialect) -> datetime.datetime | None:
+ if value is None:
+ return value
+ if not value.tzinfo:
+ raise TypeError("tzinfo is required")
+ return value.astimezone(datetime.timezone.utc)
+
+ def process_result_value(self, value: datetime.datetime | None, dialect: Dialect) -> datetime.datetime | None:
+ if value is None:
+ return value
+ if value.tzinfo is None:
+ return value.replace(tzinfo=datetime.timezone.utc)
+ return value
+
+
BigIntIdentity = BigInteger().with_variant(Integer, "sqlite")
"""A ``BigInteger`` variant that reverts to an ``Integer`` for unsupported variants."""
| diff --git a/litestar/contrib/repository/testing/generic_mock_repository.py b/litestar/contrib/repository/testing/generic_mock_repository.py
--- a/litestar/contrib/repository/testing/generic_mock_repository.py
+++ b/litestar/contrib/repository/testing/generic_mock_repository.py
@@ -35,8 +35,8 @@ class GenericAsyncMockRepository(AbstractAsyncRepository[ModelT], Generic[ModelT
model_type: type[ModelT]
match_fields: list[str] | str | None = None
- _model_has_created: bool
- _model_has_updated: bool
+ _model_has_created_at: bool
+ _model_has_updated_at: bool
def __init__(
self, id_factory: Callable[[], Any] = uuid4, tz: tzinfo = timezone.utc, allow_ids_on_add: bool = False, **_: Any
@@ -59,8 +59,8 @@ def __class_getitem__(cls: type[AsyncMockRepoT], item: type[ModelT]) -> type[Asy
{
"collection": {},
"model_type": item,
- "_model_has_created": hasattr(item, "created"),
- "_model_has_updated": hasattr(item, "updated"),
+ "_model_has_created_at": hasattr(item, "created_at"),
+ "_model_has_updated_at": hasattr(item, "updated_at"),
},
)
@@ -72,10 +72,10 @@ def _now(self) -> datetime:
def _update_audit_attributes(self, data: ModelT, now: datetime | None = None, do_created: bool = False) -> ModelT:
now = now or self._now()
- if self._model_has_updated:
- data.updated = now # type:ignore[attr-defined]
- if self._model_has_updated and do_created:
- data.created = now # type:ignore[attr-defined]
+ if self._model_has_updated_at:
+ data.updated_at = now # type:ignore[attr-defined]
+ if self._model_has_updated_at and do_created:
+ data.created_at = now # type:ignore[attr-defined]
return data
async def add(self, data: ModelT) -> ModelT:
@@ -386,8 +386,8 @@ class GenericSyncMockRepository(AbstractSyncRepository[ModelT], Generic[ModelT])
model_type: type[ModelT]
match_fields: list[str] | str | None = None
- _model_has_created: bool
- _model_has_updated: bool
+ _model_has_created_at: bool
+ _model_has_updated_at: bool
def __init__(
self,
@@ -414,8 +414,8 @@ def __class_getitem__(cls: type[SyncMockRepoT], item: type[ModelT]) -> type[Sync
{
"collection": {},
"model_type": item,
- "_model_has_created": hasattr(item, "created"),
- "_model_has_updated": hasattr(item, "updated"),
+ "_model_has_created_at": hasattr(item, "created_at"),
+ "_model_has_updated_at": hasattr(item, "updated_at"),
},
)
@@ -427,10 +427,10 @@ def _now(self) -> datetime:
def _update_audit_attributes(self, data: ModelT, now: datetime | None = None, do_created: bool = False) -> ModelT:
now = now or self._now()
- if self._model_has_updated:
- data.updated = now # type:ignore[attr-defined]
- if self._model_has_updated and do_created:
- data.created = now # type:ignore[attr-defined]
+ if self._model_has_updated_at:
+ data.updated_at = now # type:ignore[attr-defined]
+ if self._model_has_updated_at and do_created:
+ data.created_at = now # type:ignore[attr-defined]
return data
def add(self, data: ModelT) -> ModelT:
diff --git a/tests/contrib/sqlalchemy/models_bigint.py b/tests/contrib/sqlalchemy/models_bigint.py
--- a/tests/contrib/sqlalchemy/models_bigint.py
+++ b/tests/contrib/sqlalchemy/models_bigint.py
@@ -2,6 +2,7 @@
from __future__ import annotations
from datetime import date, datetime
+from typing import List
from sqlalchemy import ForeignKey, String
from sqlalchemy.orm import Mapped, mapped_column, relationship
@@ -15,6 +16,11 @@ class BigIntAuthor(BigIntAuditBase):
name: Mapped[str] = mapped_column(String(length=100)) # pyright: ignore
dob: Mapped[date] = mapped_column(nullable=True) # pyright: ignore
+ books: Mapped[List[BigIntBook]] = relationship( # pyright: ignore # noqa: UP
+ lazy="selectin",
+ back_populates="author",
+ cascade="all, delete",
+ )
class BigIntBook(BigIntBase):
@@ -22,7 +28,9 @@ class BigIntBook(BigIntBase):
title: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore
author_id: Mapped[int] = mapped_column(ForeignKey("big_int_author.id")) # pyright: ignore
- author: Mapped[BigIntAuthor] = relationship(lazy="joined", innerjoin=True) # pyright: ignore
+ author: Mapped[BigIntAuthor] = relationship( # pyright: ignore
+ lazy="joined", innerjoin=True, back_populates="books"
+ )
class BigIntEventLog(BigIntAuditBase):
diff --git a/tests/contrib/sqlalchemy/models_uuid.py b/tests/contrib/sqlalchemy/models_uuid.py
--- a/tests/contrib/sqlalchemy/models_uuid.py
+++ b/tests/contrib/sqlalchemy/models_uuid.py
@@ -1,7 +1,9 @@
"""Example domain objects for testing."""
+
from __future__ import annotations
from datetime import date, datetime
+from typing import List
from uuid import UUID
from sqlalchemy import ForeignKey, String
@@ -16,6 +18,11 @@ class UUIDAuthor(UUIDAuditBase):
name: Mapped[str] = mapped_column(String(length=100)) # pyright: ignore
dob: Mapped[date] = mapped_column(nullable=True) # pyright: ignore
+ books: Mapped[List[UUIDBook]] = relationship( # pyright: ignore # noqa: UP
+ lazy="selectin",
+ back_populates="author",
+ cascade="all, delete",
+ )
class UUIDBook(UUIDBase):
@@ -23,7 +30,7 @@ class UUIDBook(UUIDBase):
title: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore
author_id: Mapped[UUID] = mapped_column(ForeignKey("uuid_author.id")) # pyright: ignore
- author: Mapped[UUIDAuthor] = relationship(lazy="joined", innerjoin=True) # pyright: ignore
+ author: Mapped[UUIDAuthor] = relationship(lazy="joined", innerjoin=True, back_populates="books") # pyright: ignore
class UUIDEventLog(UUIDAuditBase):
diff --git a/tests/contrib/sqlalchemy/repository/conftest.py b/tests/contrib/sqlalchemy/repository/conftest.py
--- a/tests/contrib/sqlalchemy/repository/conftest.py
+++ b/tests/contrib/sqlalchemy/repository/conftest.py
@@ -53,15 +53,15 @@ def fx_raw_authors_uuid() -> list[dict[str, Any]]:
"id": UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"),
"name": "Agatha Christie",
"dob": "1890-09-15",
- "created": "2023-05-01T00:00:00",
- "updated": "2023-05-11T00:00:00",
+ "created_at": "2023-05-01T00:00:00",
+ "updated_at": "2023-05-11T00:00:00",
},
{
"id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2",
"name": "Leo Tolstoy",
"dob": "1828-09-09",
- "created": "2023-03-01T00:00:00",
- "updated": "2023-05-15T00:00:00",
+ "created_at": "2023-03-01T00:00:00",
+ "updated_at": "2023-05-15T00:00:00",
},
]
@@ -87,8 +87,8 @@ def fx_raw_log_events_uuid() -> list[dict[str, Any]]:
"id": "f34545b9-663c-4fce-915d-dd1ae9cea42a",
"logged_at": "0001-01-01T00:00:00",
"payload": {"foo": "bar", "baz": datetime.now()},
- "created": "0001-01-01T00:00:00",
- "updated": "0001-01-01T00:00:00",
+ "created_at": "0001-01-01T00:00:00",
+ "updated_at": "0001-01-01T00:00:00",
},
]
@@ -101,15 +101,15 @@ def fx_raw_rules_uuid() -> list[dict[str, Any]]:
"id": "f34545b9-663c-4fce-915d-dd1ae9cea42a",
"name": "Initial loading rule.",
"config": json.dumps({"url": "https://litestar.dev", "setting_123": 1}),
- "created": "2023-01-01T00:00:00",
- "updated": "2023-02-01T00:00:00",
+ "created_at": "2023-01-01T00:00:00",
+ "updated_at": "2023-02-01T00:00:00",
},
{
"id": "f34545b9-663c-4fce-915d-dd1ae9cea34b",
"name": "Secondary loading rule.",
"config": {"url": "https://litestar.dev", "bar": "foo", "setting_123": 4},
- "created": "2023-02-01T00:00:00",
- "updated": "2023-02-01T00:00:00",
+ "created_at": "2023-02-01T00:00:00",
+ "updated_at": "2023-02-01T00:00:00",
},
]
@@ -122,15 +122,15 @@ def fx_raw_authors_bigint() -> list[dict[str, Any]]:
"id": 2023,
"name": "Agatha Christie",
"dob": "1890-09-15",
- "created": "2023-05-01T00:00:00",
- "updated": "2023-05-11T00:00:00",
+ "created_at": "2023-05-01T00:00:00",
+ "updated_at": "2023-05-11T00:00:00",
},
{
"id": 2024,
"name": "Leo Tolstoy",
"dob": "1828-09-09",
- "created": "2023-03-01T00:00:00",
- "updated": "2023-05-15T00:00:00",
+ "created_at": "2023-03-01T00:00:00",
+ "updated_at": "2023-05-15T00:00:00",
},
]
@@ -155,8 +155,8 @@ def fx_raw_log_events_bigint() -> list[dict[str, Any]]:
"id": 2025,
"logged_at": "0001-01-01T00:00:00",
"payload": {"foo": "bar", "baz": datetime.now()},
- "created": "0001-01-01T00:00:00",
- "updated": "0001-01-01T00:00:00",
+ "created_at": "0001-01-01T00:00:00",
+ "updated_at": "0001-01-01T00:00:00",
},
]
@@ -169,15 +169,15 @@ def fx_raw_rules_bigint() -> list[dict[str, Any]]:
"id": 2025,
"name": "Initial loading rule.",
"config": json.dumps({"url": "https://litestar.dev", "setting_123": 1}),
- "created": "2023-01-01T00:00:00",
- "updated": "2023-02-01T00:00:00",
+ "created_at": "2023-01-01T00:00:00",
+ "updated_at": "2023-02-01T00:00:00",
},
{
"id": 2024,
"name": "Secondary loading rule.",
"config": {"url": "https://litestar.dev", "bar": "foo", "setting_123": 4},
- "created": "2023-02-01T00:00:00",
- "updated": "2023-02-01T00:00:00",
+ "created_at": "2023-02-01T00:00:00",
+ "updated_at": "2023-02-01T00:00:00",
},
]
diff --git a/tests/contrib/sqlalchemy/repository/test_bigint_async.py b/tests/contrib/sqlalchemy/repository/test_bigint_async.py
--- a/tests/contrib/sqlalchemy/repository/test_bigint_async.py
+++ b/tests/contrib/sqlalchemy/repository/test_bigint_async.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import sys
-from datetime import datetime
+from datetime import datetime, timezone
from typing import Any
import pytest
@@ -15,6 +15,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorAsyncRepository,
BigIntAuthor,
+ BigIntBook,
BigIntRule,
BookAsyncRepository,
RuleAsyncRepository,
@@ -35,11 +36,19 @@ async def seed_db(
) -> None:
for raw_author in raw_authors_bigint:
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
for raw_author in raw_rules_bigint:
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
async with async_engine.begin() as conn:
await conn.run_sync(base.orm_registry.metadata.drop_all)
@@ -85,7 +94,7 @@ async def test_repo_count_method(author_repo: AuthorAsyncRepository) -> None:
async def test_repo_list_and_count_method(
raw_authors_bigint: list[dict[str, Any]], author_repo: AuthorAsyncRepository
) -> None:
- """Test SQLALchemy list with count in asyncpg.
+ """Test SQLALchemy list with count.
Args:
raw_authors_bigint (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
@@ -99,7 +108,7 @@ async def test_repo_list_and_count_method(
async def test_repo_list_and_count_method_empty(book_repo: BookAsyncRepository) -> None:
- """Test SQLALchemy list with count in asyncpg.
+ """Test SQLALchemy list with count.
Args:
raw_authors_bigint (list[dict[str, Any]]): list of authors pre-seeded into the mock repository
@@ -112,6 +121,22 @@ async def test_repo_list_and_count_method_empty(book_repo: BookAsyncRepository)
assert len(collection) == 0
+async def test_repo_created_updated(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLALchemy created_at - updated_at.
+
+ Args:
+ author_repo (AuthorAsyncRepository): The author mock repository
+ """
+ author = await author_repo.get_one(name="Agatha Christie")
+ assert author.created_at is not None
+ assert author.updated_at is not None
+ original_update_dt = author.updated_at
+
+ author.books.append(BigIntBook(title="Testing"))
+ author = await author_repo.update(author)
+ assert author.updated_at == original_update_dt
+
+
async def test_repo_list_method(raw_authors_bigint: list[dict[str, Any]], author_repo: AuthorAsyncRepository) -> None:
"""Test SQLALchemy list.
@@ -329,13 +354,17 @@ async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> N
author_repo (AuthorAsyncRepository): The author mock repository
"""
before_filter = BeforeAfter(
- field_name="created", before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), after=None
+ field_name="created_at",
+ before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ after=None,
)
existing_obj = await author_repo.list(before_filter)
assert existing_obj[0].name == "Leo Tolstoy"
after_filter = BeforeAfter(
- field_name="created", after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), before=None
+ field_name="created_at",
+ after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ before=None,
)
existing_obj = await author_repo.list(after_filter)
assert existing_obj[0].name == "Agatha Christie"
@@ -369,9 +398,9 @@ async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
author_repo (AuthorAsyncRepository): The author mock repository
"""
- existing_obj = await author_repo.list(OrderBy(field_name="created", sort_order="desc"))
+ existing_obj = await author_repo.list(OrderBy(field_name="created_at", sort_order="desc"))
assert existing_obj[0].name == "Agatha Christie"
- existing_obj = await author_repo.list(OrderBy(field_name="created", sort_order="asc"))
+ existing_obj = await author_repo.list(OrderBy(field_name="created_at", sort_order="asc"))
assert existing_obj[0].name == "Leo Tolstoy"
diff --git a/tests/contrib/sqlalchemy/repository/test_bigint_sync.py b/tests/contrib/sqlalchemy/repository/test_bigint_sync.py
--- a/tests/contrib/sqlalchemy/repository/test_bigint_sync.py
+++ b/tests/contrib/sqlalchemy/repository/test_bigint_sync.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import sys
-from datetime import datetime
+from datetime import datetime, timezone
from typing import Any
import pytest
@@ -15,6 +15,7 @@
from tests.contrib.sqlalchemy.models_bigint import (
AuthorSyncRepository,
BigIntAuthor,
+ BigIntBook,
BigIntRule,
BookSyncRepository,
RuleSyncRepository,
@@ -35,11 +36,15 @@ def seed_db(
) -> None:
for raw_author in raw_authors_bigint:
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
for raw_rule in raw_rules_bigint:
- raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
- raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_rule["created_at"] = datetime.strptime(raw_rule["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc)
+ raw_rule["updated_at"] = datetime.strptime(raw_rule["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc)
with engine.begin() as conn:
base.orm_registry.metadata.drop_all(conn)
@@ -114,6 +119,22 @@ def test_repo_list_and_count_method_empty(book_repo: BookSyncRepository) -> None
assert len(collection) == 0
+def test_repo_created_updated(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLALchemy created_at - updated_at.
+
+ Args:
+ author_repo (AuthorSyncRepository): The author mock repository
+ """
+ author = author_repo.get_one(name="Agatha Christie")
+ assert author.created_at is not None
+ assert author.updated_at is not None
+ original_update_dt = author.updated_at
+
+ author.books.append(BigIntBook(title="Testing"))
+ author = author_repo.update(author)
+ assert author.updated_at == original_update_dt
+
+
def test_repo_list_method(raw_authors_bigint: list[dict[str, Any]], author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy list.
@@ -329,13 +350,17 @@ def test_repo_filter_before_after(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorSyncRepository): The author mock repository
"""
before_filter = BeforeAfter(
- field_name="created", before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), after=None
+ field_name="created_at",
+ before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ after=None,
)
existing_obj = author_repo.list(before_filter)
assert existing_obj[0].name == "Leo Tolstoy"
after_filter = BeforeAfter(
- field_name="created", after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), before=None
+ field_name="created_at",
+ after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ before=None,
)
existing_obj = author_repo.list(after_filter)
assert existing_obj[0].name == "Agatha Christie"
@@ -369,9 +394,9 @@ def test_repo_filter_order_by(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorSyncRepository): The author mock repository
"""
- existing_obj = author_repo.list(OrderBy(field_name="created", sort_order="desc"))
+ existing_obj = author_repo.list(OrderBy(field_name="created_at", sort_order="desc"))
assert existing_obj[0].name == "Agatha Christie"
- existing_obj = author_repo.list(OrderBy(field_name="created", sort_order="asc"))
+ existing_obj = author_repo.list(OrderBy(field_name="created_at", sort_order="asc"))
assert existing_obj[0].name == "Leo Tolstoy"
diff --git a/tests/contrib/sqlalchemy/repository/test_generic_mock_async_repository.py b/tests/contrib/sqlalchemy/repository/test_generic_mock_async_repository.py
--- a/tests/contrib/sqlalchemy/repository/test_generic_mock_async_repository.py
+++ b/tests/contrib/sqlalchemy/repository/test_generic_mock_async_repository.py
@@ -18,7 +18,7 @@
def fx_authors() -> list[UUIDAuthor]:
"""Collection of Author models."""
return [
- UUIDAuthor(id=uuid4(), name=name, dob=dob, created=datetime.min, updated=datetime.min)
+ UUIDAuthor(id=uuid4(), name=name, dob=dob, created_at=datetime.min, updated_at=datetime.min)
for name, dob in [("Agatha Christie", date(1890, 9, 15)), ("Leo Tolstoy", date(1828, 9, 9))]
]
@@ -113,36 +113,36 @@ def test_generic_mock_repository_raises_repository_exception_if_named_attribute_
async def test_sets_created_updated_on_add() -> None:
- """Test that the repository updates the 'created' and 'updated' timestamps
+ """Test that the repository updates the 'created_at' and 'updated_at' timestamps
if necessary."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
uuid_instance = UUIDModel()
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
uuid_instance = await GenericAsyncMockRepository[UUIDModel]().add(uuid_instance)
- assert "created" in vars(uuid_instance)
- assert "updated" in vars(uuid_instance)
+ assert "created_at" in vars(uuid_instance)
+ assert "updated_at" in vars(uuid_instance)
bigint_instance = BigIntModel()
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
bigint_instance = await GenericAsyncMockRepository[BigIntModel]().add(bigint_instance) # type: ignore[type-var]
- assert "created" in vars(bigint_instance)
- assert "updated" in vars(bigint_instance)
+ assert "created_at" in vars(bigint_instance)
+ assert "updated_at" in vars(bigint_instance)
async def test_sets_updated_on_update(author_repository: GenericAsyncMockRepository[UUIDAuthor]) -> None:
@@ -150,9 +150,9 @@ async def test_sets_updated_on_update(author_repository: GenericAsyncMockReposit
necessary."""
instance = list(author_repository.collection.values())[0]
- original_updated = instance.updated
+ original_updated = instance.updated_at
instance = await author_repository.update(instance)
- assert instance.updated > original_updated
+ assert instance.updated_at > original_updated
async def test_does_not_set_created_updated() -> None:
@@ -160,51 +160,51 @@ async def test_does_not_set_created_updated() -> None:
appropriate."""
class UUIDModel(base.UUIDBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
uuid_instance = UUIDModel()
uuid_repo = GenericAsyncMockRepository[UUIDModel]()
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
uuid_instance = await uuid_repo.add(uuid_instance)
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
uuid_instance = await uuid_repo.update(uuid_instance)
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
bigint_instance = BigIntModel()
bigint_repo = GenericAsyncMockRepository[BigIntModel]() # type: ignore[type-var]
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
bigint_instance = await bigint_repo.add(bigint_instance)
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
bigint_instance = await bigint_repo.update(bigint_instance)
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
async def test_add() -> None:
"""Test that the repository add method works correctly`."""
class UUIDModel(base.UUIDBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -224,13 +224,13 @@ async def test_add_many() -> None:
"""Test that the repository add_many method works correctly`."""
class UUIDModel(base.UUIDBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -249,7 +249,7 @@ async def test_update() -> None:
"""Test that the repository update method works correctly`."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -267,7 +267,7 @@ async def test_update_many() -> None:
"""Test that the repository add_many method works correctly`."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -287,7 +287,7 @@ async def test_upsert() -> None:
"""Test that the repository upsert method works correctly`."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -305,7 +305,7 @@ async def test_list() -> None:
"""Test that the repository list returns records."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -320,7 +320,7 @@ async def test_delete() -> None:
"""Test that the repository delete functionality."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -337,7 +337,7 @@ async def test_delete_many() -> None:
"""Test that the repository delete many functionality."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -354,7 +354,7 @@ async def test_list_and_count() -> None:
"""Test that the repository list_and_count returns records and the total record count."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -371,7 +371,7 @@ async def test_exists() -> None:
"""Test that the repository exists returns booleans."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -387,7 +387,7 @@ async def test_count() -> None:
"""Test that the repository count returns the total record count."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -403,7 +403,7 @@ async def test_get() -> None:
"""Test that the repository get returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -420,7 +420,7 @@ async def test_get_one() -> None:
"""Test that the repository get_one returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -438,7 +438,7 @@ async def test_get_one_or_none() -> None:
"""Test that the repository get_one_or_none returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -456,7 +456,7 @@ async def test_get_or_create() -> None:
"""Test that the repository get_or_create returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -478,7 +478,7 @@ async def test_get_or_create_match_fields() -> None:
"""Test that the repository get_or_create returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
diff --git a/tests/contrib/sqlalchemy/repository/test_generic_mock_sync_repository.py b/tests/contrib/sqlalchemy/repository/test_generic_mock_sync_repository.py
--- a/tests/contrib/sqlalchemy/repository/test_generic_mock_sync_repository.py
+++ b/tests/contrib/sqlalchemy/repository/test_generic_mock_sync_repository.py
@@ -18,7 +18,7 @@
def fx_authors() -> list[UUIDAuthor]:
"""Collection of Author models."""
return [
- UUIDAuthor(id=uuid4(), name=name, dob=dob, created=datetime.min, updated=datetime.min)
+ UUIDAuthor(id=uuid4(), name=name, dob=dob, created_at=datetime.min, updated_at=datetime.min)
for name, dob in [("Agatha Christie", date(1890, 9, 15)), ("Leo Tolstoy", date(1828, 9, 9))]
]
@@ -113,36 +113,36 @@ def test_generic_mock_repository_raises_repository_exception_if_named_attribute_
async def test_sets_created_updated_on_add() -> None:
- """Test that the repository updates the 'created' and 'updated' timestamps
+ """Test that the repository updates the 'created_at' and 'updated_at' timestamps
if necessary."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
uuid_instance = UUIDModel()
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
uuid_instance = GenericSyncMockRepository[UUIDModel]().add(uuid_instance)
- assert "created" in vars(uuid_instance)
- assert "updated" in vars(uuid_instance)
+ assert "created_at" in vars(uuid_instance)
+ assert "updated_at" in vars(uuid_instance)
bigint_instance = BigIntModel()
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
bigint_instance = GenericSyncMockRepository[BigIntModel]().add(bigint_instance) # type: ignore[type-var]
- assert "created" in vars(bigint_instance)
- assert "updated" in vars(bigint_instance)
+ assert "created_at" in vars(bigint_instance)
+ assert "updated_at" in vars(bigint_instance)
async def test_sets_updated_on_update(author_repository: GenericSyncMockRepository[UUIDAuthor]) -> None:
@@ -150,9 +150,9 @@ async def test_sets_updated_on_update(author_repository: GenericSyncMockReposito
necessary."""
instance = list(author_repository.collection.values())[0]
- original_updated = instance.updated
+ original_updated = instance.updated_at
instance = author_repository.update(instance)
- assert instance.updated > original_updated
+ assert instance.updated_at > original_updated
async def test_does_not_set_created_updated() -> None:
@@ -160,51 +160,51 @@ async def test_does_not_set_created_updated() -> None:
appropriate."""
class UUIDModel(base.UUIDBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
uuid_instance = UUIDModel()
uuid_repo = GenericSyncMockRepository[UUIDModel]()
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
uuid_instance = uuid_repo.add(uuid_instance)
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
uuid_instance = uuid_repo.update(uuid_instance)
- assert "created" not in vars(uuid_instance)
- assert "updated" not in vars(uuid_instance)
+ assert "created_at" not in vars(uuid_instance)
+ assert "updated_at" not in vars(uuid_instance)
bigint_instance = BigIntModel()
bigint_repo = GenericSyncMockRepository[BigIntModel]() # type: ignore[type-var]
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
bigint_instance = bigint_repo.add(bigint_instance)
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
bigint_instance = bigint_repo.update(bigint_instance)
- assert "created" not in vars(bigint_instance)
- assert "updated" not in vars(bigint_instance)
+ assert "created_at" not in vars(bigint_instance)
+ assert "updated_at" not in vars(bigint_instance)
async def test_add() -> None:
"""Test that the repository add method works correctly`."""
class UUIDModel(base.UUIDBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -224,13 +224,13 @@ async def test_add_many() -> None:
"""Test that the repository add_many method works correctly`."""
class UUIDModel(base.UUIDBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -249,7 +249,7 @@ async def test_update() -> None:
"""Test that the repository update method works correctly`."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -267,7 +267,7 @@ async def test_update_many() -> None:
"""Test that the repository add_many method works correctly`."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -287,7 +287,7 @@ async def test_upsert() -> None:
"""Test that the repository upsert method works correctly`."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -305,7 +305,7 @@ async def test_list() -> None:
"""Test that the repository list returns records."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -320,7 +320,7 @@ async def test_delete() -> None:
"""Test that the repository delete functionality."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -337,7 +337,7 @@ async def test_delete_many() -> None:
"""Test that the repository delete many functionality."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -354,7 +354,7 @@ async def test_list_and_count() -> None:
"""Test that the repository list_and_count returns records and the total record count."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -371,7 +371,7 @@ async def test_exists() -> None:
"""Test that the repository exists returns booleans."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -387,7 +387,7 @@ async def test_count() -> None:
"""Test that the repository count returns the total record count."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -403,7 +403,7 @@ async def test_get() -> None:
"""Test that the repository get returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -420,7 +420,7 @@ async def test_get_one() -> None:
"""Test that the repository get_one returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -438,7 +438,7 @@ async def test_get_one_or_none() -> None:
"""Test that the repository get_one_or_none returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -456,7 +456,7 @@ async def test_get_or_create() -> None:
"""Test that the repository get_or_create returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
@@ -478,7 +478,7 @@ async def test_get_or_create_match_fields() -> None:
"""Test that the repository get_or_create returns a model record correctly."""
class Model(base.UUIDAuditBase):
- """Inheriting from AuditBase gives the model 'created' and 'updated'
+ """Inheriting from AuditBase gives the model 'created_at' and 'updated_at'
columns."""
random_column: Mapped[str]
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_async.py
@@ -43,19 +43,19 @@ async def test_sqlalchemy_tablename(monkeypatch: MonkeyPatch) -> None:
"""Test the snake case conversion for table names."""
class BigModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class TESTModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -69,7 +69,7 @@ async def test_sqlalchemy_sentinel(monkeypatch: MonkeyPatch) -> None:
"""Test the sqlalchemy sentinel column only exists on `UUIDPrimaryKey` models."""
class AnotherModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
@@ -95,8 +95,8 @@ class TheBigIntModel(base.BigIntBase):
monkeypatch.setattr(model1, "_sa_instance_state", sa_instance_mock)
monkeypatch.setattr(model2, "_sa_instance_state", sa_instance_mock)
monkeypatch.setattr(model3, "_sa_instance_state", sa_instance_mock)
- assert "created" not in model1.to_dict(exclude={"created"}).keys()
- assert "the_extra_col" not in model1.to_dict(exclude={"created"}).keys()
+ assert "created_at" not in model1.to_dict(exclude={"created_at"}).keys()
+ assert "the_extra_col" not in model1.to_dict(exclude={"created_at"}).keys()
assert "_sentinel" not in model1.to_dict().keys()
assert "_sentinel" not in model2.to_dict().keys()
assert "_sentinel" not in model3.to_dict().keys()
@@ -131,13 +131,13 @@ async def test_sqlalchemy_repo_add_many(mock_repo: SQLAlchemyAsyncRepository, mo
"""Test expected method calls for add many operation."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -159,13 +159,13 @@ async def test_sqlalchemy_repo_update_many(mock_repo: SQLAlchemyAsyncRepository,
"""Test expected method calls for update many operation."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -202,13 +202,13 @@ async def test_sqlalchemy_repo_delete_many(mock_repo: SQLAlchemyAsyncRepository,
"""Test expected method calls for delete operation."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -424,7 +424,7 @@ async def test_sqlalchemy_repo_list_with_before_after_filter(
mock_repo: SQLAlchemyAsyncRepository, monkeypatch: MonkeyPatch
) -> None:
"""Test list operation with BeforeAfter filter."""
- field_name = "updated"
+ field_name = "updated_at"
# model has to support comparison with the datetimes
getattr(mock_repo.model_type, field_name).__lt__ = lambda self, compare: "lt"
getattr(mock_repo.model_type, field_name).__gt__ = lambda self, compare: "gt"
@@ -515,12 +515,12 @@ def test_filter_in_collection_noop_if_collection_empty(mock_repo: SQLAlchemyAsyn
(datetime.max, None),
],
)
-def test__filter_on_datetime_field(before: datetime, after: datetime, mock_repo: SQLAlchemyAsyncRepository) -> None:
+def test_filter_on_datetime_field(before: datetime, after: datetime, mock_repo: SQLAlchemyAsyncRepository) -> None:
"""Test through branches of _filter_on_datetime_field()"""
field_mock = MagicMock()
field_mock.__gt__ = field_mock.__lt__ = lambda self, other: True
- mock_repo.model_type.updated = field_mock
- mock_repo._filter_on_datetime_field("updated", before, after, statement=mock_repo.statement)
+ mock_repo.model_type.updated_at = field_mock
+ mock_repo._filter_on_datetime_field("updated_at", before, after, statement=mock_repo.statement)
def test_filter_collection_by_kwargs(mock_repo: SQLAlchemyAsyncRepository) -> None:
diff --git a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
--- a/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
+++ b/tests/contrib/sqlalchemy/repository/test_sqlalchemy_sync.py
@@ -43,19 +43,19 @@ def test_sqlalchemy_tablename(monkeypatch: MonkeyPatch) -> None:
"""Test the snake case conversion for table names."""
class BigModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class TESTModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -69,7 +69,7 @@ def test_sqlalchemy_sentinel(monkeypatch: MonkeyPatch) -> None:
"""Test the sqlalchemy sentinel column only exists on `UUIDPrimaryKey` models."""
class AnotherModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore
@@ -130,13 +130,13 @@ def test_sqlalchemy_repo_add_many(mock_repo: SQLAlchemySyncRepository, monkeypat
"""Test expected method calls for add many operation."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -158,13 +158,13 @@ def test_sqlalchemy_repo_update_many(mock_repo: SQLAlchemySyncRepository, monkey
"""Test expected method calls for update many operation."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -201,13 +201,13 @@ def test_sqlalchemy_repo_delete_many(mock_repo: SQLAlchemySyncRepository, monkey
"""Test expected method calls for delete operation."""
class UUIDModel(base.UUIDAuditBase):
- """Inheriting from UUIDAuditBase gives the model 'created' and 'updated'
+ """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
class BigIntModel(base.BigIntAuditBase):
- """Inheriting from BigIntAuditBase gives the model 'created' and 'updated'
+ """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at'
columns."""
...
@@ -420,7 +420,7 @@ def test_sqlalchemy_repo_list_with_before_after_filter(
mock_repo: SQLAlchemySyncRepository, monkeypatch: MonkeyPatch
) -> None:
"""Test list operation with BeforeAfter filter."""
- field_name = "updated"
+ field_name = "updated_at"
# model has to support comparison with the datetimes
getattr(mock_repo.model_type, field_name).__lt__ = lambda self, compare: "lt"
getattr(mock_repo.model_type, field_name).__gt__ = lambda self, compare: "gt"
@@ -511,12 +511,12 @@ def test_filter_in_collection_noop_if_collection_empty(mock_repo: SQLAlchemySync
(datetime.max, None),
],
)
-def test__filter_on_datetime_field(before: datetime, after: datetime, mock_repo: SQLAlchemySyncRepository) -> None:
+def test_filter_on_datetime_field(before: datetime, after: datetime, mock_repo: SQLAlchemySyncRepository) -> None:
"""Test through branches of _filter_on_datetime_field()"""
field_mock = MagicMock()
field_mock.__gt__ = field_mock.__lt__ = lambda self, other: True
- mock_repo.model_type.updated = field_mock
- mock_repo._filter_on_datetime_field("updated", before, after, statement=mock_repo.statement)
+ mock_repo.model_type.updated_at = field_mock
+ mock_repo._filter_on_datetime_field("updated_at", before, after, statement=mock_repo.statement)
def test_filter_collection_by_kwargs(mock_repo: SQLAlchemySyncRepository) -> None:
diff --git a/tests/contrib/sqlalchemy/repository/test_uuid_async.py b/tests/contrib/sqlalchemy/repository/test_uuid_async.py
--- a/tests/contrib/sqlalchemy/repository/test_uuid_async.py
+++ b/tests/contrib/sqlalchemy/repository/test_uuid_async.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import sys
-from datetime import datetime
+from datetime import datetime, timezone
from typing import Any
from uuid import UUID, uuid4
@@ -21,6 +21,7 @@
BookAsyncRepository,
RuleAsyncRepository,
UUIDAuthor,
+ UUIDBook,
UUIDRule,
)
@@ -40,11 +41,19 @@ async def seed_db(
# convert date/time strings to dt objects.
for raw_author in raw_authors_uuid:
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
for raw_author in raw_rules_uuid:
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
async with async_engine.begin() as conn:
await conn.run_sync(base.orm_registry.metadata.drop_all)
@@ -120,6 +129,22 @@ async def test_repo_list_and_count_method_empty(book_repo: BookAsyncRepository)
assert len(collection) == 0
+async def test_repo_created_updated(author_repo: AuthorAsyncRepository) -> None:
+ """Test SQLAlchemy created_at - updated_at.
+
+ Args:
+ author_repo (AuthorAsyncRepository): The author mock repository
+ """
+ author = await author_repo.get_one(name="Agatha Christie")
+ assert author.created_at is not None
+ assert author.updated_at is not None
+ original_update_dt = author.updated_at
+
+ author.books.append(UUIDBook(title="Testing"))
+ author = await author_repo.update(author)
+ assert author.updated_at == original_update_dt
+
+
async def test_repo_list_method(
raw_authors_uuid: list[dict[str, Any]],
author_repo: AuthorAsyncRepository,
@@ -341,13 +366,17 @@ async def test_repo_filter_before_after(author_repo: AuthorAsyncRepository) -> N
author_repo (AuthorAsyncRepository): The author mock repository
"""
before_filter = BeforeAfter(
- field_name="created", before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), after=None
+ field_name="created_at",
+ before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ after=None,
)
existing_obj = await author_repo.list(before_filter)
assert existing_obj[0].name == "Leo Tolstoy"
after_filter = BeforeAfter(
- field_name="created", after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), before=None
+ field_name="created_at",
+ after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ before=None,
)
existing_obj = await author_repo.list(after_filter)
assert existing_obj[0].name == "Agatha Christie"
@@ -381,9 +410,9 @@ async def test_repo_filter_order_by(author_repo: AuthorAsyncRepository) -> None:
author_repo (AuthorAsyncRepository): The author mock repository
"""
- existing_obj = await author_repo.list(OrderBy(field_name="created", sort_order="desc"))
+ existing_obj = await author_repo.list(OrderBy(field_name="created_at", sort_order="desc"))
assert existing_obj[0].name == "Agatha Christie"
- existing_obj = await author_repo.list(OrderBy(field_name="created", sort_order="asc"))
+ existing_obj = await author_repo.list(OrderBy(field_name="created_at", sort_order="asc"))
assert existing_obj[0].name == "Leo Tolstoy"
diff --git a/tests/contrib/sqlalchemy/repository/test_uuid_sync.py b/tests/contrib/sqlalchemy/repository/test_uuid_sync.py
--- a/tests/contrib/sqlalchemy/repository/test_uuid_sync.py
+++ b/tests/contrib/sqlalchemy/repository/test_uuid_sync.py
@@ -2,7 +2,7 @@
from __future__ import annotations
import os
-from datetime import datetime
+from datetime import datetime, timezone
from typing import Any, Generator, cast
from uuid import UUID, uuid4
@@ -19,6 +19,7 @@
BookSyncRepository,
RuleSyncRepository,
UUIDAuthor,
+ UUIDBook,
UUIDRule,
)
@@ -52,11 +53,19 @@ def _seed_db(
# convert date/time strings to dt objects.
for raw_author in raw_authors_uuid:
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
for raw_author in raw_rules_uuid:
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
with engine.begin() as conn:
base.orm_registry.metadata.drop_all(conn)
@@ -77,11 +86,15 @@ def _seed_spanner(
) -> list[Table]:
for raw_author in raw_authors_uuid:
raw_author["dob"] = datetime.strptime(raw_author["dob"], "%Y-%m-%d").date()
- raw_author["created"] = datetime.strptime(raw_author["created"], "%Y-%m-%dT%H:%M:%S")
- raw_author["updated"] = datetime.strptime(raw_author["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_author["created_at"] = datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
+ raw_author["updated_at"] = datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(
+ timezone.utc
+ )
for raw_rule in raw_rules_uuid:
- raw_rule["created"] = datetime.strptime(raw_rule["created"], "%Y-%m-%dT%H:%M:%S")
- raw_rule["updated"] = datetime.strptime(raw_rule["updated"], "%Y-%m-%dT%H:%M:%S")
+ raw_rule["created_at"] = datetime.strptime(raw_rule["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc)
+ raw_rule["updated_at"] = datetime.strptime(raw_rule["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc)
with engine.begin() as txn:
objs = []
@@ -199,6 +212,22 @@ def test_repo_list_and_count_method_empty(book_repo: BookSyncRepository) -> None
assert len(collection) == 0
+def test_repo_created_updated(author_repo: AuthorSyncRepository) -> None:
+ """Test SQLAlchemy created_at - updated_at.
+
+ Args:
+ author_repo (AuthorSyncRepository): The author mock repository
+ """
+ author = author_repo.get_one(name="Agatha Christie")
+ assert author.created_at is not None
+ assert author.updated_at is not None
+ original_update_dt = author.updated_at
+
+ author.books.append(UUIDBook(title="Testing"))
+ author = author_repo.update(author)
+ assert author.updated_at == original_update_dt
+
+
def test_repo_list_method(raw_authors_uuid: list[dict[str, Any]], author_repo: AuthorSyncRepository) -> None:
"""Test SQLALchemy list.
@@ -417,13 +446,17 @@ def test_repo_filter_before_after(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorSyncRepository): The author mock repository
"""
before_filter = BeforeAfter(
- field_name="created", before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), after=None
+ field_name="created_at",
+ before=datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ after=None,
)
existing_obj = author_repo.list(before_filter)
assert existing_obj[0].name == "Leo Tolstoy"
after_filter = BeforeAfter(
- field_name="created", after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S"), before=None
+ field_name="created_at",
+ after=datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(timezone.utc),
+ before=None,
)
existing_obj = author_repo.list(after_filter)
assert existing_obj[0].name == "Agatha Christie"
@@ -457,9 +490,9 @@ def test_repo_filter_order_by(author_repo: AuthorSyncRepository) -> None:
author_repo (AuthorSyncRepository): The author mock repository
"""
- existing_obj = author_repo.list(OrderBy(field_name="created", sort_order="desc"))
+ existing_obj = author_repo.list(OrderBy(field_name="created_at", sort_order="desc"))
assert existing_obj[0].name == "Agatha Christie"
- existing_obj = author_repo.list(OrderBy(field_name="created", sort_order="asc"))
+ existing_obj = author_repo.list(OrderBy(field_name="created_at", sort_order="asc"))
assert existing_obj[0].name == "Leo Tolstoy"
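A side note on the seeding changes above: calling `.astimezone(timezone.utc)` on a naive `datetime` first interprets the value as the machine's local time, so the stored UTC instant shifts with the host's time zone. A minimal sketch of the difference, added here purely for illustration and not part of the patch:
```python
from datetime import datetime, timezone

naive = datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S")

# Interprets `naive` as local time, then converts to UTC - host-dependent.
local_based = naive.astimezone(timezone.utc)

# Declares the value to *be* UTC without shifting it - host-independent.
utc_based = naive.replace(tzinfo=timezone.utc)
```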
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` implementation that loads files from package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks whether the given directories exist on the local filesystem.
That assumption does not hold in general, especially for any kind of virtual filesystem (e.g. a zipped package). I think this constraint should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
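For context, a minimal sketch of the access pattern in question; `mypackage.static` is a hypothetical package whose data may live inside a zip, so there is no real directory on disk for ``DirectoryPath`` to validate:
```python
from importlib_resources import files

# Resolve a Traversable for package data; this works whether the package
# is installed as a plain directory or shipped inside a zipped distribution.
static_root = files("mypackage.static")


def open_resource(path: str):
    # Traversable.joinpath()/.open() never require an on-disk directory.
    return static_root.joinpath(path).open("rb")
```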
| I agree. Do you want to submit a PR? | 2023-06-11T19:46:42 |
litestar-org/litestar | 1,826 | litestar-org__litestar-1826 | [
"4321",
"1234"
] | cdd6bc9fa023521a94abd734780f18b528c2020a | diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py
--- a/litestar/_openapi/schema_generation/schema.py
+++ b/litestar/_openapi/schema_generation/schema.py
@@ -265,12 +265,8 @@
time: Schema(type=OpenAPIType.STRING, format=OpenAPIFormat.DURATION),
timedelta: Schema(type=OpenAPIType.STRING, format=OpenAPIFormat.DURATION),
tuple: Schema(type=OpenAPIType.ARRAY),
- # litestar types,
- # the following is a hack -https://www.openapis.org/blog/2021/02/16/migrating-from-openapi-3-0-to-3-1-0
- # the format for OA 3.1 is type + contentMediaType, for 3.0.* is type + format, we do both.
UploadFile: Schema(
type=OpenAPIType.STRING,
- format="binary", # type: ignore
content_media_type="application/octet-stream",
),
# pydantic types
diff --git a/litestar/datastructures/upload_file.py b/litestar/datastructures/upload_file.py
--- a/litestar/datastructures/upload_file.py
+++ b/litestar/datastructures/upload_file.py
@@ -119,6 +119,4 @@ def __modify_schema__(cls, field_schema: dict[str, Any], field: ModelField | Non
None
"""
if field:
- field_schema.update(
- {"type": OpenAPIType.STRING.value, "contentMediaType": "application/octet-stream", "format": "binary"}
- )
+ field_schema.update({"type": OpenAPIType.STRING.value, "contentMediaType": "application/octet-stream"})
diff --git a/litestar/openapi/controller.py b/litestar/openapi/controller.py
--- a/litestar/openapi/controller.py
+++ b/litestar/openapi/controller.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-from copy import copy
from functools import cached_property
from typing import TYPE_CHECKING, Callable, Literal
@@ -32,7 +31,7 @@ class OpenAPIController(Controller):
"""Base styling of the html body."""
redoc_version: str = "next"
"""Redoc version to download from the CDN."""
- swagger_ui_version: str = "4.15.5"
+ swagger_ui_version: str = "5.0.0"
"""SwaggerUI version to download from the CDN."""
stoplight_elements_version: str = "7.7.5"
"""StopLight Elements version to download from the CDN."""
@@ -261,13 +260,6 @@ def render_swagger_ui(self, request: Request) -> bytes:
"""
schema = self.get_schema_from_request(request)
- # Note: Fix for Swagger rejection OpenAPI >=3.1
- if not self._dumped_modified_schema:
- schema_copy = copy(schema)
- schema_copy.openapi = "3.0.3"
-
- self._dumped_modified_schema = encode_json(schema_copy.to_schema()).decode("utf-8")
-
head = f"""
<head>
<title>{schema.info.title}</title>
@@ -286,7 +278,7 @@ def render_swagger_ui(self, request: Request) -> bytes:
<div id='swagger-container'/>
<script type="text/javascript">
const ui = SwaggerUIBundle({{
- spec: {self._dumped_modified_schema},
+ spec: {encode_json(schema.to_schema()).decode("utf-8")},
dom_id: '#swagger-container',
deepLinking: true,
showExtensions: true,
| diff --git a/tests/openapi/test_controller.py b/tests/openapi/test_controller.py
--- a/tests/openapi/test_controller.py
+++ b/tests/openapi/test_controller.py
@@ -169,7 +169,7 @@ def test_openapi_swagger_caching_schema(
client.get("/schema/redoc") # Cache the schema
response = client.get("/schema/swagger") # Request swagger, should use a different cache
- assert "3.0.3" in response.text # Make sure the injected version is still there
+ assert "3.1.0" in response.text # Make sure the injected version is still there
assert response.status_code == HTTP_200_OK
assert response.headers["content-type"].startswith(MediaType.HTML.value)
diff --git a/tests/openapi/test_request_body.py b/tests/openapi/test_request_body.py
--- a/tests/openapi/test_request_body.py
+++ b/tests/openapi/test_request_body.py
@@ -60,11 +60,10 @@ async def handle_file_list_upload(
components = schema_dict["components"]
assert paths["/file-upload"]["post"]["requestBody"]["content"]["multipart/form-data"]["schema"] == {
"type": "string",
- "format": "binary",
"contentMediaType": "application/octet-stream",
}
assert paths["/file-list-upload"]["post"]["requestBody"]["content"]["multipart/form-data"]["schema"] == {
- "items": {"type": "string", "format": "binary", "contentMediaType": "application/octet-stream"},
+ "items": {"type": "string", "contentMediaType": "application/octet-stream"},
"type": "array",
}
assert components == {
@@ -73,12 +72,10 @@ async def handle_file_list_upload(
"properties": {
"cv": {
"type": "string",
- "format": "binary",
"contentMediaType": "application/octet-stream",
},
"image": {
"type": "string",
- "format": "binary",
"contentMediaType": "application/octet-stream",
},
},
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` implementation that loads files from package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks whether the given directories exist on the local filesystem.
That assumption does not hold in general, especially for any kind of virtual filesystem (e.g. a zipped package). I think this constraint should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-06-13T15:05:03 |
litestar-org/litestar | 1,828 | litestar-org__litestar-1828 | [
"1795"
] | d686e8776e577b35214f9c3ce230cd8399a650b8 | diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py
--- a/litestar/_openapi/schema_generation/schema.py
+++ b/litestar/_openapi/schema_generation/schema.py
@@ -439,7 +439,20 @@ def create_schema_for_object_type(
A schema instance.
"""
if field.is_mapping:
- return Schema(type=OpenAPIType.OBJECT)
+ return Schema(
+ type=OpenAPIType.OBJECT,
+ additional_properties=(
+ create_schema(
+ field=field.children[1],
+ generate_examples=generate_examples,
+ plugins=plugins,
+ schemas=schemas,
+ prefer_alias=prefer_alias,
+ )
+ if field.children and len(field.children) == 2
+ else None
+ ),
+ )
if field.is_non_string_sequence or field.is_non_string_iterable:
items = [
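For orientation: in the patch above, a mapping annotation such as `Dict[str, int]` yields a signature field whose `children` are the key and value types, so `field.children[1]` is the value type that now feeds `additionalProperties`. A sketch of that assumption, not verbatim source:
```python
# For Dict[str, int], the signature field's children are (str, int);
# only the second child (the value type) matters for additionalProperties.
key_type, value_type = field.children  # value_type -> additionalProperties
```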
| diff --git a/tests/openapi/test_spec_generation.py b/tests/openapi/test_spec_generation.py
--- a/tests/openapi/test_spec_generation.py
+++ b/tests/openapi/test_spec_generation.py
@@ -33,7 +33,13 @@ def handler(data: cls) -> cls:
"last_name": {"type": "string"},
"id": {"type": "string"},
"optional": {"oneOf": [{"type": "null"}, {"type": "string"}]},
- "complex": {"type": "object"},
+ "complex": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "array",
+ "items": {"type": "object", "additionalProperties": {"type": "string"}},
+ },
+ },
"pets": {
"oneOf": [
{"type": "null"},
| Bug: OpenApi generation for `Dict[str, X]`
### Description
The generated OpenAPI schema for `Dict[str, X]` ignores `X`: for example, `Dict[str, int]` generates just
`{"type": "object"}`
instead of
`{"type": "object", "additionalProperties": {"type": "integer"}}`.
By comparison, `List[int]` generates the correct schema
`{"items": {"type": "integer"}, "type": "array"}`
rather than just
`{"type": "array"}`.
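For illustration only (not from the original report), a minimal sketch that reproduces this; the handler and route names are made up:
```python
from typing import Dict

from litestar import Litestar, get


@get("/counts")
async def get_counts() -> Dict[str, int]:
    # The value type should surface as additionalProperties in the schema.
    return {"a": 1}


app = Litestar(route_handlers=[get_counts])
# Inspecting app.openapi_schema shows the response schema as just
# {"type": "object"} - the int value type is dropped.
```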
### URL to code causing the issue
_No response_
### MCVE
_No response_
### Steps to reproduce
_No response_
### Screenshots
_No response_
### Logs
_No response_
### Litestar Version
Main branch
### Platform
- [ ] Linux
- [ ] Mac
- [ ] Windows
- [ ] Other (Please specify in the description above)
| Thanks for the report!
The behavior can be observed in our [hello-world](https://github.com/litestar-org/litestar-hello-world) app too:

PRs welcome! | 2023-06-14T09:52:47 |
litestar-org/litestar | 1,829 | litestar-org__litestar-1829 | [
"1824"
] | d686e8776e577b35214f9c3ce230cd8399a650b8 | diff --git a/litestar/_openapi/schema_generation/schema.py b/litestar/_openapi/schema_generation/schema.py
--- a/litestar/_openapi/schema_generation/schema.py
+++ b/litestar/_openapi/schema_generation/schema.py
@@ -17,6 +17,7 @@
Hashable,
Iterable,
List,
+ Literal,
Mapping,
MutableMapping,
MutableSequence,
@@ -301,6 +302,22 @@ def create_enum_schema(annotation: EnumMeta) -> Schema:
return Schema(type=openapi_type, enum=enum_values)
+def iter_flat_literal_args(annotation: Any) -> Iterable[Any]:
+ """Iterate over the flattened arguments of a Literal.
+
+ Args:
+ annotation: A Literal annotation.
+
+ Yields:
+ The flattened arguments of the Literal.
+ """
+ for arg in get_args(annotation):
+ if get_origin_or_inner_type(arg) is Literal:
+ yield from iter_flat_literal_args(arg)
+ else:
+ yield arg
+
+
def create_literal_schema(annotation: Any) -> Schema:
"""Create a schema instance for a Literal.
@@ -310,7 +327,7 @@ def create_literal_schema(annotation: Any) -> Schema:
Returns:
A schema instance.
"""
- args = get_args(annotation)
+ args = tuple(iter_flat_literal_args(annotation))
schema = copy(TYPE_MAP[type(args[0])])
if len(args) > 1:
schema.enum = args
| diff --git a/tests/openapi/test_schema.py b/tests/openapi/test_schema.py
--- a/tests/openapi/test_schema.py
+++ b/tests/openapi/test_schema.py
@@ -125,10 +125,14 @@ class M(BaseModel):
def test_handling_of_literals() -> None:
+ ValueType = Literal["a", "b", "c"]
+ ConstType = Literal[1]
+
@dataclass
class DataclassWithLiteral:
- value: Literal["a", "b", "c"]
- const: Literal[1]
+ value: ValueType
+ const: ConstType
+ composite: Literal[ValueType, ConstType]
schemas: Dict[str, Schema] = {}
result = create_schema(
@@ -138,16 +142,23 @@ class DataclassWithLiteral:
schemas=schemas,
)
assert isinstance(result, Reference)
+
schema = schemas["DataclassWithLiteral"]
assert isinstance(schema, Schema)
assert schema.properties
+
value = schema.properties["value"]
assert isinstance(value, Schema)
assert value.enum == ("a", "b", "c")
+
const = schema.properties["const"]
assert isinstance(const, Schema)
assert const.const == 1
+ composite = schema.properties["composite"]
+ assert isinstance(composite, Schema)
+ assert composite.enum == ("a", "b", "c", 1)
+
def test_schema_hashing() -> None:
schema = Schema(
| Bug: Cannot generate OpenApi schema for Literal of Literals
### Description
`Literal` may be parameterized by other literal types (see https://peps.python.org/pep-0586/#legal-parameters-for-literal-at-type-check-time), but such annotations currently break OpenAPI schema generation.
### MCVE
Modified version of [test_handling_of_literals](https://github.com/litestar-org/litestar/blob/main/tests/openapi/test_schema.py#L127):
```python
def test_composite_literals() -> None:
    Letters = Literal["a", "b", "c"]
    Digits = Literal[1, 2, 3]

    @dataclass
    class DataclassWithLiteral:
        composite: Literal[Letters, Digits]

    schemas: Dict[str, Schema] = {}
    result = create_schema(
        field=SignatureField.create(name="", field_type=DataclassWithLiteral),
        generate_examples=False,
        plugins=[],
        schemas=schemas,
        prefer_alias=True,
    )
```
Output: `KeyError: <class 'typing._GenericAlias'>`
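For reference, PEP 586 defines the nested form as equivalent to its flattened expansion; whether the runtime `typing` module flattens the arguments itself varies across Python versions, which is why the patch earlier in this record flattens them explicitly via `iter_flat_literal_args`. A quick illustration:
```python
from typing import Literal, get_args

Letters = Literal["a", "b", "c"]
Digits = Literal[1, 2, 3]

# Per PEP 586 this is equivalent to Literal["a", "b", "c", 1, 2, 3].
Composite = Literal[Letters, Digits]

# Depending on the Python version, get_args() may return the nested
# Literal unflattened - hence the explicit recursion in the fix.
print(get_args(Composite))
```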
### Litestar Version
Main branch
| 2023-06-14T09:55:27 |
|
litestar-org/litestar | 1,838 | litestar-org__litestar-1838 | [
"4321",
"1234"
] | a1842a000f2f3639bc9d1b0875ae31da050689cd | diff --git a/litestar/_openapi/responses.py b/litestar/_openapi/responses.py
--- a/litestar/_openapi/responses.py
+++ b/litestar/_openapi/responses.py
@@ -91,13 +91,13 @@ def create_success_response( # noqa: C901
if return_annotation is not Signature.empty and not return_type.is_subclass_of(
(NoneType, File, Redirect, Stream, ASGIResponse)
):
+ media_type = route_handler.media_type
if return_annotation is Template:
return_annotation = str
- route_handler.media_type = get_enum_string_value(MediaType.HTML)
+ media_type = media_type or MediaType.HTML
elif return_type.is_subclass_of(LitestarResponse):
return_annotation = return_type.inner_types[0].annotation if return_type.inner_types else Any
- if not route_handler.media_type:
- route_handler.media_type = get_enum_string_value(MediaType.JSON)
+ media_type = media_type or MediaType.JSON
if dto := route_handler.resolve_return_dto():
result = dto.create_openapi_schema("return", str(route_handler), generate_examples, schemas, False)
@@ -116,8 +116,7 @@ def create_success_response( # noqa: C901
schema.content_media_type = route_handler.content_media_type
response = OpenAPIResponse(
- content={route_handler.media_type: OpenAPIMediaType(schema=result)},
- description=description,
+ content={get_enum_string_value(media_type): OpenAPIMediaType(schema=result)}, description=description
)
elif return_type.is_subclass_of(Redirect):
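The essence of the change above: the success response's media type is now resolved into a local variable with per-annotation fallbacks instead of being written back onto the route handler, so building the OpenAPI schema no longer mutates handler state. Reduced to its core (simplified, not verbatim source):
```python
# Resolve locally; fall back to JSON for plain Litestar responses.
media_type = route_handler.media_type or MediaType.JSON
# ...the OpenAPIResponse is then built from `media_type` alone.
```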
| StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` implementation that loads files from package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks whether the given directories exist on the local filesystem.
That assumption does not hold in general, especially for any kind of virtual filesystem (e.g. a zipped package). I think this constraint should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
| I agree. Do you want to submit a PR? | 2023-06-17T23:37:31 |