Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/su6.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
python-version: '3.12'
- uses: yezz123/setup-uv@v4
with:
uv-venv: ".venv"
Expand Down
2 changes: 1 addition & 1 deletion .readthedocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ version: 2
build:
os: ubuntu-22.04
tools:
python: "3.11"
python: "3.13"

mkdocs:
configuration: mkdocs.yml
Expand Down
53 changes: 53 additions & 0 deletions docs/8_mixins.md
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,58 @@ class MyTable(TypedTable, SlugMixin, slug_field="title"):
# Now, whenever you insert a record into MyTable, the 'slug' field will be automatically generated based on the 'title' field.
```

## Using `PydanticMixin`

The `PydanticMixin` enables seamless integration with Pydantic-based frameworks (like FastAPI) by adding schema generation
capabilities to your models. Without this mixin, you cannot return TypedTable instances directly as FastAPI responses
or use them with `pydantic.TypeAdapter`.

Add the mixin to enable `model_dump()` for serialization, including support for relationships and computed properties:

```python
from typedal import TypedTable
from typedal.mixins import PydanticMixin


class Author(TypedTable, PydanticMixin):
name: str


class Book(TypedTable, PydanticMixin):
title: str
author: Author

@property
def display_title(self) -> str:
return f"{self.title} by {self.author.name}"


# After inserting records and joining relationships:
book = Book.where(id=1).join("author").first()

# model_dump() serializes the full object graph
data = book.model_dump()
# -> {"id": 1, "title": "...", "author": {"id": 1, "name": "..."}, "display_title": "..."}

# Use mode="json" for JSON-serializable output (dates as ISO strings, etc.)
data = book.model_dump(mode="json")
```

> **Note:** Any TypedTable model referenced from a `PydanticMixin` class (e.g. `Author` above, referenced by `Book`) must
> itself include `PydanticMixin`. Otherwise the nested model cannot be serialized, and the full object graph will fail to dump.

With this mixin, TypedTable instances work seamlessly as FastAPI response models:

```python
from fastapi import FastAPI

app = FastAPI()

@app.get("/books/{book_id}")
def get_book(book_id: int) -> Book:
return Book.where(id=book_id).join("author").first()
```

## Creating Custom Mixins

To create your own mixins for additional functionality, follow these steps:
Expand Down Expand Up @@ -122,6 +174,7 @@ recent_articles = (
By using these mixins, you can enhance the functionality of your models in a modular and reusable manner, saving you
time and effort in your development process.


---

Looking to cache expensive function results? Head to [9. Function Memoization](./9_memoization.md) to learn about `db.memoize()`.
7 changes: 4 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ name = "TypeDAL"
dynamic = ["version"]
description = 'Typing support for PyDAL'
readme = "README.md"
requires-python = ">=3.11"
requires-python = ">=3.12"
license-expression = "MIT"
keywords = []
authors = [
Expand All @@ -19,7 +19,6 @@ authors = [
classifiers = [
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
Expand Down Expand Up @@ -56,6 +55,7 @@ all = [
"tabulate",
"pydal2sql[all]>=1.2.0",
"edwh-migrate[full]>=0.8.0",
"pydantic < 3",
"questionary",
"tomlkit",
]
Expand All @@ -70,6 +70,7 @@ dev = [
"pytest-mypy-testing",
"contextlib-chdir",
"testcontainers",
"pydantic < 3",
# depends on ->
"requests<2.32",
# mypy:
Expand Down Expand Up @@ -142,7 +143,7 @@ exclude_also = [
]

[tool.mypy]
python_version = "3.11"
python_version = "3.13"

# `some: int = None` looks nicer than `some: int | None = None` and pycharm still understands it
no_implicit_optional = false # I guess 'strict_optional' should be true, but disable this one because it's double!
Expand Down
14 changes: 6 additions & 8 deletions src/typedal/caching.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,10 +203,6 @@ def _remove_cache(s: Set, tablename: str) -> None:
indeces = s.select("id").column("id")
remove_cache(indeces, tablename)


T_TypedTable = t.TypeVar("T_TypedTable", bound=TypedTable)


def get_expire(
expires_at: t.Optional[dt.datetime] = None,
ttl: t.Optional[int | dt.timedelta] = None,
Expand Down Expand Up @@ -250,7 +246,7 @@ def _insert_cache_entry(
db.commit()


def save_to_cache(
def save_to_cache[T_TypedTable: TypedTable](
instance: TypedRows[T_TypedTable],
rows: Rows,
expires_at: t.Optional[dt.datetime] = None,
Expand Down Expand Up @@ -418,8 +414,10 @@ def _expired_and_valid_query() -> tuple[str, str]:
return expired_items, valid_items


T = t.TypeVar("T")
Stats = t.TypedDict("Stats", {"total": T, "valid": T, "expired": T})
class Stats[T](t.TypedDict):
    """
    Per-category cache statistics, generic over the value type T.

    Keys mirror the expired/valid split produced by `_expired_and_valid_query`;
    `calculate_stats` returns `Stats[GenericStats]`, so T is whatever summary
    object a caller tallies per bucket.
    """

    total: T
    valid: T
    expired: T

RowStats = t.TypedDict(
"RowStats",
Expand Down Expand Up @@ -527,7 +525,7 @@ def calculate_stats(db: "TypeDAL") -> Stats[GenericStats]:
}


def memoize(
def memoize[T: t.Any](
db: "TypeDAL",
func: t.Callable[..., T],
*args: TypedRows[t.Any] | TypedTable,
Expand Down
10 changes: 4 additions & 6 deletions src/typedal/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,12 +94,10 @@
"fake_migrate": None, # only enable via config if required
}

T = typing.TypeVar("T")

notfound = object()


def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[AnyDict]: # pragma: no cover
def _get_question[T](prop: str, annotation: typing.Type[T]) -> Optional[AnyDict]: # pragma: no cover
question = questionary_types.get(prop, notfound)
if question is notfound:
# None means skip the question, notfound means use the type default!
Expand All @@ -111,7 +109,7 @@ def _get_question(prop: str, annotation: typing.Type[T]) -> Optional[AnyDict]:
return question.copy() # type: ignore


def get_question(prop: str, annotation: typing.Type[T], default: T | None) -> Optional[T]: # pragma: no cover
def get_question[T](prop: str, annotation: typing.Type[T], default: T | None) -> Optional[T]: # pragma: no cover
"""
Generate a question based on a config property and prompt the user for it.
"""
Expand Down Expand Up @@ -449,7 +447,7 @@ def migrations_stub(
return 0


AnyNestedDict: typing.TypeAlias = dict[str, AnyDict]
type AnyNestedDict = dict[str, AnyDict]


def tabulate_data(data: AnyNestedDict) -> None:
Expand All @@ -466,7 +464,7 @@ def tabulate_data(data: AnyNestedDict) -> None:
print(tabulate(flattened_data, headers="keys"))


FormatOptions: typing.TypeAlias = typing.Literal["plaintext", "json", "yaml", "toml"]
type FormatOptions = typing.Literal["plaintext", "json", "yaml", "toml"]


def get_output_format(fmt: FormatOptions) -> typing.Callable[[AnyNestedDict], None]:
Expand Down
20 changes: 14 additions & 6 deletions src/typedal/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
sql_expression,
to_snake,
)
from .types import CacheStatus, Field, T, Template # type: ignore
from .types import CacheStatus, Field, Template # noqa: F401

try:
# python 3.14+
Expand Down Expand Up @@ -246,7 +246,7 @@ def __init__(
self.try_define(_TypedalCache)
self.try_define(_TypedalCacheDependency)

def try_define(self, model: t.Type[T], verbose: bool = False) -> t.Type[T]:
def try_define[T: t.Any](self, model: t.Type[T], verbose: bool = False) -> t.Type[T]:
"""
Try to define a model with migrate or fall back to fake migrate.
"""
Expand All @@ -270,7 +270,7 @@ def try_define(self, model: t.Type[T], verbose: bool = False) -> t.Type[T]:
}

@t.overload
def define(self, maybe_cls: None = None, **kwargs: t.Any) -> t.Callable[[t.Type[T]], t.Type[T]]:
def define[T: t.Any](self, maybe_cls: None = None, **kwargs: t.Any) -> t.Callable[[t.Type[T]], t.Type[T]]:
"""
Typing Overload for define without a class.

Expand All @@ -279,15 +279,15 @@ class MyTable(TypedTable): ...
"""

@t.overload
def define(self, maybe_cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
def define[T: t.Any](self, maybe_cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
"""
Typing Overload for define with a class.

@db.define
class MyTable(TypedTable): ...
"""

def define(
def define[T: t.Any](
self,
maybe_cls: t.Type[T] | None = None,
**kwargs: t.Any,
Expand Down Expand Up @@ -379,6 +379,14 @@ def _class_map(self) -> dict[str, t.Type["TypedTable"]]:
# alias for backward-compatibility
return self._builder.class_map

def _known_classes(self) -> dict[str, t.Type["TypedTable"]]:
"""
Return currently defined TypedTable classes keyed by class name.

Useful when resolving forward references in annotations/relationships.
"""
return {table.__name__: table for table in self._class_map.values()}

@staticmethod
def to_snake(camel: str) -> str:
"""
Expand Down Expand Up @@ -460,7 +468,7 @@ def sql_expression(
"""
return sql_expression(self, sql_fragment, *raw_args, output_type=output_type, **raw_kwargs)

def memoize(
def memoize[T: t.Any](
self,
func: t.Callable[..., T],
# should be TypedRows[TypedTable] or TypedTable but for some reason that breaks
Expand Down
5 changes: 2 additions & 3 deletions src/typedal/define.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
from .tables import TypedTable
from .types import (
Field,
T,
T_annotation,
Table,
_Types,
Expand All @@ -53,7 +52,7 @@ def __init__(self, db: "TypeDAL"):
self.db = db
self.class_map: dict[str, t.Type["TypedTable"]] = {}

def define(self, cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
def define[T: t.Any](self, cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
"""Build and register a table from a TypedTable class."""
full_dict = all_dict(cls)
tablename = to_snake(cls.__name__)
Expand Down Expand Up @@ -133,7 +132,7 @@ def annotation_to_pydal_fieldtype(
"""Convert Python type annotation to pydal field type string."""
ftype = t.cast(type, ftype_annotation) # cast from Type to type to make mypy happy)

known_classes = {table.__name__: table for table in self.class_map.values()}
known_classes = self.db._known_classes()

if isinstance(ftype, str):
# extract type from string
Expand Down
8 changes: 4 additions & 4 deletions src/typedal/fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@
Query,
T_annotation,
T_MetaInstance,
T_subclass,
T_Value,
Validator,
)

Expand All @@ -37,7 +35,7 @@
## general


class TypedField(Expression, t.Generic[T_Value]): # pragma: no cover
class TypedField[T_Value](Expression): # pragma: no cover
"""
Typed version of pydal.Field, which will be converted to a normal Field in the background.
"""
Expand Down Expand Up @@ -375,7 +373,9 @@ def UploadField(**kw: t.Unpack[FieldSettings]) -> TypedField[str]:
Upload = UploadField


def ReferenceField(
def ReferenceField[
T_subclass: (TypedTable, Table)
](
other_table: str | t.Type[TypedTable] | TypedTable | Table | T_subclass,
**kw: t.Unpack[FieldSettings],
) -> TypedField[int]:
Expand Down
Loading
Loading