From 8029c485953292daccb614d989ece6d902f78822 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 28 Jan 2026 08:30:32 -0800 Subject: [PATCH 01/38] fix(core): switch to relative import --- .../overture-schema-core/src/overture/schema/core/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/overture-schema-core/src/overture/schema/core/models.py b/packages/overture-schema-core/src/overture/schema/core/models.py index 6460d26ba..a248ad615 100644 --- a/packages/overture-schema-core/src/overture/schema/core/models.py +++ b/packages/overture-schema-core/src/overture/schema/core/models.py @@ -12,7 +12,6 @@ from pydantic_core import core_schema from typing_extensions import Self -from overture.schema.core.sources import Sources from overture.schema.system.feature import Feature from overture.schema.system.field_constraint import UniqueItemsConstraint from overture.schema.system.model_constraint import no_extra_fields @@ -25,6 +24,7 @@ ) from .enums import PerspectiveMode +from .sources import Sources from .types import ( FeatureVersion, Level, From 537b36f9880808d28ce987916c4c829dc784909c Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 28 Jan 2026 08:33:17 -0800 Subject: [PATCH 02/38] fix(core): fix __name__ reference --- .../overture-schema-core/src/overture/schema/core/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/overture-schema-core/src/overture/schema/core/models.py b/packages/overture-schema-core/src/overture/schema/core/models.py index a248ad615..ed43eabe9 100644 --- a/packages/overture-schema-core/src/overture/schema/core/models.py +++ b/packages/overture-schema-core/src/overture/schema/core/models.py @@ -66,7 +66,7 @@ def __validate_ext_fields__(self) -> Self: raise ValueError( f"invalid extra field name{maybe_plural}: {', '.join(invalid_extra_fields)} " "(extra fields are temporarily allowed, but only if their names start with 'ext_', " - "but all extra field name support 
in {self.__class__.name} is on a deprecation path " f"but all extra field name support in {self.__class__.__name__} is on a deprecation path " "and will be removed)" ) return self From cb8b8db629b9ff86af0a162fc61165210febce8e Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 28 Jan 2026 08:35:56 -0800 Subject: [PATCH 03/38] chore: add install make target --- Makefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Makefile b/Makefile index 5848adf8e..6788c8850 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,8 @@ default: test-all +install: uv-sync + uv-sync: @uv sync --all-packages 2> /dev/null From e7771dc1c829770177b1c77c738997bb2bfda4cd Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 10 Feb 2026 16:07:58 -0800 Subject: [PATCH 04/38] Remove pytest-subtests dependency pytest-subtests merged into pytest core as of pytest 9. Update test imports from pytest_subtests.SubTests to _pytest.subtests.Subtests. --- packages/overture-schema-core/pyproject.toml | 1 - .../tests/primitive/test_geom.py | 8 +- .../tests/test_feature.py | 8 +- uv.lock | 911 ++++++++++-------- 4 files changed, 497 insertions(+), 431 deletions(-) diff --git a/packages/overture-schema-core/pyproject.toml b/packages/overture-schema-core/pyproject.toml index f7639dd34..d9b56766c 100644 --- a/packages/overture-schema-core/pyproject.toml +++ b/packages/overture-schema-core/pyproject.toml @@ -21,7 +21,6 @@ packages = ["src/overture"] [dependency-groups] dev = [ "jsonpath-ng>=1.7.0", - "pytest-subtests>=0.14.2", "types-pyyaml>=6.0.12.20250516", "types-shapely>=2.1.0.20250710", ] diff --git a/packages/overture-schema-system/tests/primitive/test_geom.py b/packages/overture-schema-system/tests/primitive/test_geom.py index c43f783e0..e849a4b02 100644 --- a/packages/overture-schema-system/tests/primitive/test_geom.py +++ b/packages/overture-schema-system/tests/primitive/test_geom.py @@ -5,8 +5,8 @@ from typing import Annotated, Any import pytest +from _pytest.subtests import Subtests 
from pydantic import BaseModel, ValidationError -from pytest_subtests import SubTests from shapely import wkt from overture.schema.system.primitive import ( @@ -275,7 +275,7 @@ def powerset( @pytest.mark.parametrize("geometry_type_case_subset", TEST_GEOMETRY_TYPE_CASE_SUBSETS) def test_geometry_type_constraint_on_allowed_geometry( - geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: SubTests + geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: Subtests ) -> None: allowed_types = tuple(g.geometry_type for g in geometry_type_case_subset) @@ -291,7 +291,7 @@ class ConstrainedModel(BaseModel): @pytest.mark.parametrize("geometry_type_case_subset", TEST_GEOMETRY_TYPE_CASE_SUBSETS) def test_geometry_type_constraint_on_disallowed_geometry( - geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: SubTests + geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: Subtests ) -> None: allowed_types = tuple(g.geometry_type for g in geometry_type_case_subset) @@ -313,7 +313,7 @@ class ConstrainedModel(BaseModel): @pytest.mark.parametrize("geometry_type_case_subset", TEST_GEOMETRY_TYPE_CASE_SUBSETS) def test_geometry_type_constraint_on_geometry_counterexamples( - geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: SubTests + geometry_type_case_subset: tuple[GeometryTypeCase, ...], subtests: Subtests ) -> None: allowed_types = tuple(g.geometry_type for g in geometry_type_case_subset) diff --git a/packages/overture-schema-system/tests/test_feature.py b/packages/overture-schema-system/tests/test_feature.py index 49dfbe85f..ec0dfc795 100644 --- a/packages/overture-schema-system/tests/test_feature.py +++ b/packages/overture-schema-system/tests/test_feature.py @@ -5,6 +5,7 @@ from typing import Annotated, Any, Literal, cast import pytest +from _pytest.subtests import Subtests from pydantic import ( BaseModel, ConfigDict, @@ -15,7 +16,6 @@ create_model, ) from pydantic.json_schema import JsonSchemaValue, JsonValue 
-from pytest_subtests import SubTests from util import assert_subset from overture.schema.system.feature import Feature, _FieldLevel, _maybe_refactor_schema @@ -37,7 +37,7 @@ class TestFieldDiscriminator: @pytest.mark.parametrize("field", ["hello", "type", "properties"]) - def test_validation_success_simple(self, field: str, subtests: SubTests) -> None: + def test_validation_success_simple(self, field: str, subtests: Subtests) -> None: """ Test the discriminated union success case for a discriminator that is a simple string. @@ -150,7 +150,7 @@ def test_validation_success_simple(self, field: str, subtests: SubTests) -> None actual = tap.validate_python(expect) assert expect == actual - def test_validation_success_convert(self, subtests: SubTests) -> None: + def test_validation_success_convert(self, subtests: Subtests) -> None: """ Test the discriminated union success case where the discriminator value is of a variety of types. @@ -226,7 +226,7 @@ class TestEnum(str, Enum): model1_actual = tap.validate_python(model1_expect) assert model1_expect == model1_actual - def test_validation_success_missing_discriminator(self, subtests: SubTests) -> None: + def test_validation_success_missing_discriminator(self, subtests: Subtests) -> None: """ Tests a union of discriminated unions against an input that doesn't contain the contain the discriminator field of the first union, but does contain the discriminator field diff --git a/uv.lock b/uv.lock index 8ee6263f7..2bc46eb72 100644 --- a/uv.lock +++ b/uv.lock @@ -31,25 +31,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] -[[package]] -name = "attrs" -version = "25.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, -] - [[package]] name = "click" -version = "8.3.0" +version = "8.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] [[package]] @@ -63,101 +54,101 @@ wheels = [ [[package]] name = 
"coverage" -version = "7.11.0" +version = "7.13.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ad/49/349848445b0e53660e258acbcc9b0d014895b6739237920886672240f84b/coverage-7.13.2.tar.gz", hash = "sha256:044c6951ec37146b72a50cc81ef02217d27d4c3640efd2640311393cbbf143d3", size = 826523, upload-time = "2026-01-25T13:00:04.889Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" }, - { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" }, - { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" }, - { url = "https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = 
"2025-10-15T15:12:25.974Z" }, - { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" }, - { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" }, - { url = "https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" }, - { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" }, - { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, upload-time = "2025-10-15T15:12:34.079Z" }, - { url = "https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" }, - { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" }, - { url = "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, - { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, - { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, - { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, - { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = "2025-10-15T15:12:48.904Z" }, - { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, - { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, - { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, - { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, - { url = "https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, - { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" }, - { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" }, - { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" }, - { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" }, - { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" }, - { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" }, - { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" }, - { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" }, - { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" }, - { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" }, - { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" }, - { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" }, - { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" }, - { url = "https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" }, - { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" }, - { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = 
"2025-10-15T15:13:40.464Z" }, - { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" }, - { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" }, - { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" }, - { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" }, - { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" }, - { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" }, - { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" }, - { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" }, - { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" }, - { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = 
"2025-10-15T15:14:02.188Z" }, - { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" }, - { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" }, - { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" }, - { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" }, - { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" }, - { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" }, - { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" }, - { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" }, - { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = 
"2025-10-15T15:14:24.205Z" }, - { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" }, - { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" }, - { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" }, - { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" }, - { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" }, - { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" }, - { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" }, - { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" }, - { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" }, - { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" }, - { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" }, - { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" }, - { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" }, - { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, + { url = "https://files.pythonhosted.org/packages/a4/2d/63e37369c8e81a643afe54f76073b020f7b97ddbe698c5c944b51b0a2bc5/coverage-7.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4af3b01763909f477ea17c962e2cca8f39b350a4e46e3a30838b2c12e31b81b", size = 218842, upload-time = "2026-01-25T12:57:15.3Z" }, + { url = "https://files.pythonhosted.org/packages/57/06/86ce882a8d58cbcb3030e298788988e618da35420d16a8c66dac34f138d0/coverage-7.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36393bd2841fa0b59498f75466ee9bdec4f770d3254f031f23e8fd8e140ffdd2", size = 219360, upload-time = "2026-01-25T12:57:17.572Z" }, + { url = "https://files.pythonhosted.org/packages/cd/84/70b0eb1ee19ca4ef559c559054c59e5b2ae4ec9af61398670189e5d276e9/coverage-7.13.2-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9cc7573518b7e2186bd229b1a0fe24a807273798832c27032c4510f47ffdb896", size = 246123, upload-time = "2026-01-25T12:57:19.087Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/fb/05b9830c2e8275ebc031e0019387cda99113e62bb500ab328bb72578183b/coverage-7.13.2-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ca9566769b69a5e216a4e176d54b9df88f29d750c5b78dbb899e379b4e14b30c", size = 247930, upload-time = "2026-01-25T12:57:20.929Z" }, + { url = "https://files.pythonhosted.org/packages/81/aa/3f37858ca2eed4f09b10ca3c6ddc9041be0a475626cd7fd2712f4a2d526f/coverage-7.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c9bdea644e94fd66d75a6f7e9a97bb822371e1fe7eadae2cacd50fcbc28e4dc", size = 249804, upload-time = "2026-01-25T12:57:22.904Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b3/c904f40c56e60a2d9678a5ee8df3d906d297d15fb8bec5756c3b0a67e2df/coverage-7.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5bd447332ec4f45838c1ad42268ce21ca87c40deb86eabd59888859b66be22a5", size = 246815, upload-time = "2026-01-25T12:57:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/41/91/ddc1c5394ca7fd086342486440bfdd6b9e9bda512bf774599c7c7a0081e0/coverage-7.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7c79ad5c28a16a1277e1187cf83ea8dafdcc689a784228a7d390f19776db7c31", size = 247843, upload-time = "2026-01-25T12:57:26.544Z" }, + { url = "https://files.pythonhosted.org/packages/87/d2/cdff8f4cd33697883c224ea8e003e9c77c0f1a837dc41d95a94dd26aad67/coverage-7.13.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:76e06ccacd1fb6ada5d076ed98a8c6f66e2e6acd3df02819e2ee29fd637b76ad", size = 245850, upload-time = "2026-01-25T12:57:28.507Z" }, + { url = "https://files.pythonhosted.org/packages/f5/42/e837febb7866bf2553ab53dd62ed52f9bb36d60c7e017c55376ad21fbb05/coverage-7.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:49d49e9a5e9f4dc3d3dac95278a020afa6d6bdd41f63608a76fa05a719d5b66f", size = 246116, upload-time = "2026-01-25T12:57:30.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/b1/4a3f935d7df154df02ff4f71af8d61298d713a7ba305d050ae475bfbdde2/coverage-7.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed2bce0e7bfa53f7b0b01c722da289ef6ad4c18ebd52b1f93704c21f116360c8", size = 246720, upload-time = "2026-01-25T12:57:32.165Z" }, + { url = "https://files.pythonhosted.org/packages/e1/fe/538a6fd44c515f1c5197a3f078094cbaf2ce9f945df5b44e29d95c864bff/coverage-7.13.2-cp310-cp310-win32.whl", hash = "sha256:1574983178b35b9af4db4a9f7328a18a14a0a0ce76ffaa1c1bacb4cc82089a7c", size = 221465, upload-time = "2026-01-25T12:57:33.511Z" }, + { url = "https://files.pythonhosted.org/packages/5e/09/4b63a024295f326ec1a40ec8def27799300ce8775b1cbf0d33b1790605c4/coverage-7.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:a360a8baeb038928ceb996f5623a4cd508728f8f13e08d4e96ce161702f3dd99", size = 222397, upload-time = "2026-01-25T12:57:34.927Z" }, + { url = "https://files.pythonhosted.org/packages/6c/01/abca50583a8975bb6e1c59eff67ed8e48bb127c07dad5c28d9e96ccc09ec/coverage-7.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:060ebf6f2c51aff5ba38e1f43a2095e087389b1c69d559fde6049a4b0001320e", size = 218971, upload-time = "2026-01-25T12:57:36.953Z" }, + { url = "https://files.pythonhosted.org/packages/eb/0e/b6489f344d99cd1e5b4d5e1be52dfd3f8a3dc5112aa6c33948da8cabad4e/coverage-7.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1ea8ca9db5e7469cd364552985e15911548ea5b69c48a17291f0cac70484b2e", size = 219473, upload-time = "2026-01-25T12:57:38.934Z" }, + { url = "https://files.pythonhosted.org/packages/17/11/db2f414915a8e4ec53f60b17956c27f21fb68fcf20f8a455ce7c2ccec638/coverage-7.13.2-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b780090d15fd58f07cf2011943e25a5f0c1c894384b13a216b6c86c8a8a7c508", size = 249896, upload-time = "2026-01-25T12:57:40.365Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/06/0823fe93913663c017e508e8810c998c8ebd3ec2a5a85d2c3754297bdede/coverage-7.13.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:88a800258d83acb803c38175b4495d293656d5fac48659c953c18e5f539a274b", size = 251810, upload-time = "2026-01-25T12:57:42.045Z" }, + { url = "https://files.pythonhosted.org/packages/61/dc/b151c3cc41b28cdf7f0166c5fa1271cbc305a8ec0124cce4b04f74791a18/coverage-7.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6326e18e9a553e674d948536a04a80d850a5eeefe2aae2e6d7cf05d54046c01b", size = 253920, upload-time = "2026-01-25T12:57:44.026Z" }, + { url = "https://files.pythonhosted.org/packages/2d/35/e83de0556e54a4729a2b94ea816f74ce08732e81945024adee46851c2264/coverage-7.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:59562de3f797979e1ff07c587e2ac36ba60ca59d16c211eceaa579c266c5022f", size = 250025, upload-time = "2026-01-25T12:57:45.624Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/af2eb9c3926ce3ea0d58a0d2516fcbdacf7a9fc9559fe63076beaf3f2596/coverage-7.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27ba1ed6f66b0e2d61bfa78874dffd4f8c3a12f8e2b5410e515ab345ba7bc9c3", size = 251612, upload-time = "2026-01-25T12:57:47.713Z" }, + { url = "https://files.pythonhosted.org/packages/26/62/5be2e25f3d6c711d23b71296f8b44c978d4c8b4e5b26871abfc164297502/coverage-7.13.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8be48da4d47cc68754ce643ea50b3234557cbefe47c2f120495e7bd0a2756f2b", size = 249670, upload-time = "2026-01-25T12:57:49.378Z" }, + { url = "https://files.pythonhosted.org/packages/b3/51/400d1b09a8344199f9b6a6fc1868005d766b7ea95e7882e494fa862ca69c/coverage-7.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2a47a4223d3361b91176aedd9d4e05844ca67d7188456227b6bf5e436630c9a1", size = 249395, upload-time = "2026-01-25T12:57:50.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/36/f02234bc6e5230e2f0a63fd125d0a2093c73ef20fdf681c7af62a140e4e7/coverage-7.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6f141b468740197d6bd38f2b26ade124363228cc3f9858bd9924ab059e00059", size = 250298, upload-time = "2026-01-25T12:57:52.287Z" }, + { url = "https://files.pythonhosted.org/packages/b0/06/713110d3dd3151b93611c9cbfc65c15b4156b44f927fced49ac0b20b32a4/coverage-7.13.2-cp311-cp311-win32.whl", hash = "sha256:89567798404af067604246e01a49ef907d112edf2b75ef814b1364d5ce267031", size = 221485, upload-time = "2026-01-25T12:57:53.876Z" }, + { url = "https://files.pythonhosted.org/packages/16/0c/3ae6255fa1ebcb7dec19c9a59e85ef5f34566d1265c70af5b2fc981da834/coverage-7.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:21dd57941804ae2ac7e921771a5e21bbf9aabec317a041d164853ad0a96ce31e", size = 222421, upload-time = "2026-01-25T12:57:55.433Z" }, + { url = "https://files.pythonhosted.org/packages/b5/37/fabc3179af4d61d89ea47bd04333fec735cd5e8b59baad44fed9fc4170d7/coverage-7.13.2-cp311-cp311-win_arm64.whl", hash = "sha256:10758e0586c134a0bafa28f2d37dd2cdb5e4a90de25c0fc0c77dabbad46eca28", size = 221088, upload-time = "2026-01-25T12:57:57.41Z" }, + { url = "https://files.pythonhosted.org/packages/46/39/e92a35f7800222d3f7b2cbb7bbc3b65672ae8d501cb31801b2d2bd7acdf1/coverage-7.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f106b2af193f965d0d3234f3f83fc35278c7fb935dfbde56ae2da3dd2c03b84d", size = 219142, upload-time = "2026-01-25T12:58:00.448Z" }, + { url = "https://files.pythonhosted.org/packages/45/7a/8bf9e9309c4c996e65c52a7c5a112707ecdd9fbaf49e10b5a705a402bbb4/coverage-7.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f45d21dc4d5d6bd29323f0320089ef7eae16e4bef712dff79d184fa7330af3", size = 219503, upload-time = "2026-01-25T12:58:02.451Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/93/17661e06b7b37580923f3f12406ac91d78aeed293fb6da0b69cc7957582f/coverage-7.13.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:fae91dfecd816444c74531a9c3d6ded17a504767e97aa674d44f638107265b99", size = 251006, upload-time = "2026-01-25T12:58:04.059Z" }, + { url = "https://files.pythonhosted.org/packages/12/f0/f9e59fb8c310171497f379e25db060abef9fa605e09d63157eebec102676/coverage-7.13.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:264657171406c114787b441484de620e03d8f7202f113d62fcd3d9688baa3e6f", size = 253750, upload-time = "2026-01-25T12:58:05.574Z" }, + { url = "https://files.pythonhosted.org/packages/e5/b1/1935e31add2232663cf7edd8269548b122a7d100047ff93475dbaaae673e/coverage-7.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae47d8dcd3ded0155afbb59c62bd8ab07ea0fd4902e1c40567439e6db9dcaf2f", size = 254862, upload-time = "2026-01-25T12:58:07.647Z" }, + { url = "https://files.pythonhosted.org/packages/af/59/b5e97071ec13df5f45da2b3391b6cdbec78ba20757bc92580a5b3d5fa53c/coverage-7.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a0b33e9fd838220b007ce8f299114d406c1e8edb21336af4c97a26ecfd185aa", size = 251420, upload-time = "2026-01-25T12:58:09.309Z" }, + { url = "https://files.pythonhosted.org/packages/3f/75/9495932f87469d013dc515fb0ce1aac5fa97766f38f6b1a1deb1ee7b7f3a/coverage-7.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b3becbea7f3ce9a2d4d430f223ec15888e4deb31395840a79e916368d6004cce", size = 252786, upload-time = "2026-01-25T12:58:10.909Z" }, + { url = "https://files.pythonhosted.org/packages/6a/59/af550721f0eb62f46f7b8cb7e6f1860592189267b1c411a4e3a057caacee/coverage-7.13.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f819c727a6e6eeb8711e4ce63d78c620f69630a2e9d53bc95ca5379f57b6ba94", size = 250928, upload-time = 
"2026-01-25T12:58:12.449Z" }, + { url = "https://files.pythonhosted.org/packages/9b/b1/21b4445709aae500be4ab43bbcfb4e53dc0811c3396dcb11bf9f23fd0226/coverage-7.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:4f7b71757a3ab19f7ba286e04c181004c1d61be921795ee8ba6970fd0ec91da5", size = 250496, upload-time = "2026-01-25T12:58:14.047Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b1/0f5d89dfe0392990e4f3980adbde3eb34885bc1effb2dc369e0bf385e389/coverage-7.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b7fc50d2afd2e6b4f6f2f403b70103d280a8e0cb35320cbbe6debcda02a1030b", size = 252373, upload-time = "2026-01-25T12:58:15.976Z" }, + { url = "https://files.pythonhosted.org/packages/01/c9/0cf1a6a57a9968cc049a6b896693faa523c638a5314b1fc374eb2b2ac904/coverage-7.13.2-cp312-cp312-win32.whl", hash = "sha256:292250282cf9bcf206b543d7608bda17ca6fc151f4cbae949fc7e115112fbd41", size = 221696, upload-time = "2026-01-25T12:58:17.517Z" }, + { url = "https://files.pythonhosted.org/packages/4d/05/d7540bf983f09d32803911afed135524570f8c47bb394bf6206c1dc3a786/coverage-7.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:eeea10169fac01549a7921d27a3e517194ae254b542102267bef7a93ed38c40e", size = 222504, upload-time = "2026-01-25T12:58:19.115Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/1a9f037a736ced0a12aacf6330cdaad5008081142a7070bc58b0f7930cbc/coverage-7.13.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a5b567f0b635b592c917f96b9a9cb3dbd4c320d03f4bf94e9084e494f2e8894", size = 221120, upload-time = "2026-01-25T12:58:21.334Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f0/3d3eac7568ab6096ff23791a526b0048a1ff3f49d0e236b2af6fb6558e88/coverage-7.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed75de7d1217cf3b99365d110975f83af0528c849ef5180a12fd91b5064df9d6", size = 219168, upload-time = "2026-01-25T12:58:23.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/a6/f8b5cfeddbab95fdef4dcd682d82e5dcff7a112ced57a959f89537ee9995/coverage-7.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97e596de8fa9bada4d88fde64a3f4d37f1b6131e4faa32bad7808abc79887ddc", size = 219537, upload-time = "2026-01-25T12:58:24.932Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e6/8d8e6e0c516c838229d1e41cadcec91745f4b1031d4db17ce0043a0423b4/coverage-7.13.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:68c86173562ed4413345410c9480a8d64864ac5e54a5cda236748031e094229f", size = 250528, upload-time = "2026-01-25T12:58:26.567Z" }, + { url = "https://files.pythonhosted.org/packages/8e/78/befa6640f74092b86961f957f26504c8fba3d7da57cc2ab7407391870495/coverage-7.13.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7be4d613638d678b2b3773b8f687537b284d7074695a43fe2fbbfc0e31ceaed1", size = 253132, upload-time = "2026-01-25T12:58:28.251Z" }, + { url = "https://files.pythonhosted.org/packages/9d/10/1630db1edd8ce675124a2ee0f7becc603d2bb7b345c2387b4b95c6907094/coverage-7.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7f63ce526a96acd0e16c4af8b50b64334239550402fb1607ce6a584a6d62ce9", size = 254374, upload-time = "2026-01-25T12:58:30.294Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1d/0d9381647b1e8e6d310ac4140be9c428a0277330991e0c35bdd751e338a4/coverage-7.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:406821f37f864f968e29ac14c3fccae0fec9fdeba48327f0341decf4daf92d7c", size = 250762, upload-time = "2026-01-25T12:58:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5636dfc9a7c871ee8776af83ee33b4c26bc508ad6cee1e89b6419a366582/coverage-7.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ee68e5a4e3e5443623406b905db447dceddffee0dceb39f4e0cd9ec2a35004b5", size = 252502, upload-time = 
"2026-01-25T12:58:33.961Z" }, + { url = "https://files.pythonhosted.org/packages/02/2a/7ff2884d79d420cbb2d12fed6fff727b6d0ef27253140d3cdbbd03187ee0/coverage-7.13.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2ee0e58cca0c17dd9c6c1cdde02bb705c7b3fbfa5f3b0b5afeda20d4ebff8ef4", size = 250463, upload-time = "2026-01-25T12:58:35.529Z" }, + { url = "https://files.pythonhosted.org/packages/91/c0/ba51087db645b6c7261570400fc62c89a16278763f36ba618dc8657a187b/coverage-7.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e5bbb5018bf76a56aabdb64246b5288d5ae1b7d0dd4d0534fe86df2c2992d1c", size = 250288, upload-time = "2026-01-25T12:58:37.226Z" }, + { url = "https://files.pythonhosted.org/packages/03/07/44e6f428551c4d9faf63ebcefe49b30e5c89d1be96f6a3abd86a52da9d15/coverage-7.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a55516c68ef3e08e134e818d5e308ffa6b1337cc8b092b69b24287bf07d38e31", size = 252063, upload-time = "2026-01-25T12:58:38.821Z" }, + { url = "https://files.pythonhosted.org/packages/c2/67/35b730ad7e1859dd57e834d1bc06080d22d2f87457d53f692fce3f24a5a9/coverage-7.13.2-cp313-cp313-win32.whl", hash = "sha256:5b20211c47a8abf4abc3319d8ce2464864fa9f30c5fcaf958a3eed92f4f1fef8", size = 221716, upload-time = "2026-01-25T12:58:40.484Z" }, + { url = "https://files.pythonhosted.org/packages/0d/82/e5fcf5a97c72f45fc14829237a6550bf49d0ab882ac90e04b12a69db76b4/coverage-7.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:14f500232e521201cf031549fb1ebdfc0a40f401cf519157f76c397e586c3beb", size = 222522, upload-time = "2026-01-25T12:58:43.247Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/25d7b2f946d239dd2d6644ca2cc060d24f97551e2af13b6c24c722ae5f97/coverage-7.13.2-cp313-cp313-win_arm64.whl", hash = "sha256:9779310cb5a9778a60c899f075a8514c89fa6d10131445c2207fc893e0b14557", size = 221145, upload-time = "2026-01-25T12:58:45Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/f7/080376c029c8f76fadfe43911d0daffa0cbdc9f9418a0eead70c56fb7f4b/coverage-7.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5a1e41ce5df6b547cbc3d3699381c9e2c2c369c67837e716ed0f549d48e", size = 219861, upload-time = "2026-01-25T12:58:46.586Z" }, + { url = "https://files.pythonhosted.org/packages/42/11/0b5e315af5ab35f4c4a70e64d3314e4eec25eefc6dec13be3a7d5ffe8ac5/coverage-7.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b01899e82a04085b6561eb233fd688474f57455e8ad35cd82286463ba06332b7", size = 220207, upload-time = "2026-01-25T12:58:48.277Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0c/0874d0318fb1062117acbef06a09cf8b63f3060c22265adaad24b36306b7/coverage-7.13.2-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:838943bea48be0e2768b0cf7819544cdedc1bbb2f28427eabb6eb8c9eb2285d3", size = 261504, upload-time = "2026-01-25T12:58:49.904Z" }, + { url = "https://files.pythonhosted.org/packages/83/5e/1cd72c22ecb30751e43a72f40ba50fcef1b7e93e3ea823bd9feda8e51f9a/coverage-7.13.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:93d1d25ec2b27e90bcfef7012992d1f5121b51161b8bffcda756a816cf13c2c3", size = 263582, upload-time = "2026-01-25T12:58:51.582Z" }, + { url = "https://files.pythonhosted.org/packages/9b/da/8acf356707c7a42df4d0657020308e23e5a07397e81492640c186268497c/coverage-7.13.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93b57142f9621b0d12349c43fc7741fe578e4bc914c1e5a54142856cfc0bf421", size = 266008, upload-time = "2026-01-25T12:58:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/41/41/ea1730af99960309423c6ea8d6a4f1fa5564b2d97bd1d29dda4b42611f04/coverage-7.13.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f06799ae1bdfff7ccb8665d75f8291c69110ba9585253de254688aa8a1ccc6c5", size = 260762, upload-time = 
"2026-01-25T12:58:55.372Z" }, + { url = "https://files.pythonhosted.org/packages/22/fa/02884d2080ba71db64fdc127b311db60e01fe6ba797d9c8363725e39f4d5/coverage-7.13.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f9405ab4f81d490811b1d91c7a20361135a2df4c170e7f0b747a794da5b7f23", size = 263571, upload-time = "2026-01-25T12:58:57.52Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6b/4083aaaeba9b3112f55ac57c2ce7001dc4d8fa3fcc228a39f09cc84ede27/coverage-7.13.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f9ab1d5b86f8fbc97a5b3cd6280a3fd85fef3b028689d8a2c00918f0d82c728c", size = 261200, upload-time = "2026-01-25T12:58:59.255Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d2/aea92fa36d61955e8c416ede9cf9bf142aa196f3aea214bb67f85235a050/coverage-7.13.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:f674f59712d67e841525b99e5e2b595250e39b529c3bda14764e4f625a3fa01f", size = 260095, upload-time = "2026-01-25T12:59:01.066Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ae/04ffe96a80f107ea21b22b2367175c621da920063260a1c22f9452fd7866/coverage-7.13.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c6cadac7b8ace1ba9144feb1ae3cb787a6065ba6d23ffc59a934b16406c26573", size = 262284, upload-time = "2026-01-25T12:59:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/1c/7a/6f354dcd7dfc41297791d6fb4e0d618acb55810bde2c1fd14b3939e05c2b/coverage-7.13.2-cp313-cp313t-win32.whl", hash = "sha256:14ae4146465f8e6e6253eba0cccd57423e598a4cb925958b240c805300918343", size = 222389, upload-time = "2026-01-25T12:59:04.563Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d5/080ad292a4a3d3daf411574be0a1f56d6dee2c4fdf6b005342be9fac807f/coverage-7.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9074896edd705a05769e3de0eac0a8388484b503b68863dd06d5e473f874fd47", size = 223450, upload-time = "2026-01-25T12:59:06.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/96/df576fbacc522e9fb8d1c4b7a7fc62eb734be56e2cba1d88d2eabe08ea3f/coverage-7.13.2-cp313-cp313t-win_arm64.whl", hash = "sha256:69e526e14f3f854eda573d3cf40cffd29a1a91c684743d904c33dbdcd0e0f3e7", size = 221707, upload-time = "2026-01-25T12:59:08.363Z" }, + { url = "https://files.pythonhosted.org/packages/55/53/1da9e51a0775634b04fcc11eb25c002fc58ee4f92ce2e8512f94ac5fc5bf/coverage-7.13.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:387a825f43d680e7310e6f325b2167dd093bc8ffd933b83e9aa0983cf6e0a2ef", size = 219213, upload-time = "2026-01-25T12:59:11.909Z" }, + { url = "https://files.pythonhosted.org/packages/46/35/b3caac3ebbd10230fea5a33012b27d19e999a17c9285c4228b4b2e35b7da/coverage-7.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f0d7fea9d8e5d778cd5a9e8fc38308ad688f02040e883cdc13311ef2748cb40f", size = 219549, upload-time = "2026-01-25T12:59:13.638Z" }, + { url = "https://files.pythonhosted.org/packages/76/9c/e1cf7def1bdc72c1907e60703983a588f9558434a2ff94615747bd73c192/coverage-7.13.2-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e080afb413be106c95c4ee96b4fffdc9e2fa56a8bbf90b5c0918e5c4449412f5", size = 250586, upload-time = "2026-01-25T12:59:15.808Z" }, + { url = "https://files.pythonhosted.org/packages/ba/49/f54ec02ed12be66c8d8897270505759e057b0c68564a65c429ccdd1f139e/coverage-7.13.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a7fc042ba3c7ce25b8a9f097eb0f32a5ce1ccdb639d9eec114e26def98e1f8a4", size = 253093, upload-time = "2026-01-25T12:59:17.491Z" }, + { url = "https://files.pythonhosted.org/packages/fb/5e/aaf86be3e181d907e23c0f61fccaeb38de8e6f6b47aed92bf57d8fc9c034/coverage-7.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0ba505e021557f7f8173ee8cd6b926373d8653e5ff7581ae2efce1b11ef4c27", size = 254446, upload-time = "2026-01-25T12:59:19.752Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/c8/a5fa01460e2d75b0c853b392080d6829d3ca8b5ab31e158fa0501bc7c708/coverage-7.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7de326f80e3451bd5cc7239ab46c73ddb658fe0b7649476bc7413572d36cd548", size = 250615, upload-time = "2026-01-25T12:59:21.928Z" }, + { url = "https://files.pythonhosted.org/packages/86/0b/6d56315a55f7062bb66410732c24879ccb2ec527ab6630246de5fe45a1df/coverage-7.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abaea04f1e7e34841d4a7b343904a3f59481f62f9df39e2cd399d69a187a9660", size = 252452, upload-time = "2026-01-25T12:59:23.592Z" }, + { url = "https://files.pythonhosted.org/packages/30/19/9bc550363ebc6b0ea121977ee44d05ecd1e8bf79018b8444f1028701c563/coverage-7.13.2-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9f93959ee0c604bccd8e0697be21de0887b1f73efcc3aa73a3ec0fd13feace92", size = 250418, upload-time = "2026-01-25T12:59:25.392Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/580530a31ca2f0cc6f07a8f2ab5460785b02bb11bdf815d4c4d37a4c5169/coverage-7.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:13fe81ead04e34e105bf1b3c9f9cdf32ce31736ee5d90a8d2de02b9d3e1bcb82", size = 250231, upload-time = "2026-01-25T12:59:27.888Z" }, + { url = "https://files.pythonhosted.org/packages/e2/42/dd9093f919dc3088cb472893651884bd675e3df3d38a43f9053656dca9a2/coverage-7.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d6d16b0f71120e365741bca2cb473ca6fe38930bc5431c5e850ba949f708f892", size = 251888, upload-time = "2026-01-25T12:59:29.636Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a6/0af4053e6e819774626e133c3d6f70fae4d44884bfc4b126cb647baee8d3/coverage-7.13.2-cp314-cp314-win32.whl", hash = "sha256:9b2f4714bb7d99ba3790ee095b3b4ac94767e1347fe424278a0b10acb3ff04fe", size = 221968, upload-time = "2026-01-25T12:59:31.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/cc/5aff1e1f80d55862442855517bb8ad8ad3a68639441ff6287dde6a58558b/coverage-7.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:e4121a90823a063d717a96e0a0529c727fb31ea889369a0ee3ec00ed99bf6859", size = 222783, upload-time = "2026-01-25T12:59:33.118Z" }, + { url = "https://files.pythonhosted.org/packages/de/20/09abafb24f84b3292cc658728803416c15b79f9ee5e68d25238a895b07d9/coverage-7.13.2-cp314-cp314-win_arm64.whl", hash = "sha256:6873f0271b4a15a33e7590f338d823f6f66f91ed147a03938d7ce26efd04eee6", size = 221348, upload-time = "2026-01-25T12:59:34.939Z" }, + { url = "https://files.pythonhosted.org/packages/b6/60/a3820c7232db63be060e4019017cd3426751c2699dab3c62819cdbcea387/coverage-7.13.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f61d349f5b7cd95c34017f1927ee379bfbe9884300d74e07cf630ccf7a610c1b", size = 219950, upload-time = "2026-01-25T12:59:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/fd/37/e4ef5975fdeb86b1e56db9a82f41b032e3d93a840ebaf4064f39e770d5c5/coverage-7.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a43d34ce714f4ca674c0d90beb760eb05aad906f2c47580ccee9da8fe8bfb417", size = 220209, upload-time = "2026-01-25T12:59:38.339Z" }, + { url = "https://files.pythonhosted.org/packages/54/df/d40e091d00c51adca1e251d3b60a8b464112efa3004949e96a74d7c19a64/coverage-7.13.2-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bff1b04cb9d4900ce5c56c4942f047dc7efe57e2608cb7c3c8936e9970ccdbee", size = 261576, upload-time = "2026-01-25T12:59:40.446Z" }, + { url = "https://files.pythonhosted.org/packages/c5/44/5259c4bed54e3392e5c176121af9f71919d96dde853386e7730e705f3520/coverage-7.13.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6ae99e4560963ad8e163e819e5d77d413d331fd00566c1e0856aa252303552c1", size = 263704, upload-time = "2026-01-25T12:59:42.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/bd/ae9f005827abcbe2c70157459ae86053971c9fa14617b63903abbdce26d9/coverage-7.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e79a8c7d461820257d9aa43716c4efc55366d7b292e46b5b37165be1d377405d", size = 266109, upload-time = "2026-01-25T12:59:44.073Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c0/8e279c1c0f5b1eaa3ad9b0fb7a5637fc0379ea7d85a781c0fe0bb3cfc2ab/coverage-7.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:060ee84f6a769d40c492711911a76811b4befb6fba50abb450371abb720f5bd6", size = 260686, upload-time = "2026-01-25T12:59:45.804Z" }, + { url = "https://files.pythonhosted.org/packages/b2/47/3a8112627e9d863e7cddd72894171c929e94491a597811725befdcd76bce/coverage-7.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bca209d001fd03ea2d978f8a4985093240a355c93078aee3f799852c23f561a", size = 263568, upload-time = "2026-01-25T12:59:47.929Z" }, + { url = "https://files.pythonhosted.org/packages/92/bc/7ea367d84afa3120afc3ce6de294fd2dcd33b51e2e7fbe4bbfd200f2cb8c/coverage-7.13.2-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6b8092aa38d72f091db61ef83cb66076f18f02da3e1a75039a4f218629600e04", size = 261174, upload-time = "2026-01-25T12:59:49.717Z" }, + { url = "https://files.pythonhosted.org/packages/33/b7/f1092dcecb6637e31cc2db099581ee5c61a17647849bae6b8261a2b78430/coverage-7.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:4a3158dc2dcce5200d91ec28cd315c999eebff355437d2765840555d765a6e5f", size = 260017, upload-time = "2026-01-25T12:59:51.463Z" }, + { url = "https://files.pythonhosted.org/packages/2b/cd/f3d07d4b95fbe1a2ef0958c15da614f7e4f557720132de34d2dc3aa7e911/coverage-7.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3973f353b2d70bd9796cc12f532a05945232ccae966456c8ed7034cb96bbfd6f", size = 262337, upload-time = "2026-01-25T12:59:53.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/db/b0d5b2873a07cb1e06a55d998697c0a5a540dcefbf353774c99eb3874513/coverage-7.13.2-cp314-cp314t-win32.whl", hash = "sha256:79f6506a678a59d4ded048dc72f1859ebede8ec2b9a2d509ebe161f01c2879d3", size = 222749, upload-time = "2026-01-25T12:59:56.316Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2f/838a5394c082ac57d85f57f6aba53093b30d9089781df72412126505716f/coverage-7.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:196bfeabdccc5a020a57d5a368c681e3a6ceb0447d153aeccc1ab4d70a5032ba", size = 223857, upload-time = "2026-01-25T12:59:58.201Z" }, + { url = "https://files.pythonhosted.org/packages/44/d4/b608243e76ead3a4298824b50922b89ef793e50069ce30316a65c1b4d7ef/coverage-7.13.2-cp314-cp314t-win_arm64.whl", hash = "sha256:69269ab58783e090bfbf5b916ab3d188126e22d6070bbfc93098fdd474ef937c", size = 221881, upload-time = "2026-01-25T13:00:00.449Z" }, + { url = "https://files.pythonhosted.org/packages/d2/db/d291e30fdf7ea617a335531e72294e0c723356d7fdde8fba00610a76bda9/coverage-7.13.2-py3-none-any.whl", hash = "sha256:40ce1ea1e25125556d8e76bd0b61500839a07944cc287ac21d5626f3e620cad5", size = 210943, upload-time = "2026-01-25T13:00:02.388Z" }, ] [package.optional-dependencies] @@ -201,14 +192,14 @@ wheels = [ [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = 
"sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, ] [[package]] @@ -222,11 +213,11 @@ wheels = [ [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = 
"sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] @@ -253,6 +244,79 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" }, ] +[[package]] +name = "librt" +version = "0.7.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/24/5f3646ff414285e0f7708fa4e946b9bf538345a41d1c375c439467721a5e/librt-0.7.8.tar.gz", hash = "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862", size = 148323, upload-time = "2026-01-14T12:56:16.876Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/13/57b06758a13550c5f09563893b004f98e9537ee6ec67b7df85c3571c8832/librt-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b45306a1fc5f53c9330fbee134d8b3227fe5da2ab09813b892790400aa49352d", size = 56521, upload-time = "2026-01-14T12:54:40.066Z" }, + { url = "https://files.pythonhosted.org/packages/c2/24/bbea34d1452a10612fb45ac8356f95351ba40c2517e429602160a49d1fd0/librt-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:864c4b7083eeee250ed55135d2127b260d7eb4b5e953a9e5df09c852e327961b", size = 58456, upload-time = "2026-01-14T12:54:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/04/72/a168808f92253ec3a810beb1eceebc465701197dbc7e865a1c9ceb3c22c7/librt-0.7.8-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6938cc2de153bc927ed8d71c7d2f2ae01b4e96359126c602721340eb7ce1a92d", size = 164392, upload-time = "2026-01-14T12:54:42.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/5c/4c0d406f1b02735c2e7af8ff1ff03a6577b1369b91aa934a9fa2cc42c7ce/librt-0.7.8-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66daa6ac5de4288a5bbfbe55b4caa7bf0cd26b3269c7a476ffe8ce45f837f87d", size = 172959, upload-time = "2026-01-14T12:54:44.602Z" }, + { url = "https://files.pythonhosted.org/packages/82/5f/3e85351c523f73ad8d938989e9a58c7f59fb9c17f761b9981b43f0025ce7/librt-0.7.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4864045f49dc9c974dadb942ac56a74cd0479a2aafa51ce272c490a82322ea3c", size = 186717, upload-time = "2026-01-14T12:54:45.986Z" }, + { url = "https://files.pythonhosted.org/packages/08/f8/18bfe092e402d00fe00d33aa1e01dda1bd583ca100b393b4373847eade6d/librt-0.7.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a36515b1328dc5b3ffce79fe204985ca8572525452eacabee2166f44bb387b2c", size = 184585, upload-time = "2026-01-14T12:54:47.139Z" }, + { url = "https://files.pythonhosted.org/packages/4e/fc/f43972ff56fd790a9fa55028a52ccea1875100edbb856b705bd393b601e3/librt-0.7.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b7e7f140c5169798f90b80d6e607ed2ba5059784968a004107c88ad61fb3641d", size = 180497, upload-time = "2026-01-14T12:54:48.946Z" }, + { url = "https://files.pythonhosted.org/packages/e1/3a/25e36030315a410d3ad0b7d0f19f5f188e88d1613d7d3fd8150523ea1093/librt-0.7.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff71447cb778a4f772ddc4ce360e6ba9c95527ed84a52096bd1bbf9fee2ec7c0", size = 200052, upload-time = "2026-01-14T12:54:50.382Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b8/f3a5a1931ae2a6ad92bf6893b9ef44325b88641d58723529e2c2935e8abe/librt-0.7.8-cp310-cp310-win32.whl", hash = "sha256:047164e5f68b7a8ebdf9fae91a3c2161d3192418aadd61ddd3a86a56cbe3dc85", size = 43477, upload-time = "2026-01-14T12:54:51.815Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/91/c4202779366bc19f871b4ad25db10fcfa1e313c7893feb942f32668e8597/librt-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:d6f254d096d84156a46a84861183c183d30734e52383602443292644d895047c", size = 49806, upload-time = "2026-01-14T12:54:53.149Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a3/87ea9c1049f2c781177496ebee29430e4631f439b8553a4969c88747d5d8/librt-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ff3e9c11aa260c31493d4b3197d1e28dd07768594a4f92bec4506849d736248f", size = 56507, upload-time = "2026-01-14T12:54:54.156Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4a/23bcef149f37f771ad30203d561fcfd45b02bc54947b91f7a9ac34815747/librt-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb52499d0b3ed4aa88746aaf6f36a08314677d5c346234c3987ddc506404eac", size = 58455, upload-time = "2026-01-14T12:54:55.978Z" }, + { url = "https://files.pythonhosted.org/packages/22/6e/46eb9b85c1b9761e0f42b6e6311e1cc544843ac897457062b9d5d0b21df4/librt-0.7.8-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e9c0afebbe6ce177ae8edba0c7c4d626f2a0fc12c33bb993d163817c41a7a05c", size = 164956, upload-time = "2026-01-14T12:54:57.311Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3f/aa7c7f6829fb83989feb7ba9aa11c662b34b4bd4bd5b262f2876ba3db58d/librt-0.7.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:631599598e2c76ded400c0a8722dec09217c89ff64dc54b060f598ed68e7d2a8", size = 174364, upload-time = "2026-01-14T12:54:59.089Z" }, + { url = "https://files.pythonhosted.org/packages/3f/2d/d57d154b40b11f2cb851c4df0d4c4456bacd9b1ccc4ecb593ddec56c1a8b/librt-0.7.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c1ba843ae20db09b9d5c80475376168feb2640ce91cd9906414f23cc267a1ff", size = 188034, upload-time = "2026-01-14T12:55:00.141Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/f9/36c4dad00925c16cd69d744b87f7001792691857d3b79187e7a673e812fb/librt-0.7.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b5b007bb22ea4b255d3ee39dfd06d12534de2fcc3438567d9f48cdaf67ae1ae3", size = 186295, upload-time = "2026-01-14T12:55:01.303Z" }, + { url = "https://files.pythonhosted.org/packages/23/9b/8a9889d3df5efb67695a67785028ccd58e661c3018237b73ad081691d0cb/librt-0.7.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dbd79caaf77a3f590cbe32dc2447f718772d6eea59656a7dcb9311161b10fa75", size = 181470, upload-time = "2026-01-14T12:55:02.492Z" }, + { url = "https://files.pythonhosted.org/packages/43/64/54d6ef11afca01fef8af78c230726a9394759f2addfbf7afc5e3cc032a45/librt-0.7.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:87808a8d1e0bd62a01cafc41f0fd6818b5a5d0ca0d8a55326a81643cdda8f873", size = 201713, upload-time = "2026-01-14T12:55:03.919Z" }, + { url = "https://files.pythonhosted.org/packages/2d/29/73e7ed2991330b28919387656f54109139b49e19cd72902f466bd44415fd/librt-0.7.8-cp311-cp311-win32.whl", hash = "sha256:31724b93baa91512bd0a376e7cf0b59d8b631ee17923b1218a65456fa9bda2e7", size = 43803, upload-time = "2026-01-14T12:55:04.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/de/66766ff48ed02b4d78deea30392ae200bcbd99ae61ba2418b49fd50a4831/librt-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:978e8b5f13e52cf23a9e80f3286d7546baa70bc4ef35b51d97a709d0b28e537c", size = 50080, upload-time = "2026-01-14T12:55:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e3/33450438ff3a8c581d4ed7f798a70b07c3206d298cf0b87d3806e72e3ed8/librt-0.7.8-cp311-cp311-win_arm64.whl", hash = "sha256:20e3946863d872f7cabf7f77c6c9d370b8b3d74333d3a32471c50d3a86c0a232", size = 43383, upload-time = "2026-01-14T12:55:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/56/04/79d8fcb43cae376c7adbab7b2b9f65e48432c9eced62ac96703bcc16e09b/librt-0.7.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:9b6943885b2d49c48d0cff23b16be830ba46b0152d98f62de49e735c6e655a63", size = 57472, upload-time = "2026-01-14T12:55:08.528Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ba/60b96e93043d3d659da91752689023a73981336446ae82078cddf706249e/librt-0.7.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46ef1f4b9b6cc364b11eea0ecc0897314447a66029ee1e55859acb3dd8757c93", size = 58986, upload-time = "2026-01-14T12:55:09.466Z" }, + { url = "https://files.pythonhosted.org/packages/7c/26/5215e4cdcc26e7be7eee21955a7e13cbf1f6d7d7311461a6014544596fac/librt-0.7.8-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:907ad09cfab21e3c86e8f1f87858f7049d1097f77196959c033612f532b4e592", size = 168422, upload-time = "2026-01-14T12:55:10.499Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/e8d1bc86fa0159bfc24f3d798d92cafd3897e84c7fea7fe61b3220915d76/librt-0.7.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2991b6c3775383752b3ca0204842743256f3ad3deeb1d0adc227d56b78a9a850", size = 177478, upload-time = "2026-01-14T12:55:11.577Z" }, + { url = "https://files.pythonhosted.org/packages/57/11/d0268c4b94717a18aa91df1100e767b010f87b7ae444dafaa5a2d80f33a6/librt-0.7.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03679b9856932b8c8f674e87aa3c55ea11c9274301f76ae8dc4d281bda55cf62", size = 192439, upload-time = "2026-01-14T12:55:12.7Z" }, + { url = "https://files.pythonhosted.org/packages/8d/56/1e8e833b95fe684f80f8894ae4d8b7d36acc9203e60478fcae599120a975/librt-0.7.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3968762fec1b2ad34ce57458b6de25dbb4142713e9ca6279a0d352fa4e9f452b", size = 191483, upload-time = "2026-01-14T12:55:13.838Z" }, + { url = "https://files.pythonhosted.org/packages/17/48/f11cf28a2cb6c31f282009e2208312aa84a5ee2732859f7856ee306176d5/librt-0.7.8-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:bb7a7807523a31f03061288cc4ffc065d684c39db7644c676b47d89553c0d714", size = 185376, upload-time = "2026-01-14T12:55:15.017Z" }, + { url = "https://files.pythonhosted.org/packages/b8/6a/d7c116c6da561b9155b184354a60a3d5cdbf08fc7f3678d09c95679d13d9/librt-0.7.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad64a14b1e56e702e19b24aae108f18ad1bf7777f3af5fcd39f87d0c5a814449", size = 206234, upload-time = "2026-01-14T12:55:16.571Z" }, + { url = "https://files.pythonhosted.org/packages/61/de/1975200bb0285fc921c5981d9978ce6ce11ae6d797df815add94a5a848a3/librt-0.7.8-cp312-cp312-win32.whl", hash = "sha256:0241a6ed65e6666236ea78203a73d800dbed896cf12ae25d026d75dc1fcd1dac", size = 44057, upload-time = "2026-01-14T12:55:18.077Z" }, + { url = "https://files.pythonhosted.org/packages/8e/cd/724f2d0b3461426730d4877754b65d39f06a41ac9d0a92d5c6840f72b9ae/librt-0.7.8-cp312-cp312-win_amd64.whl", hash = "sha256:6db5faf064b5bab9675c32a873436b31e01d66ca6984c6f7f92621656033a708", size = 50293, upload-time = "2026-01-14T12:55:19.179Z" }, + { url = "https://files.pythonhosted.org/packages/bd/cf/7e899acd9ee5727ad8160fdcc9994954e79fab371c66535c60e13b968ffc/librt-0.7.8-cp312-cp312-win_arm64.whl", hash = "sha256:57175aa93f804d2c08d2edb7213e09276bd49097611aefc37e3fa38d1fb99ad0", size = 43574, upload-time = "2026-01-14T12:55:20.185Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fe/b1f9de2829cf7fc7649c1dcd202cfd873837c5cc2fc9e526b0e7f716c3d2/librt-0.7.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc", size = 57500, upload-time = "2026-01-14T12:55:21.219Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d4/4a60fbe2e53b825f5d9a77325071d61cd8af8506255067bf0c8527530745/librt-0.7.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2", size = 59019, upload-time = "2026-01-14T12:55:22.256Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/37/61ff80341ba5159afa524445f2d984c30e2821f31f7c73cf166dcafa5564/librt-0.7.8-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3", size = 169015, upload-time = "2026-01-14T12:55:23.24Z" }, + { url = "https://files.pythonhosted.org/packages/1c/86/13d4f2d6a93f181ebf2fc953868826653ede494559da8268023fe567fca3/librt-0.7.8-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6", size = 178161, upload-time = "2026-01-14T12:55:24.826Z" }, + { url = "https://files.pythonhosted.org/packages/88/26/e24ef01305954fc4d771f1f09f3dd682f9eb610e1bec188ffb719374d26e/librt-0.7.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d", size = 193015, upload-time = "2026-01-14T12:55:26.04Z" }, + { url = "https://files.pythonhosted.org/packages/88/a0/92b6bd060e720d7a31ed474d046a69bd55334ec05e9c446d228c4b806ae3/librt-0.7.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e", size = 192038, upload-time = "2026-01-14T12:55:27.208Z" }, + { url = "https://files.pythonhosted.org/packages/06/bb/6f4c650253704279c3a214dad188101d1b5ea23be0606628bc6739456624/librt-0.7.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca", size = 186006, upload-time = "2026-01-14T12:55:28.594Z" }, + { url = "https://files.pythonhosted.org/packages/dc/00/1c409618248d43240cadf45f3efb866837fa77e9a12a71481912135eb481/librt-0.7.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93", size = 206888, upload-time = "2026-01-14T12:55:30.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/83/b2cfe8e76ff5c1c77f8a53da3d5de62d04b5ebf7cf913e37f8bca43b5d07/librt-0.7.8-cp313-cp313-win32.whl", hash = "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951", size = 44126, upload-time = "2026-01-14T12:55:31.44Z" }, + { url = "https://files.pythonhosted.org/packages/a9/0b/c59d45de56a51bd2d3a401fc63449c0ac163e4ef7f523ea8b0c0dee86ec5/librt-0.7.8-cp313-cp313-win_amd64.whl", hash = "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34", size = 50262, upload-time = "2026-01-14T12:55:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b9/973455cec0a1ec592395250c474164c4a58ebf3e0651ee920fef1a2623f1/librt-0.7.8-cp313-cp313-win_arm64.whl", hash = "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09", size = 43600, upload-time = "2026-01-14T12:55:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/1a/73/fa8814c6ce2d49c3827829cadaa1589b0bf4391660bd4510899393a23ebc/librt-0.7.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418", size = 57049, upload-time = "2026-01-14T12:55:35.056Z" }, + { url = "https://files.pythonhosted.org/packages/53/fe/f6c70956da23ea235fd2e3cc16f4f0b4ebdfd72252b02d1164dd58b4e6c3/librt-0.7.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611", size = 58689, upload-time = "2026-01-14T12:55:36.078Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4d/7a2481444ac5fba63050d9abe823e6bc16896f575bfc9c1e5068d516cdce/librt-0.7.8-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758", size = 166808, upload-time = "2026-01-14T12:55:37.595Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/3c/10901d9e18639f8953f57c8986796cfbf4c1c514844a41c9197cf87cb707/librt-0.7.8-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea", size = 175614, upload-time = "2026-01-14T12:55:38.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/01/5cbdde0951a5090a80e5ba44e6357d375048123c572a23eecfb9326993a7/librt-0.7.8-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac", size = 189955, upload-time = "2026-01-14T12:55:39.939Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b4/e80528d2f4b7eaf1d437fcbd6fc6ba4cbeb3e2a0cb9ed5a79f47c7318706/librt-0.7.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398", size = 189370, upload-time = "2026-01-14T12:55:41.057Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/938368f8ce31a9787ecd4becb1e795954782e4312095daf8fd22420227c8/librt-0.7.8-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81", size = 183224, upload-time = "2026-01-14T12:55:42.328Z" }, + { url = "https://files.pythonhosted.org/packages/3c/10/559c310e7a6e4014ac44867d359ef8238465fb499e7eb31b6bfe3e3f86f5/librt-0.7.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83", size = 203541, upload-time = "2026-01-14T12:55:43.501Z" }, + { url = "https://files.pythonhosted.org/packages/f8/db/a0db7acdb6290c215f343835c6efda5b491bb05c3ddc675af558f50fdba3/librt-0.7.8-cp314-cp314-win32.whl", hash = "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d", size = 40657, upload-time = "2026-01-14T12:55:44.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/e0/4f9bdc2a98a798511e81edcd6b54fe82767a715e05d1921115ac70717f6f/librt-0.7.8-cp314-cp314-win_amd64.whl", hash = "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44", size = 46835, upload-time = "2026-01-14T12:55:45.655Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3d/59c6402e3dec2719655a41ad027a7371f8e2334aa794ed11533ad5f34969/librt-0.7.8-cp314-cp314-win_arm64.whl", hash = "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce", size = 39885, upload-time = "2026-01-14T12:55:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9c/2481d80950b83085fb14ba3c595db56330d21bbc7d88a19f20165f3538db/librt-0.7.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f", size = 59161, upload-time = "2026-01-14T12:55:48.45Z" }, + { url = "https://files.pythonhosted.org/packages/96/79/108df2cfc4e672336765d54e3ff887294c1cc36ea4335c73588875775527/librt-0.7.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde", size = 61008, upload-time = "2026-01-14T12:55:49.527Z" }, + { url = "https://files.pythonhosted.org/packages/46/f2/30179898f9994a5637459d6e169b6abdc982012c0a4b2d4c26f50c06f911/librt-0.7.8-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e", size = 187199, upload-time = "2026-01-14T12:55:50.587Z" }, + { url = "https://files.pythonhosted.org/packages/b4/da/f7563db55cebdc884f518ba3791ad033becc25ff68eb70902b1747dc0d70/librt-0.7.8-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b", size = 198317, upload-time = "2026-01-14T12:55:51.991Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/6c/4289acf076ad371471fa86718c30ae353e690d3de6167f7db36f429272f1/librt-0.7.8-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666", size = 210334, upload-time = "2026-01-14T12:55:53.682Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7f/377521ac25b78ac0a5ff44127a0360ee6d5ddd3ce7327949876a30533daa/librt-0.7.8-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581", size = 211031, upload-time = "2026-01-14T12:55:54.827Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/e1e96c3e20b23d00cf90f4aad48f0deb4cdfec2f0ed8380d0d85acf98bbf/librt-0.7.8-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a", size = 204581, upload-time = "2026-01-14T12:55:56.811Z" }, + { url = "https://files.pythonhosted.org/packages/43/71/0f5d010e92ed9747e14bef35e91b6580533510f1e36a8a09eb79ee70b2f0/librt-0.7.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca", size = 224731, upload-time = "2026-01-14T12:55:58.175Z" }, + { url = "https://files.pythonhosted.org/packages/22/f0/07fb6ab5c39a4ca9af3e37554f9d42f25c464829254d72e4ebbd81da351c/librt-0.7.8-cp314-cp314t-win32.whl", hash = "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365", size = 41173, upload-time = "2026-01-14T12:55:59.315Z" }, + { url = "https://files.pythonhosted.org/packages/24/d4/7e4be20993dc6a782639625bd2f97f3c66125c7aa80c82426956811cfccf/librt-0.7.8-cp314-cp314t-win_amd64.whl", hash = "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32", size = 47668, upload-time = "2026-01-14T12:56:00.261Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/85/69f92b2a7b3c0f88ffe107c86b952b397004b5b8ea5a81da3d9c04c04422/librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06", size = 40550, upload-time = "2026-01-14T12:56:01.542Z" }, +] + [[package]] name = "markdown-it-py" version = "4.0.0" @@ -265,6 +329,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] +[[package]] +name = "markdown2" +version = "2.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/42/f8/b2ae8bf5f28f9b510ae097415e6e4cb63226bb28d7ee01aec03a755ba03b/markdown2-2.5.4.tar.gz", hash = "sha256:a09873f0b3c23dbfae589b0080587df52ad75bb09a5fa6559147554736676889", size = 145652, upload-time = "2025-07-27T16:16:24.307Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/06/2697b5043c3ecb720ce0d243fc7cf5024c0b5b1e450506e9b21939019963/markdown2-2.5.4-py3-none-any.whl", hash = "sha256:3c4b2934e677be7fec0e6f2de4410e116681f4ad50ec8e5ba7557be506d3f439", size = 49954, upload-time = "2025-07-27T16:16:23.026Z" }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -361,47 +434,48 @@ wheels = [ [[package]] name = "mypy" -version = "1.18.2" +version = "1.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, { name = "mypy-extensions" }, { name = "pathspec" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = 
"sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, - { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, - { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, - { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, - { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, - { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, - { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, - { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, - { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, - { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, - { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, - { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, - { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, - { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, - { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, - { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, - { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, - { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, - { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, - { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, - { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, - { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, - { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, - { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, - { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, - { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, - { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, - { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, + { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" }, + { url = "https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", 
size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" }, + { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" }, + { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = 
"2025-12-15T05:03:15.606Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, ] [[package]] @@ -480,86 +554,84 @@ wheels = [ [[package]] name = "numpy" -version = "2.3.4" +version = "2.4.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.11'", ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/e7/0e07379944aa8afb49a556a2b54587b828eb41dc9adc56fb7615b678ca53/numpy-2.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e78aecd2800b32e8347ce49316d3eaf04aed849cd5b38e0af39f829a4e59f5eb", size = 21259519, upload-time = 
"2025-10-15T16:15:19.012Z" }, - { url = "https://files.pythonhosted.org/packages/d0/cb/5a69293561e8819b09e34ed9e873b9a82b5f2ade23dce4c51dc507f6cfe1/numpy-2.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd09cc5d65bda1e79432859c40978010622112e9194e581e3415a3eccc7f43f", size = 14452796, upload-time = "2025-10-15T16:15:23.094Z" }, - { url = "https://files.pythonhosted.org/packages/e4/04/ff11611200acd602a1e5129e36cfd25bf01ad8e5cf927baf2e90236eb02e/numpy-2.3.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1b219560ae2c1de48ead517d085bc2d05b9433f8e49d0955c82e8cd37bd7bf36", size = 5381639, upload-time = "2025-10-15T16:15:25.572Z" }, - { url = "https://files.pythonhosted.org/packages/ea/77/e95c757a6fe7a48d28a009267408e8aa382630cc1ad1db7451b3bc21dbb4/numpy-2.3.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:bafa7d87d4c99752d07815ed7a2c0964f8ab311eb8168f41b910bd01d15b6032", size = 6914296, upload-time = "2025-10-15T16:15:27.079Z" }, - { url = "https://files.pythonhosted.org/packages/a3/d2/137c7b6841c942124eae921279e5c41b1c34bab0e6fc60c7348e69afd165/numpy-2.3.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36dc13af226aeab72b7abad501d370d606326a0029b9f435eacb3b8c94b8a8b7", size = 14591904, upload-time = "2025-10-15T16:15:29.044Z" }, - { url = "https://files.pythonhosted.org/packages/bb/32/67e3b0f07b0aba57a078c4ab777a9e8e6bc62f24fb53a2337f75f9691699/numpy-2.3.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7b2f9a18b5ff9824a6af80de4f37f4ec3c2aab05ef08f51c77a093f5b89adda", size = 16939602, upload-time = "2025-10-15T16:15:31.106Z" }, - { url = "https://files.pythonhosted.org/packages/95/22/9639c30e32c93c4cee3ccdb4b09c2d0fbff4dcd06d36b357da06146530fb/numpy-2.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9984bd645a8db6ca15d850ff996856d8762c51a2239225288f08f9050ca240a0", size = 16372661, upload-time = "2025-10-15T16:15:33.546Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/e9/a685079529be2b0156ae0c11b13d6be647743095bb51d46589e95be88086/numpy-2.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64c5825affc76942973a70acf438a8ab618dbd692b84cd5ec40a0a0509edc09a", size = 18884682, upload-time = "2025-10-15T16:15:36.105Z" }, - { url = "https://files.pythonhosted.org/packages/cf/85/f6f00d019b0cc741e64b4e00ce865a57b6bed945d1bbeb1ccadbc647959b/numpy-2.3.4-cp311-cp311-win32.whl", hash = "sha256:ed759bf7a70342f7817d88376eb7142fab9fef8320d6019ef87fae05a99874e1", size = 6570076, upload-time = "2025-10-15T16:15:38.225Z" }, - { url = "https://files.pythonhosted.org/packages/7d/10/f8850982021cb90e2ec31990291f9e830ce7d94eef432b15066e7cbe0bec/numpy-2.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:faba246fb30ea2a526c2e9645f61612341de1a83fb1e0c5edf4ddda5a9c10996", size = 13089358, upload-time = "2025-10-15T16:15:40.404Z" }, - { url = "https://files.pythonhosted.org/packages/d1/ad/afdd8351385edf0b3445f9e24210a9c3971ef4de8fd85155462fc4321d79/numpy-2.3.4-cp311-cp311-win_arm64.whl", hash = "sha256:4c01835e718bcebe80394fd0ac66c07cbb90147ebbdad3dcecd3f25de2ae7e2c", size = 10462292, upload-time = "2025-10-15T16:15:42.896Z" }, - { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" }, - { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" }, - { url = "https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = 
"sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" }, - { url = "https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" }, - { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" }, - { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" }, - { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" }, - { url = "https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, 
upload-time = "2025-10-15T16:16:03.938Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" }, - { url = "https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" }, - { url = "https://files.pythonhosted.org/packages/57/7e/b72610cc91edf138bc588df5150957a4937221ca6058b825b4725c27be62/numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966", size = 20950335, upload-time = "2025-10-15T16:16:10.304Z" }, - { url = "https://files.pythonhosted.org/packages/3e/46/bdd3370dcea2f95ef14af79dbf81e6927102ddf1cc54adc0024d61252fd9/numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3", size = 14179878, upload-time = "2025-10-15T16:16:12.595Z" }, - { url = "https://files.pythonhosted.org/packages/ac/01/5a67cb785bda60f45415d09c2bc245433f1c68dd82eef9c9002c508b5a65/numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197", size = 5108673, upload-time = "2025-10-15T16:16:14.877Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cd/8428e23a9fcebd33988f4cb61208fda832800ca03781f471f3727a820704/numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e", size = 6641438, upload-time = "2025-10-15T16:16:16.805Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/d1/913fe563820f3c6b079f992458f7331278dcd7ba8427e8e745af37ddb44f/numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7", size = 14281290, upload-time = "2025-10-15T16:16:18.764Z" }, - { url = "https://files.pythonhosted.org/packages/9e/7e/7d306ff7cb143e6d975cfa7eb98a93e73495c4deabb7d1b5ecf09ea0fd69/numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953", size = 16636543, upload-time = "2025-10-15T16:16:21.072Z" }, - { url = "https://files.pythonhosted.org/packages/47/6a/8cfc486237e56ccfb0db234945552a557ca266f022d281a2f577b98e955c/numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37", size = 16056117, upload-time = "2025-10-15T16:16:23.369Z" }, - { url = "https://files.pythonhosted.org/packages/b1/0e/42cb5e69ea901e06ce24bfcc4b5664a56f950a70efdcf221f30d9615f3f3/numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd", size = 18577788, upload-time = "2025-10-15T16:16:27.496Z" }, - { url = "https://files.pythonhosted.org/packages/86/92/41c3d5157d3177559ef0a35da50f0cda7fa071f4ba2306dd36818591a5bc/numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646", size = 6282620, upload-time = "2025-10-15T16:16:29.811Z" }, - { url = "https://files.pythonhosted.org/packages/09/97/fd421e8bc50766665ad35536c2bb4ef916533ba1fdd053a62d96cc7c8b95/numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d", size = 12784672, upload-time = "2025-10-15T16:16:31.589Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/df/5474fb2f74970ca8eb978093969b125a84cc3d30e47f82191f981f13a8a0/numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc", size = 10196702, upload-time = "2025-10-15T16:16:33.902Z" }, - { url = "https://files.pythonhosted.org/packages/11/83/66ac031464ec1767ea3ed48ce40f615eb441072945e98693bec0bcd056cc/numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879", size = 21049003, upload-time = "2025-10-15T16:16:36.101Z" }, - { url = "https://files.pythonhosted.org/packages/5f/99/5b14e0e686e61371659a1d5bebd04596b1d72227ce36eed121bb0aeab798/numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562", size = 14302980, upload-time = "2025-10-15T16:16:39.124Z" }, - { url = "https://files.pythonhosted.org/packages/2c/44/e9486649cd087d9fc6920e3fc3ac2aba10838d10804b1e179fb7cbc4e634/numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a", size = 5231472, upload-time = "2025-10-15T16:16:41.168Z" }, - { url = "https://files.pythonhosted.org/packages/3e/51/902b24fa8887e5fe2063fd61b1895a476d0bbf46811ab0c7fdf4bd127345/numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6", size = 6739342, upload-time = "2025-10-15T16:16:43.777Z" }, - { url = "https://files.pythonhosted.org/packages/34/f1/4de9586d05b1962acdcdb1dc4af6646361a643f8c864cef7c852bf509740/numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7", size = 14354338, upload-time = "2025-10-15T16:16:46.081Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/06/1c16103b425de7969d5a76bdf5ada0804b476fed05d5f9e17b777f1cbefd/numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0", size = 16702392, upload-time = "2025-10-15T16:16:48.455Z" }, - { url = "https://files.pythonhosted.org/packages/34/b2/65f4dc1b89b5322093572b6e55161bb42e3e0487067af73627f795cc9d47/numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f", size = 16134998, upload-time = "2025-10-15T16:16:51.114Z" }, - { url = "https://files.pythonhosted.org/packages/d4/11/94ec578896cdb973aaf56425d6c7f2aff4186a5c00fac15ff2ec46998b46/numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64", size = 18651574, upload-time = "2025-10-15T16:16:53.429Z" }, - { url = "https://files.pythonhosted.org/packages/62/b7/7efa763ab33dbccf56dade36938a77345ce8e8192d6b39e470ca25ff3cd0/numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb", size = 6413135, upload-time = "2025-10-15T16:16:55.992Z" }, - { url = "https://files.pythonhosted.org/packages/43/70/aba4c38e8400abcc2f345e13d972fb36c26409b3e644366db7649015f291/numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c", size = 12928582, upload-time = "2025-10-15T16:16:57.943Z" }, - { url = "https://files.pythonhosted.org/packages/67/63/871fad5f0073fc00fbbdd7232962ea1ac40eeaae2bba66c76214f7954236/numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40", size = 10266691, upload-time = "2025-10-15T16:17:00.048Z" }, - { url = 
"https://files.pythonhosted.org/packages/72/71/ae6170143c115732470ae3a2d01512870dd16e0953f8a6dc89525696069b/numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e", size = 20955580, upload-time = "2025-10-15T16:17:02.509Z" }, - { url = "https://files.pythonhosted.org/packages/af/39/4be9222ffd6ca8a30eda033d5f753276a9c3426c397bb137d8e19dedd200/numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff", size = 14188056, upload-time = "2025-10-15T16:17:04.873Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3d/d85f6700d0a4aa4f9491030e1021c2b2b7421b2b38d01acd16734a2bfdc7/numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f", size = 5116555, upload-time = "2025-10-15T16:17:07.499Z" }, - { url = "https://files.pythonhosted.org/packages/bf/04/82c1467d86f47eee8a19a464c92f90a9bb68ccf14a54c5224d7031241ffb/numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b", size = 6643581, upload-time = "2025-10-15T16:17:09.774Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d3/c79841741b837e293f48bd7db89d0ac7a4f2503b382b78a790ef1dc778a5/numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7", size = 14299186, upload-time = "2025-10-15T16:17:11.937Z" }, - { url = "https://files.pythonhosted.org/packages/e8/7e/4a14a769741fbf237eec5a12a2cbc7a4c4e061852b6533bcb9e9a796c908/numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2", size = 16638601, upload-time = "2025-10-15T16:17:14.391Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/87/1c1de269f002ff0a41173fe01dcc925f4ecff59264cd8f96cf3b60d12c9b/numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52", size = 16074219, upload-time = "2025-10-15T16:17:17.058Z" }, - { url = "https://files.pythonhosted.org/packages/cd/28/18f72ee77408e40a76d691001ae599e712ca2a47ddd2c4f695b16c65f077/numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26", size = 18576702, upload-time = "2025-10-15T16:17:19.379Z" }, - { url = "https://files.pythonhosted.org/packages/c3/76/95650169b465ececa8cf4b2e8f6df255d4bf662775e797ade2025cc51ae6/numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc", size = 6337136, upload-time = "2025-10-15T16:17:22.886Z" }, - { url = "https://files.pythonhosted.org/packages/dc/89/a231a5c43ede5d6f77ba4a91e915a87dea4aeea76560ba4d2bf185c683f0/numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9", size = 12920542, upload-time = "2025-10-15T16:17:24.783Z" }, - { url = "https://files.pythonhosted.org/packages/0d/0c/ae9434a888f717c5ed2ff2393b3f344f0ff6f1c793519fa0c540461dc530/numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868", size = 10480213, upload-time = "2025-10-15T16:17:26.935Z" }, - { url = "https://files.pythonhosted.org/packages/83/4b/c4a5f0841f92536f6b9592694a5b5f68c9ab37b775ff342649eadf9055d3/numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec", size = 21052280, upload-time = "2025-10-15T16:17:29.638Z" }, - { url = "https://files.pythonhosted.org/packages/3e/80/90308845fc93b984d2cc96d83e2324ce8ad1fd6efea81b324cba4b673854/numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3", size = 14302930, upload-time = "2025-10-15T16:17:32.384Z" }, - { url = "https://files.pythonhosted.org/packages/3d/4e/07439f22f2a3b247cec4d63a713faae55e1141a36e77fb212881f7cda3fb/numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365", size = 5231504, upload-time = "2025-10-15T16:17:34.515Z" }, - { url = "https://files.pythonhosted.org/packages/ab/de/1e11f2547e2fe3d00482b19721855348b94ada8359aef5d40dd57bfae9df/numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252", size = 6739405, upload-time = "2025-10-15T16:17:36.128Z" }, - { url = "https://files.pythonhosted.org/packages/3b/40/8cd57393a26cebe2e923005db5134a946c62fa56a1087dc7c478f3e30837/numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e", size = 14354866, upload-time = "2025-10-15T16:17:38.884Z" }, - { url = "https://files.pythonhosted.org/packages/93/39/5b3510f023f96874ee6fea2e40dfa99313a00bf3ab779f3c92978f34aace/numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0", size = 16703296, upload-time = "2025-10-15T16:17:41.564Z" }, - { url = "https://files.pythonhosted.org/packages/41/0d/19bb163617c8045209c1996c4e427bccbc4bbff1e2c711f39203c8ddbb4a/numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0", size = 16136046, upload-time = "2025-10-15T16:17:43.901Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c1/6dba12fdf68b02a21ac411c9df19afa66bed2540f467150ca64d246b463d/numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f", size 
= 18652691, upload-time = "2025-10-15T16:17:46.247Z" }, - { url = "https://files.pythonhosted.org/packages/f8/73/f85056701dbbbb910c51d846c58d29fd46b30eecd2b6ba760fc8b8a1641b/numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d", size = 6485782, upload-time = "2025-10-15T16:17:48.872Z" }, - { url = "https://files.pythonhosted.org/packages/17/90/28fa6f9865181cb817c2471ee65678afa8a7e2a1fb16141473d5fa6bacc3/numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6", size = 13113301, upload-time = "2025-10-15T16:17:50.938Z" }, - { url = "https://files.pythonhosted.org/packages/54/23/08c002201a8e7e1f9afba93b97deceb813252d9cfd0d3351caed123dcf97/numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29", size = 10547532, upload-time = "2025-10-15T16:17:53.48Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b6/64898f51a86ec88ca1257a59c1d7fd077b60082a119affefcdf1dd0df8ca/numpy-2.3.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6e274603039f924c0fe5cb73438fa9246699c78a6df1bd3decef9ae592ae1c05", size = 21131552, upload-time = "2025-10-15T16:17:55.845Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4c/f135dc6ebe2b6a3c77f4e4838fa63d350f85c99462012306ada1bd4bc460/numpy-2.3.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d149aee5c72176d9ddbc6803aef9c0f6d2ceeea7626574fc68518da5476fa346", size = 14377796, upload-time = "2025-10-15T16:17:58.308Z" }, - { url = "https://files.pythonhosted.org/packages/d0/a4/f33f9c23fcc13dd8412fc8614559b5b797e0aba9d8e01dfa8bae10c84004/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:6d34ed9db9e6395bb6cd33286035f73a59b058169733a9db9f85e650b88df37e", size = 5306904, upload-time = "2025-10-15T16:18:00.596Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/af/c44097f25f834360f9fb960fa082863e0bad14a42f36527b2a121abdec56/numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:fdebe771ca06bb8d6abce84e51dca9f7921fe6ad34a0c914541b063e9a68928b", size = 6819682, upload-time = "2025-10-15T16:18:02.32Z" }, - { url = "https://files.pythonhosted.org/packages/c5/8c/cd283b54c3c2b77e188f63e23039844f56b23bba1712318288c13fe86baf/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e92defe6c08211eb77902253b14fe5b480ebc5112bc741fd5e9cd0608f847", size = 14422300, upload-time = "2025-10-15T16:18:04.271Z" }, - { url = "https://files.pythonhosted.org/packages/b0/f0/8404db5098d92446b3e3695cf41c6f0ecb703d701cb0b7566ee2177f2eee/numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13b9062e4f5c7ee5c7e5be96f29ba71bc5a37fed3d1d77c37390ae00724d296d", size = 16760806, upload-time = "2025-10-15T16:18:06.668Z" }, - { url = "https://files.pythonhosted.org/packages/95/8e/2844c3959ce9a63acc7c8e50881133d86666f0420bcde695e115ced0920f/numpy-2.3.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:81b3a59793523e552c4a96109dde028aa4448ae06ccac5a76ff6532a85558a7f", size = 12973130, upload-time = "2025-10-15T16:18:09.397Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/2b1bc18424f3ad9af577f6ce23600319968a70575bd7db31ce66731bbef9/numpy-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0cce2a669e3c8ba02ee563c7835f92c153cf02edff1ae05e1823f1dde21b16a5", size = 16944563, upload-time = "2026-01-10T06:42:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/2c/57/26e5f97d075aef3794045a6ca9eada6a4ed70eb9a40e7a4a93f9ac80d704/numpy-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:899d2c18024984814ac7e83f8f49d8e8180e2fbe1b2e252f2e7f1d06bea92425", size = 12645658, upload-time = "2026-01-10T06:42:17.298Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/ba/80fc0b1e3cb2fd5c6143f00f42eb67762aa043eaa05ca924ecc3222a7849/numpy-2.4.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:09aa8a87e45b55a1c2c205d42e2808849ece5c484b2aab11fecabec3841cafba", size = 5474132, upload-time = "2026-01-10T06:42:19.637Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0a5b9a397f0e865ec171187c78d9b57e5588afc439a04ba9cab1ebb2c945/numpy-2.4.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:edee228f76ee2dab4579fad6f51f6a305de09d444280109e0f75df247ff21501", size = 6804159, upload-time = "2026-01-10T06:42:21.44Z" }, + { url = "https://files.pythonhosted.org/packages/86/9c/841c15e691c7085caa6fd162f063eff494099c8327aeccd509d1ab1e36ab/numpy-2.4.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a92f227dbcdc9e4c3e193add1a189a9909947d4f8504c576f4a732fd0b54240a", size = 14708058, upload-time = "2026-01-10T06:42:23.546Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9d/7862db06743f489e6a502a3b93136d73aea27d97b2cf91504f70a27501d6/numpy-2.4.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:538bf4ec353709c765ff75ae616c34d3c3dca1a68312727e8f2676ea644f8509", size = 16651501, upload-time = "2026-01-10T06:42:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9c/6fc34ebcbd4015c6e5f0c0ce38264010ce8a546cb6beacb457b84a75dfc8/numpy-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ac08c63cb7779b85e9d5318e6c3518b424bc1f364ac4cb2c6136f12e5ff2dccc", size = 16492627, upload-time = "2026-01-10T06:42:28.938Z" }, + { url = "https://files.pythonhosted.org/packages/aa/63/2494a8597502dacda439f61b3c0db4da59928150e62be0e99395c3ad23c5/numpy-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f9c360ecef085e5841c539a9a12b883dff005fbd7ce46722f5e9cef52634d82", size = 18585052, upload-time = "2026-01-10T06:42:31.312Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/93/098e1162ae7522fc9b618d6272b77404c4656c72432ecee3abc029aa3de0/numpy-2.4.1-cp311-cp311-win32.whl", hash = "sha256:0f118ce6b972080ba0758c6087c3617b5ba243d806268623dc34216d69099ba0", size = 6236575, upload-time = "2026-01-10T06:42:33.872Z" }, + { url = "https://files.pythonhosted.org/packages/8c/de/f5e79650d23d9e12f38a7bc6b03ea0835b9575494f8ec94c11c6e773b1b1/numpy-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:18e14c4d09d55eef39a6ab5b08406e84bc6869c1e34eef45564804f90b7e0574", size = 12604479, upload-time = "2026-01-10T06:42:35.778Z" }, + { url = "https://files.pythonhosted.org/packages/dd/65/e1097a7047cff12ce3369bd003811516b20ba1078dbdec135e1cd7c16c56/numpy-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:6461de5113088b399d655d45c3897fa188766415d0f568f175ab071c8873bd73", size = 10578325, upload-time = "2026-01-10T06:42:38.518Z" }, + { url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" }, + { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = 
"sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" }, + { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" }, + { url = "https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" }, + { url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" }, + { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = 
"2026-01-10T06:43:01.627Z" }, + { url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" }, + { url = "https://files.pythonhosted.org/packages/04/68/732d4b7811c00775f3bd522a21e8dd5a23f77eb11acdeb663e4a4ebf0ef4/numpy-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d797454e37570cfd61143b73b8debd623c3c0952959adb817dd310a483d58a1b", size = 16652495, upload-time = "2026-01-10T06:43:06.283Z" }, + { url = "https://files.pythonhosted.org/packages/20/ca/857722353421a27f1465652b2c66813eeeccea9d76d5f7b74b99f298e60e/numpy-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c55962006156aeef1629b953fd359064aa47e4d82cfc8e67f0918f7da3344f", size = 12368657, upload-time = "2026-01-10T06:43:09.094Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/2377c917513449cc6240031a79d30eb9a163d32a91e79e0da47c43f2c0c8/numpy-2.4.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:71abbea030f2cfc3092a0ff9f8c8fdefdc5e0bf7d9d9c99663538bb0ecdac0b9", size = 5197256, upload-time = "2026-01-10T06:43:13.634Z" }, + { url = "https://files.pythonhosted.org/packages/17/39/569452228de3f5de9064ac75137082c6214be1f5c532016549a7923ab4b5/numpy-2.4.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b55aa56165b17aaf15520beb9cbd33c9039810e0d9643dd4379e44294c7303e", size = 6545212, upload-time = "2026-01-10T06:43:15.661Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/77333f4d1e4dac4395385482557aeecf4826e6ff517e32ca48e1dafbe42a/numpy-2.4.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0faba4a331195bfa96f93dd9dfaa10b2c7aa8cda3a02b7fd635e588fe821bf5", size = 14402871, upload-time = "2026-01-10T06:43:17.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/87/d341e519956273b39d8d47969dd1eaa1af740615394fe67d06f1efa68773/numpy-2.4.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e3087f53e2b4428766b54932644d148613c5a595150533ae7f00dab2f319a8", size = 16359305, upload-time = "2026-01-10T06:43:19.376Z" }, + { url = "https://files.pythonhosted.org/packages/32/91/789132c6666288eaa20ae8066bb99eba1939362e8f1a534949a215246e97/numpy-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:49e792ec351315e16da54b543db06ca8a86985ab682602d90c60ef4ff4db2a9c", size = 16181909, upload-time = "2026-01-10T06:43:21.808Z" }, + { url = "https://files.pythonhosted.org/packages/cf/b8/090b8bd27b82a844bb22ff8fdf7935cb1980b48d6e439ae116f53cdc2143/numpy-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e9e06c4c2379db47f3f6fc7a8652e7498251789bf8ff5bd43bf478ef314ca2", size = 18284380, upload-time = "2026-01-10T06:43:23.957Z" }, + { url = "https://files.pythonhosted.org/packages/67/78/722b62bd31842ff029412271556a1a27a98f45359dea78b1548a3a9996aa/numpy-2.4.1-cp313-cp313-win32.whl", hash = "sha256:3d1a100e48cb266090a031397863ff8a30050ceefd798f686ff92c67a486753d", size = 5957089, upload-time = "2026-01-10T06:43:27.535Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/cf32198b0b6e18d4fbfa9a21a992a7fca535b9bb2b0cdd217d4a3445b5ca/numpy-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:92a0e65272fd60bfa0d9278e0484c2f52fe03b97aedc02b357f33fe752c52ffb", size = 12307230, upload-time = "2026-01-10T06:43:29.298Z" }, + { url = "https://files.pythonhosted.org/packages/44/6c/534d692bfb7d0afe30611320c5fb713659dcb5104d7cc182aff2aea092f5/numpy-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:20d4649c773f66cc2fc36f663e091f57c3b7655f936a4c681b4250855d1da8f5", size = 10313125, upload-time = "2026-01-10T06:43:31.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/a1/354583ac5c4caa566de6ddfbc42744409b515039e085fab6e0ff942e0df5/numpy-2.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f93bc6892fe7b0663e5ffa83b61aab510aacffd58c16e012bb9352d489d90cb7", size = 12496156, upload-time = "2026-01-10T06:43:34.237Z" }, + { url = "https://files.pythonhosted.org/packages/51/b0/42807c6e8cce58c00127b1dc24d365305189991f2a7917aa694a109c8d7d/numpy-2.4.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:178de8f87948163d98a4c9ab5bee4ce6519ca918926ec8df195af582de28544d", size = 5324663, upload-time = "2026-01-10T06:43:36.211Z" }, + { url = "https://files.pythonhosted.org/packages/fe/55/7a621694010d92375ed82f312b2f28017694ed784775269115323e37f5e2/numpy-2.4.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:98b35775e03ab7f868908b524fc0a84d38932d8daf7b7e1c3c3a1b6c7a2c9f15", size = 6645224, upload-time = "2026-01-10T06:43:37.884Z" }, + { url = "https://files.pythonhosted.org/packages/50/96/9fa8635ed9d7c847d87e30c834f7109fac5e88549d79ef3324ab5c20919f/numpy-2.4.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:941c2a93313d030f219f3a71fd3d91a728b82979a5e8034eb2e60d394a2b83f9", size = 14462352, upload-time = "2026-01-10T06:43:39.479Z" }, + { url = "https://files.pythonhosted.org/packages/03/d1/8cf62d8bb2062da4fb82dd5d49e47c923f9c0738032f054e0a75342faba7/numpy-2.4.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:529050522e983e00a6c1c6b67411083630de8b57f65e853d7b03d9281b8694d2", size = 16407279, upload-time = "2026-01-10T06:43:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/86/1c/95c86e17c6b0b31ce6ef219da00f71113b220bcb14938c8d9a05cee0ff53/numpy-2.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2302dc0224c1cbc49bb94f7064f3f923a971bfae45c33870dcbff63a2a550505", size = 16248316, upload-time = "2026-01-10T06:43:44.121Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/b4/e7f5ff8697274c9d0fa82398b6a372a27e5cef069b37df6355ccb1f1db1a/numpy-2.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9171a42fcad32dcf3fa86f0a4faa5e9f8facefdb276f54b8b390d90447cff4e2", size = 18329884, upload-time = "2026-01-10T06:43:46.613Z" }, + { url = "https://files.pythonhosted.org/packages/37/a4/b073f3e9d77f9aec8debe8ca7f9f6a09e888ad1ba7488f0c3b36a94c03ac/numpy-2.4.1-cp313-cp313t-win32.whl", hash = "sha256:382ad67d99ef49024f11d1ce5dcb5ad8432446e4246a4b014418ba3a1175a1f4", size = 6081138, upload-time = "2026-01-10T06:43:48.854Z" }, + { url = "https://files.pythonhosted.org/packages/16/16/af42337b53844e67752a092481ab869c0523bc95c4e5c98e4dac4e9581ac/numpy-2.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:62fea415f83ad8fdb6c20840578e5fbaf5ddd65e0ec6c3c47eda0f69da172510", size = 12447478, upload-time = "2026-01-10T06:43:50.476Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f8/fa85b2eac68ec631d0b631abc448552cb17d39afd17ec53dcbcc3537681a/numpy-2.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a7870e8c5fc11aef57d6fea4b4085e537a3a60ad2cdd14322ed531fdca68d261", size = 10382981, upload-time = "2026-01-10T06:43:52.575Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/ef08d25698e0e4b4efbad8d55251d20fe2a15f6d9aa7c9b30cd03c165e6f/numpy-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3869ea1ee1a1edc16c29bbe3a2f2a4e515cc3a44d43903ad41e0cacdbaf733dc", size = 16652046, upload-time = "2026-01-10T06:43:54.797Z" }, + { url = "https://files.pythonhosted.org/packages/8f/39/e378b3e3ca13477e5ac70293ec027c438d1927f18637e396fe90b1addd72/numpy-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e867df947d427cdd7a60e3e271729090b0f0df80f5f10ab7dd436f40811699c3", size = 12378858, upload-time = "2026-01-10T06:43:57.099Z" }, + { url = "https://files.pythonhosted.org/packages/c3/74/7ec6154f0006910ed1fdbb7591cf4432307033102b8a22041599935f8969/numpy-2.4.1-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:e3bd2cb07841166420d2fa7146c96ce00cb3410664cbc1a6be028e456c4ee220", size = 5207417, upload-time = "2026-01-10T06:43:59.037Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b7/053ac11820d84e42f8feea5cb81cc4fcd1091499b45b1ed8c7415b1bf831/numpy-2.4.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:f0a90aba7d521e6954670550e561a4cb925713bd944445dbe9e729b71f6cabee", size = 6542643, upload-time = "2026-01-10T06:44:01.852Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c4/2e7908915c0e32ca636b92e4e4a3bdec4cb1e7eb0f8aedf1ed3c68a0d8cd/numpy-2.4.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d558123217a83b2d1ba316b986e9248a1ed1971ad495963d555ccd75dcb1556", size = 14418963, upload-time = "2026-01-10T06:44:04.047Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c0/3ed5083d94e7ffd7c404e54619c088e11f2e1939a9544f5397f4adb1b8ba/numpy-2.4.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f44de05659b67d20499cbc96d49f2650769afcb398b79b324bb6e297bfe3844", size = 16363811, upload-time = "2026-01-10T06:44:06.207Z" }, + { url = "https://files.pythonhosted.org/packages/0e/68/42b66f1852bf525050a67315a4fb94586ab7e9eaa541b1bef530fab0c5dd/numpy-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:69e7419c9012c4aaf695109564e3387f1259f001b4326dfa55907b098af082d3", size = 16197643, upload-time = "2026-01-10T06:44:08.33Z" }, + { url = "https://files.pythonhosted.org/packages/d2/40/e8714fc933d85f82c6bfc7b998a0649ad9769a32f3494ba86598aaf18a48/numpy-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2ffd257026eb1b34352e749d7cc1678b5eeec3e329ad8c9965a797e08ccba205", size = 18289601, upload-time = "2026-01-10T06:44:10.841Z" }, + { url = "https://files.pythonhosted.org/packages/80/9a/0d44b468cad50315127e884802351723daca7cf1c98d102929468c81d439/numpy-2.4.1-cp314-cp314-win32.whl", hash = "sha256:727c6c3275ddefa0dc078524a85e064c057b4f4e71ca5ca29a19163c607be745", size = 6005722, 
upload-time = "2026-01-10T06:44:13.332Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bb/c6513edcce5a831810e2dddc0d3452ce84d208af92405a0c2e58fd8e7881/numpy-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:7d5d7999df434a038d75a748275cd6c0094b0ecdb0837342b332a82defc4dc4d", size = 12438590, upload-time = "2026-01-10T06:44:15.006Z" }, + { url = "https://files.pythonhosted.org/packages/e9/da/a598d5cb260780cf4d255102deba35c1d072dc028c4547832f45dd3323a8/numpy-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:ce9ce141a505053b3c7bce3216071f3bf5c182b8b28930f14cd24d43932cd2df", size = 10596180, upload-time = "2026-01-10T06:44:17.386Z" }, + { url = "https://files.pythonhosted.org/packages/de/bc/ea3f2c96fcb382311827231f911723aeff596364eb6e1b6d1d91128aa29b/numpy-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4e53170557d37ae404bf8d542ca5b7c629d6efa1117dac6a83e394142ea0a43f", size = 12498774, upload-time = "2026-01-10T06:44:19.467Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ab/ef9d939fe4a812648c7a712610b2ca6140b0853c5efea361301006c02ae5/numpy-2.4.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:a73044b752f5d34d4232f25f18160a1cc418ea4507f5f11e299d8ac36875f8a0", size = 5327274, upload-time = "2026-01-10T06:44:23.189Z" }, + { url = "https://files.pythonhosted.org/packages/bd/31/d381368e2a95c3b08b8cf7faac6004849e960f4a042d920337f71cef0cae/numpy-2.4.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:fb1461c99de4d040666ca0444057b06541e5642f800b71c56e6ea92d6a853a0c", size = 6648306, upload-time = "2026-01-10T06:44:25.012Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e5/0989b44ade47430be6323d05c23207636d67d7362a1796ccbccac6773dd2/numpy-2.4.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423797bdab2eeefbe608d7c1ec7b2b4fd3c58d51460f1ee26c7500a1d9c9ee93", size = 14464653, upload-time = "2026-01-10T06:44:26.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/a7/cfbe475c35371cae1358e61f20c5f075badc18c4797ab4354140e1d283cf/numpy-2.4.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52b5f61bdb323b566b528899cc7db2ba5d1015bda7ea811a8bcf3c89c331fa42", size = 16405144, upload-time = "2026-01-10T06:44:29.378Z" }, + { url = "https://files.pythonhosted.org/packages/f8/a3/0c63fe66b534888fa5177cc7cef061541064dbe2b4b60dcc60ffaf0d2157/numpy-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42d7dd5fa36d16d52a84f821eb96031836fd405ee6955dd732f2023724d0aa01", size = 16247425, upload-time = "2026-01-10T06:44:31.721Z" }, + { url = "https://files.pythonhosted.org/packages/6b/2b/55d980cfa2c93bd40ff4c290bf824d792bd41d2fe3487b07707559071760/numpy-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7b6b5e28bbd47b7532698e5db2fe1db693d84b58c254e4389d99a27bb9b8f6b", size = 18330053, upload-time = "2026-01-10T06:44:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/23/12/8b5fc6b9c487a09a7957188e0943c9ff08432c65e34567cabc1623b03a51/numpy-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:5de60946f14ebe15e713a6f22850c2372fa72f4ff9a432ab44aa90edcadaa65a", size = 6152482, upload-time = "2026-01-10T06:44:36.798Z" }, + { url = "https://files.pythonhosted.org/packages/00/a5/9f8ca5856b8940492fc24fbe13c1bc34d65ddf4079097cf9e53164d094e1/numpy-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8f085da926c0d491ffff3096f91078cc97ea67e7e6b65e490bc8dcda65663be2", size = 12627117, upload-time = "2026-01-10T06:44:38.828Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0d/eca3d962f9eef265f01a8e0d20085c6dd1f443cbffc11b6dede81fd82356/numpy-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:6436cffb4f2bf26c974344439439c95e152c9a527013f26b3577be6c2ca64295", size = 10667121, upload-time = "2026-01-10T06:44:41.644Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/48/d86f97919e79314a1cdee4c832178763e6e98e623e123d0bada19e92c15a/numpy-2.4.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8ad35f20be147a204e28b6a0575fbf3540c5e5f802634d4258d55b1ff5facce1", size = 16822202, upload-time = "2026-01-10T06:44:43.738Z" }, + { url = "https://files.pythonhosted.org/packages/51/e9/1e62a7f77e0f37dcfb0ad6a9744e65df00242b6ea37dfafb55debcbf5b55/numpy-2.4.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8097529164c0f3e32bb89412a0905d9100bf434d9692d9fc275e18dcf53c9344", size = 12569985, upload-time = "2026-01-10T06:44:45.945Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7e/914d54f0c801342306fdcdce3e994a56476f1b818c46c47fc21ae968088c/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:ea66d2b41ca4a1630aae5507ee0a71647d3124d1741980138aa8f28f44dac36e", size = 5398484, upload-time = "2026-01-10T06:44:48.012Z" }, + { url = "https://files.pythonhosted.org/packages/1c/d8/9570b68584e293a33474e7b5a77ca404f1dcc655e40050a600dee81d27fb/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d3f8f0df9f4b8be57b3bf74a1d087fec68f927a2fab68231fdb442bf2c12e426", size = 6713216, upload-time = "2026-01-10T06:44:49.725Z" }, + { url = "https://files.pythonhosted.org/packages/33/9b/9dd6e2db8d49eb24f86acaaa5258e5f4c8ed38209a4ee9de2d1a0ca25045/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2023ef86243690c2791fd6353e5b4848eedaa88ca8a2d129f462049f6d484696", size = 14538937, upload-time = "2026-01-10T06:44:51.498Z" }, + { url = "https://files.pythonhosted.org/packages/53/87/d5bd995b0f798a37105b876350d346eea5838bd8f77ea3d7a48392f3812b/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8361ea4220d763e54cff2fbe7d8c93526b744f7cd9ddab47afeff7e14e8503be", size = 16479830, upload-time = "2026-01-10T06:44:53.931Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/c7/b801bf98514b6ae6475e941ac05c58e6411dd863ea92916bfd6d510b08c1/numpy-2.4.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4f1b68ff47680c2925f8063402a693ede215f0257f02596b1318ecdfb1d79e33", size = 12492579, upload-time = "2026-01-10T06:44:57.094Z" }, ] [[package]] @@ -718,7 +790,6 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "jsonpath-ng" }, - { name = "pytest-subtests" }, { name = "types-pyyaml" }, { name = "types-shapely" }, ] @@ -732,7 +803,6 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "jsonpath-ng", specifier = ">=1.7.0" }, - { name = "pytest-subtests", specifier = ">=0.14.2" }, { name = "types-pyyaml", specifier = ">=6.0.12.20250516" }, { name = "types-shapely", specifier = ">=2.1.0.20250710" }, ] @@ -841,34 +911,35 @@ dev = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = 
"sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] name = "pathspec" -version = "0.12.1" +version = "1.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, ] [[package]] name = "pdoc" -version = "15.0.4" +version = "16.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinja2" }, + { name = "markdown2" }, { name = "markupsafe" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/5c/e94c1ab4aa2f8a9cc29d81e1c513c6216946cb3a90957ef7115b12e9363d/pdoc-15.0.4.tar.gz", hash = "sha256:cf9680f10f5b4863381f44ef084b1903f8f356acb0d4cc6b64576ba9fb712c82", size = 155678, upload-time = "2025-06-04T17:05:49.639Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ac/fe/ab3f34a5fb08c6b698439a2c2643caf8fef0d61a86dd3fdcd5501c670ab8/pdoc-16.0.0.tar.gz", hash = "sha256:fdadc40cc717ec53919e3cd720390d4e3bcd40405cb51c4918c119447f913514", size = 111890, upload-time = "2025-10-27T16:02:16.345Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/2c/87250ac73ca8730b2c4e0185b573585f0b42e09562132e6c29d00b3a9bb9/pdoc-15.0.4-py3-none-any.whl", hash = "sha256:f9028e85e7bb8475b054e69bde1f6d26fc4693d25d9fa1b1ce9009bec7f7a5c4", size = 145978, upload-time = "2025-06-04T17:05:48.473Z" }, + { url = "https://files.pythonhosted.org/packages/16/a1/56a17b7f9e18c2bb8df73f3833345d97083b344708b97bab148fdd7e0b82/pdoc-16.0.0-py3-none-any.whl", hash = "sha256:070b51de2743b9b1a4e0ab193a06c9e6c12cf4151cf9137656eebb16e8556628", size = 100014, upload-time = "2025-10-27T16:02:15.007Z" }, ] [[package]] @@ -891,7 +962,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.12.2" +version = "2.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -899,9 +970,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = 
"sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, ] [package.optional-dependencies] @@ -911,116 +982,120 @@ email = [ [[package]] name = "pydantic-core" -version = "2.41.4" +version = "2.41.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, - { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, - { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, - { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, - { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, - { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, 
upload-time = "2025-10-14T10:19:54.214Z" }, - { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" }, - { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, - { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, - { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, - { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, - { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, - { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, - { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, - { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, - { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = 
"2025-10-14T10:20:15.307Z" }, - { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" }, - { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, - { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, - { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, - { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, - { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, - { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, - { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, - { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, - { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, - { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, - { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, - { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, - { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, - { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, - { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, - { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, - { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, - { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, - { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, - { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, - { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, - { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, - { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = 
"2025-10-14T10:21:08.981Z" }, - { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, - { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, - { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, - { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, - { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, - { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, - { url = 
"https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, - { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, - { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, - { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, - { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, - { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, - { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, - { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, - { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, 
upload-time = "2025-10-14T10:21:48.486Z" }, - { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, - { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, - { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, - { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, - { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, - { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, - { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, - { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, - { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, - { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, - { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, - { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, - { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, - { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, - { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, - { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, - { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, - { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, - { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, - { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, - { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, - { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, - { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, - { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, - { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, - { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { 
url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = 
"2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = 
"2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] [[package]] @@ 
-1046,7 +1121,7 @@ wheels = [ [[package]] name = "pytest" -version = "8.4.2" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1057,9 +1132,9 @@ dependencies = [ { name = "pygments" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, ] [[package]] @@ -1076,19 +1151,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] -[[package]] -name = "pytest-subtests" -version = "0.14.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"attrs" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/59/30/6ec8dfc678ddfd1c294212bbd7088c52d3f7fbf3f05e6d8a440c13b9741a/pytest_subtests-0.14.2.tar.gz", hash = "sha256:7154a8665fd528ee70a76d00216a44d139dc3c9c83521a0f779f7b0ad4f800de", size = 18083, upload-time = "2025-06-13T10:50:01.636Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/d4/9bf12e59fb882b0cf4f993871e1adbee094802224c429b00861acee1a169/pytest_subtests-0.14.2-py3-none-any.whl", hash = "sha256:8da0787c994ab372a13a0ad7d390533ad2e4385cac167b3ac501258c885d0b66", size = 9115, upload-time = "2025-06-13T10:50:00.543Z" }, -] - [[package]] name = "pyyaml" version = "6.0.3" @@ -1155,41 +1217,41 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/84/4831f881aa6ff3c976f6d6809b58cdfa350593ffc0dc3c58f5f6586780fb/rich-14.3.1.tar.gz", hash = "sha256:b8c5f568a3a749f9290ec6bddedf835cec33696bfc1e48bcfecb276c7386e4b8", size = 230125, upload-time = "2026-01-24T21:40:44.847Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/87/2a/a1810c8627b9ec8c57ec5ec325d306701ae7be50235e8fd81266e002a3cc/rich-14.3.1-py3-none-any.whl", hash = 
"sha256:da750b1aebbff0b372557426fb3f35ba56de8ef954b3190315eb64076d6fb54e", size = 309952, upload-time = "2026-01-24T21:40:42.969Z" }, ] [[package]] name = "ruff" -version = "0.14.0" +version = "0.14.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/06/f71e3a86b2df0dfa2d2f72195941cd09b44f87711cb7fa5193732cb9a5fc/ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b", size = 4515732, upload-time = "2026-01-22T22:30:17.527Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" }, - { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" }, - { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" }, - { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" }, - { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" }, - { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" }, - { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" }, - { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" }, - { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" }, - { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" }, - { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" }, - { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" }, + { url = "https://files.pythonhosted.org/packages/d2/89/20a12e97bc6b9f9f68343952da08a8099c57237aef953a56b82711d55edd/ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed", size = 10467650, upload-time = "2026-01-22T22:30:08.578Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b1/c5de3fd2d5a831fcae21beda5e3589c0ba67eec8202e992388e4b17a6040/ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c", size = 10883245, upload-time = "2026-01-22T22:30:04.155Z" }, + { url = "https://files.pythonhosted.org/packages/b8/7c/3c1db59a10e7490f8f6f8559d1db8636cbb13dccebf18686f4e3c9d7c772/ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de", size = 10231273, upload-time = "2026-01-22T22:30:34.642Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6e/5e0e0d9674be0f8581d1f5e0f0a04761203affce3232c1a1189d0e3b4dad/ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e", size = 10585753, upload-time = "2026-01-22T22:30:31.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/09/754ab09f46ff1884d422dc26d59ba18b4e5d355be147721bb2518aa2a014/ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8", size = 10286052, upload-time = "2026-01-22T22:30:24.827Z" }, + { url = "https://files.pythonhosted.org/packages/c8/cc/e71f88dd2a12afb5f50733851729d6b571a7c3a35bfdb16c3035132675a0/ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906", size = 11043637, upload-time = "2026-01-22T22:30:13.239Z" }, + { url = "https://files.pythonhosted.org/packages/67/b2/397245026352494497dac935d7f00f1468c03a23a0c5db6ad8fc49ca3fb2/ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480", size = 12194761, upload-time = "2026-01-22T22:30:22.542Z" }, + { url = "https://files.pythonhosted.org/packages/5b/06/06ef271459f778323112c51b7587ce85230785cd64e91772034ddb88f200/ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df", size = 12005701, upload-time = "2026-01-22T22:30:20.499Z" }, + { url = "https://files.pythonhosted.org/packages/41/d6/99364514541cf811ccc5ac44362f88df66373e9fec1b9d1c4cc830593fe7/ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b", size = 11282455, upload-time = "2026-01-22T22:29:59.679Z" }, + { url = "https://files.pythonhosted.org/packages/ca/71/37daa46f89475f8582b7762ecd2722492df26421714a33e72ccc9a84d7a5/ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974", size = 11215882, upload-time = "2026-01-22T22:29:57.032Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/10/a31f86169ec91c0705e618443ee74ede0bdd94da0a57b28e72db68b2dbac/ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66", size = 11180549, upload-time = "2026-01-22T22:30:27.175Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1e/c723f20536b5163adf79bdd10c5f093414293cdf567eed9bdb7b83940f3f/ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13", size = 10543416, upload-time = "2026-01-22T22:30:01.964Z" }, + { url = "https://files.pythonhosted.org/packages/3e/34/8a84cea7e42c2d94ba5bde1d7a4fae164d6318f13f933d92da6d7c2041ff/ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412", size = 10285491, upload-time = "2026-01-22T22:30:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/55/ef/b7c5ea0be82518906c978e365e56a77f8de7678c8bb6651ccfbdc178c29f/ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3", size = 10733525, upload-time = "2026-01-22T22:30:06.499Z" }, + { url = "https://files.pythonhosted.org/packages/6a/5b/aaf1dfbcc53a2811f6cc0a1759de24e4b03e02ba8762daabd9b6bd8c59e3/ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b", size = 11315626, upload-time = "2026-01-22T22:30:36.848Z" }, + { url = "https://files.pythonhosted.org/packages/2c/aa/9f89c719c467dfaf8ad799b9bae0df494513fb21d31a6059cb5870e57e74/ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167", size = 10502442, upload-time = "2026-01-22T22:30:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/87/44/90fa543014c45560cae1fffc63ea059fb3575ee6e1cb654562197e5d16fb/ruff-0.14.14-py3-none-win_amd64.whl", hash = 
"sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd", size = 11630486, upload-time = "2026-01-22T22:30:10.852Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6a/40fee331a52339926a92e17ae748827270b288a35ef4a15c9c8f2ec54715/ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c", size = 10920448, upload-time = "2026-01-22T22:30:15.417Z" }, ] [[package]] @@ -1198,7 +1260,7 @@ version = "2.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } wheels = [ @@ -1271,51 +1333,56 @@ wheels = [ [[package]] name = "tomli" -version = "2.3.0" +version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, - { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, - { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, - { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, - { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, - { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, - { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, - { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, - { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, - { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, - { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, - { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, - { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, - { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", 
size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { 
url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, ] [[package]] @@ -1333,7 +1400,7 @@ version = "2.1.0.20250917" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fa/19/7f28b10994433d43b9caa66f3b9bd6a0a9192b7ce8b5a7fc41534e54b821/types_shapely-2.1.0.20250917.tar.gz", hash = "sha256:5c56670742105aebe40c16414390d35fcaa55d6f774d328c1a18273ab0e2134a", size = 26363, upload-time = "2025-09-17T02:47:44.604Z" } wheels = [ From 6f7cb5cb16d7fa239c266ecff12b2e86fd4d2e86 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 10 Feb 2026 15:53:13 -0800 Subject: [PATCH 05/38] Quiet pytest output for dev workflow - Add -q, --tb=short to `make test` for compact output - Set verbosity_subtests=0 to suppress per-subtest progress characters (the u/,/- markers from pytest's built-in subtests support) --- Makefile | 2 +- pyproject.toml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 6788c8850..0ae1d38e6 100644 --- a/Makefile +++ b/Makefile @@ -16,7 +16,7 
@@ test-all: uv-sync @uv run pytest -W error packages/ test: uv-sync - @uv run pytest -W error packages/ -x + @uv run pytest -W error packages/ -x -q --tb=short coverage: uv-sync @uv run pytest packages/ --cov overture.schema --cov-report=term --cov-report=html && open htmlcov/index.html diff --git a/pyproject.toml b/pyproject.toml index 92201b713..277e2bb89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ dev = [ ] [tool.pytest.ini_options] +verbosity_subtests = 0 pythonpath = [ "packages/overture-schema-addresses-theme/tests", "packages/overture-schema-annex/tests", From abb24f5c29b31ca1cd4137a2fa089a912bc719b9 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 11 Feb 2026 15:47:17 -0800 Subject: [PATCH 06/38] Attach docstrings to NewTypes at runtime Bare triple-quoted strings after NewType assignments are expression statements that Python never attaches to the NewType object, leaving __doc__ as None. Convert each to an explicit __doc__ assignment so codegen and introspection tools can read them at runtime. Same pattern DocumentedEnum uses for enum member docs. 
--- .../src/overture/schema/core/names.py | 2 +- .../schema/core/scoping/opening_hours.py | 2 +- .../schema/system/primitive/__init__.py | 18 ++++++++--------- .../src/overture/schema/system/ref/id.py | 2 +- .../src/overture/schema/system/string.py | 20 +++++++++---------- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/packages/overture-schema-core/src/overture/schema/core/names.py b/packages/overture-schema-core/src/overture/schema/core/names.py index e968b24a7..aed77d985 100644 --- a/packages/overture-schema-core/src/overture/schema/core/names.py +++ b/packages/overture-schema-core/src/overture/schema/core/names.py @@ -110,7 +110,7 @@ Field(json_schema_extra={"additionalProperties": False}), ], ) -"""A mapping from language to the most commonly used or recognized name in that language.""" +CommonNames.__doc__ = """A mapping from language to the most commonly used or recognized name in that language.""" class NameVariant(str, DocumentedEnum): diff --git a/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py b/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py index a8efc8c16..8f8acbe6a 100644 --- a/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py +++ b/packages/overture-schema-core/src/overture/schema/core/scoping/opening_hours.py @@ -15,7 +15,7 @@ ), ], ) -""" +OpeningHours.__doc__ = """ Time span or time spans during which something is open or active, specified in the OpenStreetMap opening hours specification: https://wiki.openstreetmap.org/wiki/Key:opening_hours/specification. 
""" diff --git a/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py b/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py index 96bfd0250..2b27ef89e 100644 --- a/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py +++ b/packages/overture-schema-system/src/overture/schema/system/primitive/__init__.py @@ -24,7 +24,7 @@ ) uint8 = NewType("uint8", Annotated[int, Field(ge=0, le=255)]) # type: ignore [type-arg] -""" +uint8.__doc__ = """ Portable 8-bit unsigned integer. This is an `int` at runtime, but using `uint8` for Pydantic model fields instead of `int` makes them @@ -32,7 +32,7 @@ """ uint16 = NewType("uint16", Annotated[int, Field(ge=0, le=65535)]) # type: ignore[type-arg] -""" +uint16.__doc__ = """ Portable 16-bit unsigned integer. This is an `int` at runtime, but using `uint16` for Pydantic model fields instead of `int` makes @@ -40,7 +40,7 @@ """ uint32 = NewType("uint32", Annotated[int, Field(ge=0, le=4294967295)]) # type: ignore[type-arg] -""" +uint32.__doc__ = """ Portable 32-bit unsigned integer. This is an `int` at runtime, but using `uint32` for Pydantic model fields instead of `int` makes @@ -48,7 +48,7 @@ """ int8 = NewType("int8", Annotated[int, Field(ge=-128, le=127)]) # type: ignore[type-arg] -""" +int8.__doc__ = """ Portable 8-bit signed integer. This is an `int` at runtime, but using `int8` for Pydantic model fields instead of `int` makes them @@ -56,7 +56,7 @@ """ int16 = NewType("int16", Annotated[int, Field(ge=-32768, le=32767)]) # type: ignore[type-arg] -""" +int16.__doc__ = """ Portable 16-bit signed integer. This is an `int` at runtime, but using `int16` for Pydantic model fields instead of `int` makes them @@ -64,7 +64,7 @@ """ int32 = NewType("int32", Annotated[int, Field(ge=-(2**31), le=2**31 - 1)]) # type: ignore[type-arg] -""" +int32.__doc__ = """ Portable 32-bit signed integer. 
This is an `int` at runtime, but using `int32` for Pydantic model fields instead of `int` makes them @@ -72,7 +72,7 @@ """ int64 = NewType("int64", Annotated[int, Field(ge=-(2**63), le=2**63 - 1)]) # type: ignore[type-arg] -""" +int64.__doc__ = """ Portable 64-bit signed integer. This is an `int` at runtime, but using `int64` for Pydantic model fields instead of `int` makes them @@ -80,7 +80,7 @@ """ float32 = NewType("float32", float) # type: ignore[type-arg] -""" +float32.__doc__ = """ Portable IEEE 32-bit floating point number. This is a `float` at runtime, but using `float32` for Pydantic model fields instead of `float` makes @@ -88,7 +88,7 @@ """ float64 = NewType("float64", float) # type: ignore[type-arg] -""" +float64.__doc__ = """ Portable IEEE 64-bit floating point number. This is a `float` at runtime, but using `float64` for Pydantic model fields instead of `float` makes diff --git a/packages/overture-schema-system/src/overture/schema/system/ref/id.py b/packages/overture-schema-system/src/overture/schema/system/ref/id.py index eb467f142..2ffa5dad8 100644 --- a/packages/overture-schema-system/src/overture/schema/system/ref/id.py +++ b/packages/overture-schema-system/src/overture/schema/system/ref/id.py @@ -18,7 +18,7 @@ ), ], ) -""" +Id.__doc__ = """ A unique identifier. """ diff --git a/packages/overture-schema-system/src/overture/schema/system/string.py b/packages/overture-schema-system/src/overture/schema/system/string.py index cac9112ca..6533a192a 100644 --- a/packages/overture-schema-system/src/overture/schema/system/string.py +++ b/packages/overture-schema-system/src/overture/schema/system/string.py @@ -35,7 +35,7 @@ Field(description="An ISO 3166-1 alpha-2 country code"), ], ) # type: ignore [type-arg] -""" +CountryCodeAlpha2.__doc__ = """ An ISO-3166-1 alpha-2 country code. """ @@ -49,7 +49,7 @@ ), ], ) # type: ignore [type-arg] -""" +HexColor.__doc__ = """ A color represented as an #RRGGBB or #RGB hexadecimal string. 
For example: @@ -67,7 +67,7 @@ Field(description="A JSON Pointer (as described in RFC-6901)"), ], ) # type: ignore [type-arg] -""" +JsonPointer.__doc__ = """ A JSON Pointer As described in `the JSON Pointer specification, RFC-6901`_. @@ -91,7 +91,7 @@ ), ], ) # type: ignore [type-arg] -""" +LanguageTag.__doc__ = """ A BCP-47 language tag. As described in `Tags for Identifying Languages, BCP-47`_. @@ -114,7 +114,7 @@ Field(description="A string that contains no whitespace characters"), ], ) # type: ignore [type-arg] -""" +NoWhitespaceString.__doc__ = """ A string that contains no whitespace characters. """ @@ -124,7 +124,7 @@ str, PhoneNumberConstraint(), Field(description="An international phone number") ], ) # type: ignore [type-arg] -""" +PhoneNumber.__doc__ = """ An international phone number. """ @@ -136,12 +136,12 @@ Field(description="An ISO 3166-2 principal subdivision code"), ], ) # type: ignore [type-arg] -""" +RegionCode.__doc__ = """ An ISO 3166-2 principal subdivision code. """ SnakeCaseString = NewType("SnakeCaseString", Annotated[str, SnakeCaseConstraint()]) -""" +SnakeCaseString.__doc__ = """ A string that looks like a snake case identifier, like a Python variable name (*e.g.*, `foo_bar`). """ @@ -155,7 +155,7 @@ ), ], ) # type: ignore [type-arg] -""" +StrippedString.__doc__ = """ A string without leading or trailing whitespace. """ @@ -167,7 +167,7 @@ Field(description="A wikidata ID, as found on https://www.wikidata.org/"), ], ) # type: ignore [type-arg] -""" +WikidataId.__doc__ = """ A wikidata ID, as found on https://www.wikidata.org/. - `"Q42"` From 0edb552e08b829ae2c552c787c5ae7d22182869b Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:54:21 -0800 Subject: [PATCH 07/38] fix(core): add missing f-prefix to string continuation lines OvertureFeature validator error message had two continuation lines missing the f-prefix, so {self.__class__.__name__} was rendered literally. Also add missing space before "and". 
--- .../overture-schema-core/src/overture/schema/core/models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/overture-schema-core/src/overture/schema/core/models.py b/packages/overture-schema-core/src/overture/schema/core/models.py index ed43eabe9..dbb77a281 100644 --- a/packages/overture-schema-core/src/overture/schema/core/models.py +++ b/packages/overture-schema-core/src/overture/schema/core/models.py @@ -65,9 +65,9 @@ def __validate_ext_fields__(self) -> Self: maybe_plural = "s" if len(invalid_extra_fields) > 1 else "" raise ValueError( f"invalid extra field name{maybe_plural}: {', '.join(invalid_extra_fields)} " - "(extra fields are temporarily allowed, but only if their names start with 'ext_', " - "but all extra field name support in {self.__class__.__name__} is on a deprecation path" - "and will be removed)" + f"(extra fields are temporarily allowed, but only if their names start with 'ext_', " + f"but all extra field name support in {self.__class__.__name__} is on a deprecation path " + f"and will be removed)" ) return self From f969ffcf07b6ab6821ccebf72fdfd32972caa2ca Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:30:58 -0800 Subject: [PATCH 08/38] fix(system): use dict instead of Mapping in test util type hints Also fix "supserset" typo in docstring. 
--- packages/overture-schema-system/tests/util.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/overture-schema-system/tests/util.py b/packages/overture-schema-system/tests/util.py index 0ce2e4932..f9725f5ba 100644 --- a/packages/overture-schema-system/tests/util.py +++ b/packages/overture-schema-system/tests/util.py @@ -10,10 +10,10 @@ def subset_conflicts( Parameters ---------- - a : dict[str, object] + a : Mapping[str, object] Candidate subset of `b` - b : dict[str, object] - Candidate supserset of `a` + b : Mapping[str, object] + Candidate superset of `a` Returns ------- From b11b8c22d22af96bcf1cf5d209236035dfdad82d Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 21:07:41 -0800 Subject: [PATCH 09/38] fix(cli): discover discriminator fields at runtime Replace hardcoded discriminator_fields tuple ("type", "theme", "subtype") in _process_union_member with the discriminator field name extracted from the union's Annotated metadata. introspect_union already extracted the discriminator field name but didn't pass it through to member processing. Now it does, so unions using any field name as discriminator work correctly. For nested unions, parent discriminator values are extracted from nested leaf models to preserve structural tuple classification. Feature.field_discriminator now attaches _field_name to the callable, and _extract_discriminator_name reads it. This handles the Discriminator-wrapping-a-callable case that str(disc) got wrong silently. 
--- .../src/overture/schema/cli/type_analysis.py | 54 ++++++++++----- .../tests/test_type_analysis.py | 66 +++++++++++++++++++ .../src/overture/schema/system/feature.py | 33 +++++++++- .../tests/test_feature.py | 12 ++++ 4 files changed, 148 insertions(+), 17 deletions(-) diff --git a/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py b/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py index 9316c80c0..4afcbf75b 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py @@ -8,6 +8,8 @@ from pydantic import BaseModel from pydantic.fields import FieldInfo +from overture.schema.system.feature import resolve_discriminator_field_name + from .types import ErrorLocation, ValidationErrorDict # Type aliases for structural tuple elements @@ -29,11 +31,23 @@ class UnionMetadata: nested_unions: dict[str, "UnionMetadata"] +def _extract_literal_value(model: type[BaseModel], field_name: str) -> object | None: + """Extract the single Literal value from a model field, if present.""" + field_info = model.model_fields.get(field_name) + if field_info is None or field_info.annotation is None: + return None + if get_origin(field_info.annotation) is Literal: + args = get_args(field_info.annotation) + return args[0] if args else None + return None + + def _process_union_member( member: Any, # noqa: ANN401 discriminator_to_model: dict[str, type[BaseModel]], model_name_to_model: dict[str, type[BaseModel]], nested_unions: dict[str, UnionMetadata], + discriminator_field: str | None = None, ) -> None: """Process a single union member, handling nesting recursively. 
@@ -43,6 +57,7 @@ def _process_union_member( discriminator_to_model: Dict to populate with discriminator value mappings model_name_to_model: Dict to populate with model name mappings nested_unions: Dict to populate with nested union metadata + discriminator_field: The discriminator field name from the parent union annotation """ member_origin = get_origin(member) @@ -63,12 +78,22 @@ def _process_union_member( nested_metadata = introspect_union(member) nested_unions[str(member)] = nested_metadata discriminator_to_model.update(nested_metadata.discriminator_to_model) + # Extract parent discriminator values from nested leaf models + if discriminator_field is not None: + for model in nested_metadata.model_name_to_model.values(): + value = _extract_literal_value(model, discriminator_field) + if value is not None: + discriminator_to_model[str(value)] = model return # Unwrap Annotated to get the actual type (e.g., Annotated[Building, Tag('building')]) # and process it recursively _process_union_member( - member_args[0], discriminator_to_model, model_name_to_model, nested_unions + member_args[0], + discriminator_to_model, + model_name_to_model, + nested_unions, + discriminator_field, ) return @@ -76,17 +101,10 @@ def _process_union_member( if inspect.isclass(member) and issubclass(member, BaseModel): model_name_to_model[member.__name__] = member - # Extract discriminator values from known discriminator fields only - # Restrict to known discriminator names to avoid false positives from other Literal fields - discriminator_fields = ("type", "theme", "subtype") - for field_name, field_info in member.model_fields.items(): - if field_name not in discriminator_fields: - continue - annotation = field_info.annotation - if get_origin(annotation) is Literal: - literal_args = get_args(annotation) - if literal_args: - discriminator_to_model[literal_args[0]] = member + if discriminator_field is not None: + value = _extract_literal_value(member, discriminator_field) + if value is not 
None: + discriminator_to_model[str(value)] = member def introspect_union(union_type: Any) -> UnionMetadata: # noqa: ANN401 @@ -163,9 +181,9 @@ def introspect_union(union_type: Any) -> UnionMetadata: # noqa: ANN401 if isinstance(metadata, FieldInfo) and hasattr( metadata, "discriminator" ): - disc = metadata.discriminator - # discriminator can be a string or Discriminator object - discriminator_field = str(disc) if disc is not None else None + discriminator_field = resolve_discriminator_field_name( + metadata.discriminator + ) break # Get union members @@ -183,7 +201,11 @@ def introspect_union(union_type: Any) -> UnionMetadata: # noqa: ANN401 # Process each union member for member in union_members: _process_union_member( - member, discriminator_to_model, model_name_to_model, nested_unions + member, + discriminator_to_model, + model_name_to_model, + nested_unions, + discriminator_field, ) return UnionMetadata( diff --git a/packages/overture-schema-cli/tests/test_type_analysis.py b/packages/overture-schema-cli/tests/test_type_analysis.py index 12fb10e75..a479f1344 100644 --- a/packages/overture-schema-cli/tests/test_type_analysis.py +++ b/packages/overture-schema-cli/tests/test_type_analysis.py @@ -283,6 +283,72 @@ class Place(BaseModel): assert literal_value not in metadata.discriminator_to_model +class TestDiscriminatorDiscovery: + """Tests for runtime discriminator field discovery (not hardcoded).""" + + def test_nonstandard_discriminator_field_name(self) -> None: + """Discriminator field not named type/theme/subtype is discovered at runtime.""" + + class Cat(BaseModel): + kind: Literal["cat"] + indoor: bool + + class Dog(BaseModel): + kind: Literal["dog"] + breed: str + + UnionType = Annotated[Cat | Dog, Field(discriminator="kind")] + metadata = introspect_union(UnionType) + + assert metadata.is_discriminated is True + assert metadata.discriminator_field == "kind" + assert metadata.discriminator_to_model["cat"] == Cat + assert 
metadata.discriminator_to_model["dog"] == Dog + + def test_non_discriminator_literal_fields_excluded(self) -> None: + """Literal fields that aren't the discriminator are not in the mapping.""" + + class Building(BaseModel): + type: Literal["building"] + status: Literal["active"] + + class Place(BaseModel): + type: Literal["place"] + status: Literal["active"] + + UnionType = Annotated[Building | Place, Field(discriminator="type")] + metadata = introspect_union(UnionType) + + assert "building" in metadata.discriminator_to_model + assert "place" in metadata.discriminator_to_model + assert "active" not in metadata.discriminator_to_model + + def test_callable_discriminator_extracts_field_name(self) -> None: + """Callable discriminators (Feature.field_discriminator) are supported.""" + from pydantic import Discriminator + + class ModelA(BaseModel): + kind: Literal["a"] + + class ModelB(BaseModel): + kind: Literal["b"] + + def get_kind(data: object) -> str | None: + return data.get("kind") if isinstance(data, dict) else None + + get_kind._field_name = "kind" # type: ignore[attr-defined] + + UnionType = Annotated[ + ModelA | ModelB, Field(discriminator=Discriminator(get_kind)) + ] + metadata = introspect_union(UnionType) + + assert metadata.is_discriminated is True + assert metadata.discriminator_field == "kind" + assert metadata.discriminator_to_model["a"] == ModelA + assert metadata.discriminator_to_model["b"] == ModelB + + class TestStructuralTupleCaching: """Tests for structural tuple caching functionality.""" diff --git a/packages/overture-schema-system/src/overture/schema/system/feature.py b/packages/overture-schema-system/src/overture/schema/system/feature.py index de3f14df6..fd067df54 100644 --- a/packages/overture-schema-system/src/overture/schema/system/feature.py +++ b/packages/overture-schema-system/src/overture/schema/system/feature.py @@ -30,6 +30,33 @@ from overture.schema.system.ref import Id +def resolve_discriminator_field_name(discriminator: object) -> 
str | None: + """Resolve a Pydantic discriminator value to its field name string. + + Handles the three forms a discriminator can take: + - A plain string (used directly as the field name). + - A ``pydantic.Discriminator`` whose ``.discriminator`` attribute is a string. + - A ``pydantic.Discriminator`` whose ``.discriminator`` is a callable + produced by ``Feature.field_discriminator``, which stores the field name + as ``_field_name`` on the callable. + + Returns None if *discriminator* is None or its field name cannot be + determined. + """ + if discriminator is None: + return None + if isinstance(discriminator, str): + return discriminator + inner = getattr(discriminator, "discriminator", None) + if isinstance(inner, str): + return inner + if callable(inner): + field_name = getattr(inner, "_field_name", None) + if isinstance(field_name, str): + return field_name + return None + + class Feature(BaseModel): """ A feature is something you can point to on a map—like a building, road, lake, or park—with the @@ -206,7 +233,10 @@ def field_discriminator( Returns ------- Discriminator - Discriminator that enables discriminated unions that include features + Discriminator that enables discriminated unions that include features. + The inner callable carries a ``_field_name`` attribute set to *field*, + allowing introspection code to recover the discriminator field name + without hardcoding it. 
Raises ------ @@ -296,6 +326,7 @@ def get_discriminator_value(data: object) -> Any: else getattr(data, field, None) ) + get_discriminator_value._field_name = field # type: ignore[attr-defined] return Discriminator(get_discriminator_value) @model_serializer(mode="wrap") diff --git a/packages/overture-schema-system/tests/test_feature.py b/packages/overture-schema-system/tests/test_feature.py index ec0dfc795..834cd7881 100644 --- a/packages/overture-schema-system/tests/test_feature.py +++ b/packages/overture-schema-system/tests/test_feature.py @@ -332,6 +332,18 @@ class BarModel(BaseModel): ): tap.validate_json(json.dumps(data)) + def test_field_discriminator_attaches_field_name(self) -> None: + """The callable returned by field_discriminator carries _field_name for introspection.""" + + class A(Feature): + kind: Literal["a"] + + class B(Feature): + kind: Literal["b"] + + disc = Feature.field_discriminator("kind", A, B) + assert disc.discriminator._field_name == "kind" # type: ignore[union-attr] + def test_error_field_not_str(self) -> None: with pytest.raises( TypeError, match="`field` must be a `str`, but 42 has type `int`" From b4237b54257d90ce8455031645b3ccf17e9a6d01 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 25 Feb 2026 08:32:58 -0800 Subject: [PATCH 10/38] refactor(cli): tighten type analysis contracts Make _extract_literal_value return str directly instead of object, eliminating implicit str() conversions at call sites. Add comment explaining nested union re-indexing under the parent discriminator. Remove redundant test covered by TestDiscriminatorDiscovery and debugging print() calls from TestStructuralTuples. 
--- .../src/overture/schema/cli/type_analysis.py | 14 +++--- .../tests/test_type_analysis.py | 48 ------------------- 2 files changed, 8 insertions(+), 54 deletions(-) diff --git a/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py b/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py index 4afcbf75b..4362d7f76 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/type_analysis.py @@ -31,14 +31,14 @@ class UnionMetadata: nested_unions: dict[str, "UnionMetadata"] -def _extract_literal_value(model: type[BaseModel], field_name: str) -> object | None: - """Extract the single Literal value from a model field, if present.""" +def _extract_literal_value(model: type[BaseModel], field_name: str) -> str | None: + """Extract the single Literal value from a model field as a string, if present.""" field_info = model.model_fields.get(field_name) if field_info is None or field_info.annotation is None: return None if get_origin(field_info.annotation) is Literal: args = get_args(field_info.annotation) - return args[0] if args else None + return str(args[0]) if args else None return None @@ -78,12 +78,14 @@ def _process_union_member( nested_metadata = introspect_union(member) nested_unions[str(member)] = nested_metadata discriminator_to_model.update(nested_metadata.discriminator_to_model) - # Extract parent discriminator values from nested leaf models + # The nested union's discriminator_to_model uses the nested discriminator + # field (e.g. "subtype"). Re-extract using the parent discriminator field + # (e.g. "type") so leaf models are also reachable by the parent's values. 
if discriminator_field is not None: for model in nested_metadata.model_name_to_model.values(): value = _extract_literal_value(model, discriminator_field) if value is not None: - discriminator_to_model[str(value)] = model + discriminator_to_model[value] = model return # Unwrap Annotated to get the actual type (e.g., Annotated[Building, Tag('building')]) @@ -104,7 +106,7 @@ def _process_union_member( if discriminator_field is not None: value = _extract_literal_value(member, discriminator_field) if value is not None: - discriminator_to_model[str(value)] = member + discriminator_to_model[value] = member def introspect_union(union_type: Any) -> UnionMetadata: # noqa: ANN401 diff --git a/packages/overture-schema-cli/tests/test_type_analysis.py b/packages/overture-schema-cli/tests/test_type_analysis.py index a479f1344..0a21e168a 100644 --- a/packages/overture-schema-cli/tests/test_type_analysis.py +++ b/packages/overture-schema-cli/tests/test_type_analysis.py @@ -29,14 +29,9 @@ class ModelB(BaseModel): UnionType = Annotated[ModelA | ModelB, Field(discriminator="type")] - # Test simple discriminated union error path loc = ("a", "required_a") metadata = introspect_union(UnionType) structural = create_structural_tuple(loc, metadata) - print(f"\nloc: {loc}") - print(f"structural: {structural}") - assert len(structural) == len(loc) - # First element should be discriminator, second should be field assert structural == ("discriminator", "field") def test_mixed_union_structural_tuple(self) -> None: @@ -56,17 +51,11 @@ class Sources(BaseModel): # Test discriminated side loc1 = ("tagged-union[ModelA]", "a", "required_a") structural1 = create_structural_tuple(loc1, metadata) - print("\nDiscriminated side:") - print(f"loc: {loc1}") - print(f"structural: {structural1}") assert structural1 == ("union", "discriminator", "field") # Test non-discriminated side loc2 = ("Sources", "datasets") structural2 = create_structural_tuple(loc2, metadata) - print("\nNon-discriminated side:") - 
print(f"loc: {loc2}") - print(f"structural: {structural2}") assert structural2 == ("model", "field") def test_list_context_structural_tuple(self) -> None: @@ -78,13 +67,9 @@ class ModelA(BaseModel): UnionType = Annotated[ModelA, Field(discriminator="type")] - # Test list context loc = (1, "a", "required_a") metadata = introspect_union(list[UnionType]) structural = create_structural_tuple(loc, metadata) - print("\nList context:") - print(f"loc: {loc}") - print(f"structural: {structural}") assert structural == ("list_index", "discriminator", "field") def test_nested_discriminated_structural_tuple(self) -> None: @@ -114,13 +99,9 @@ class Sources(BaseModel): FeatureUnion = Annotated[Building | SegmentUnion, Field(discriminator="type")] MixedUnion = FeatureUnion | Sources - # Test nested discriminator path (type=segment, subtype=road) loc = ("tagged-union[SegmentUnion]", "segment", "road", "road_class") metadata = introspect_union(MixedUnion) structural = create_structural_tuple(loc, metadata) - print("\nNested discriminated:") - print(f"loc: {loc}") - print(f"structural: {structural}") assert structural == ("union", "discriminator", "discriminator", "field") @@ -253,35 +234,6 @@ class ModelA(BaseModel): assert metadata.discriminator_field == "type" assert "a" in metadata.discriminator_to_model - @pytest.mark.parametrize( - "literal_value,expected_in_mapping", - [ - pytest.param("building", True, id="literal_building"), - pytest.param("place", True, id="literal_place"), - pytest.param("nonexistent", False, id="not_present"), - ], - ) - def test_introspect_extracts_all_literals( - self, literal_value: str, expected_in_mapping: bool - ) -> None: - """Test that introspect_union extracts all Literal field values.""" - - class Building(BaseModel): - type: Literal["building"] - subtype: Literal["residential"] - - class Place(BaseModel): - type: Literal["place"] - category: Literal["restaurant"] - - UnionType = Annotated[Building | Place, Field(discriminator="type")] - 
metadata = introspect_union(UnionType) - - if expected_in_mapping: - assert literal_value in metadata.discriminator_to_model - else: - assert literal_value not in metadata.discriminator_to_model - class TestDiscriminatorDiscovery: """Tests for runtime discriminator field discovery (not hardcoded).""" From 910e12838899f578f3cf86cefd2ceff408ec9ea1 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 25 Feb 2026 12:53:42 -0800 Subject: [PATCH 11/38] refactor(core,cli): rename ModelKey.class_name to entry_point The field holds the entry point value in "module:Class" format, not a class name. The old name required callers to know this (codegen's cli.py had a comment explaining it, and assigned to a local `entry_point` variable to compensate). --- .../src/overture/schema/cli/commands.py | 2 +- .../src/overture/schema/core/discovery.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/overture-schema-cli/src/overture/schema/cli/commands.py b/packages/overture-schema-cli/src/overture/schema/cli/commands.py index 8fdd8bdf4..a75b16cd4 100644 --- a/packages/overture-schema-cli/src/overture/schema/cli/commands.py +++ b/packages/overture-schema-cli/src/overture/schema/cli/commands.py @@ -798,7 +798,7 @@ def dump_namespace( sorted_types = sorted(theme_types[theme], key=lambda x: x[0].type) for key, model_class in sorted_types: stdout.print( - f" [bright_black]→[/bright_black] [bold cyan]{key.type}[/bold cyan] [dim magenta]({key.class_name})[/dim magenta]" + f" [bright_black]→[/bright_black] [bold cyan]{key.type}[/bold cyan] [dim magenta]({key.entry_point})[/dim magenta]" ) docstring = get_model_docstring(model_class) if docstring: diff --git a/packages/overture-schema-core/src/overture/schema/core/discovery.py b/packages/overture-schema-core/src/overture/schema/core/discovery.py index 15da3abc4..b9290d29a 100644 --- a/packages/overture-schema-core/src/overture/schema/core/discovery.py +++ 
b/packages/overture-schema-core/src/overture/schema/core/discovery.py @@ -21,15 +21,15 @@ class ModelKey: The theme name (e.g., "buildings", "places"), or None for non-themed models type : str The feature type (e.g., "building", "place") - class_name : str - The fully qualified class name from the entry point value + entry_point : str + The entry point value in "module:Class" format """ namespace: str theme: str | None type: str - class_name: str + entry_point: str def discover_models( @@ -87,7 +87,7 @@ def discover_models( namespace=ns, theme=theme, type=feature_type, - class_name=entry_point.value, + entry_point=entry_point.value, ) models[key] = model_class except Exception as e: From 28ce95383d7785583a66170218f69f2c4a9ba5bc Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:55:05 -0800 Subject: [PATCH 12/38] feat(codegen): add overture-schema-codegen package Empty package with build config, namespace packages, and py.typed marker. Declares click, jinja2, tomli, and overture-schema-core/system as dependencies. 
--- packages/overture-schema-codegen/README.md | 20 +++++++++++++ .../overture-schema-codegen/pyproject.toml | 29 +++++++++++++++++++ .../src/overture/__init__.py | 1 + .../src/overture/schema/__init__.py | 1 + .../src/overture/schema/codegen/__about__.py | 1 + .../src/overture/schema/codegen/__init__.py | 1 + .../src/overture/schema/codegen/py.typed | 0 pyproject.toml | 1 + uv.lock | 21 ++++++++++++++ 9 files changed, 75 insertions(+) create mode 100644 packages/overture-schema-codegen/README.md create mode 100644 packages/overture-schema-codegen/pyproject.toml create mode 100644 packages/overture-schema-codegen/src/overture/__init__.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/__init__.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/py.typed diff --git a/packages/overture-schema-codegen/README.md b/packages/overture-schema-codegen/README.md new file mode 100644 index 000000000..4cb93011d --- /dev/null +++ b/packages/overture-schema-codegen/README.md @@ -0,0 +1,20 @@ +# Overture Schema Codegen + +Code generator that produces documentation and code from Pydantic models. 
+ +## Installation + +```bash +pip install overture-schema-codegen +``` + +## Usage + +```python +from overture.schema.codegen import analyze_type, TypeInfo, TypeKind + +# Analyze a type annotation +info = analyze_type(str) +assert info.base_type == "str" +assert info.kind == TypeKind.PRIMITIVE +``` diff --git a/packages/overture-schema-codegen/pyproject.toml b/packages/overture-schema-codegen/pyproject.toml new file mode 100644 index 000000000..de42c5fb9 --- /dev/null +++ b/packages/overture-schema-codegen/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +build-backend = "hatchling.build" +requires = ["hatchling"] + +[project] +dependencies = [ + "click>=8.0", + "jinja2>=3.0", + "overture-schema-core", + "overture-schema-system", + "tomli>=2.0; python_version < '3.11'", +] +description = "Code generator that produces documentation and code from Pydantic models" +dynamic = ["version"] +license = "MIT" +name = "overture-schema-codegen" + +[project.scripts] +overture-codegen = "overture.schema.codegen.cli:main" + +[tool.uv.sources] +overture-schema-core = { workspace = true } +overture-schema-system = { workspace = true } + +[tool.hatch.version] +path = "src/overture/schema/codegen/__about__.py" + +[tool.hatch.build.targets.wheel] +packages = ["src/overture"] diff --git a/packages/overture-schema-codegen/src/overture/__init__.py b/packages/overture-schema-codegen/src/overture/__init__.py new file mode 100644 index 000000000..8db66d3d0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/overture-schema-codegen/src/overture/schema/__init__.py b/packages/overture-schema-codegen/src/overture/schema/__init__.py new file mode 100644 index 000000000..8db66d3d0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git 
a/packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py new file mode 100644 index 000000000..3dc1f76bc --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/__about__.py @@ -0,0 +1 @@ +__version__ = "0.1.0" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py new file mode 100644 index 000000000..2de7d7120 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/__init__.py @@ -0,0 +1 @@ +"""Code generator for Overture Schema Pydantic models.""" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/py.typed b/packages/overture-schema-codegen/src/overture/schema/codegen/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/pyproject.toml b/pyproject.toml index 277e2bb89..f7d9bdbdc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,7 @@ pythonpath = [ "packages/overture-schema-annex/tests", "packages/overture-schema-base-theme/tests", "packages/overture-schema-buildings-theme/tests", + "packages/overture-schema-codegen/tests", "packages/overture-schema-core/tests", "packages/overture-schema-divisions-theme/tests", "packages/overture-schema-places-theme/tests", diff --git a/uv.lock b/uv.lock index 2bc46eb72..5deec1907 100644 --- a/uv.lock +++ b/uv.lock @@ -14,6 +14,7 @@ members = [ "overture-schema-base-theme", "overture-schema-buildings-theme", "overture-schema-cli", + "overture-schema-codegen", "overture-schema-core", "overture-schema-divisions-theme", "overture-schema-places-theme", @@ -779,6 +780,26 @@ dev = [ { name = "ruff" }, ] +[[package]] +name = "overture-schema-codegen" +source = { editable = "packages/overture-schema-codegen" } +dependencies = [ + { name = "click" }, + { name = "jinja2" }, + { name = "overture-schema-core" }, + { name = 
"overture-schema-system" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.0" }, + { name = "jinja2", specifier = ">=3.0" }, + { name = "overture-schema-core", editable = "packages/overture-schema-core" }, + { name = "overture-schema-system", editable = "packages/overture-schema-system" }, + { name = "tomli", marker = "python_full_version < '3.11'", specifier = ">=2.0" }, +] + [[package]] name = "overture-schema-core" source = { editable = "packages/overture-schema-core" } From 35fbd31c42ff3afedf1bc6a46c96b15f5cc88d11 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 18:57:39 -0800 Subject: [PATCH 13/38] feat(codegen): add type analysis, specs, and type registry MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Type analyzer (analyze_type) handles all type unwrapping in a single iterative function: NewType → Annotated → Union → list → terminal classification. Constraints accumulate from Annotated metadata with source tracking via ConstraintSource. Data structures: TypeInfo (type representation), FieldSpec (model field), ModelSpec (model), EnumSpec, NewTypeSpec, PrimitiveSpec. Type registry maps type names to per-target string representations via TypeMapping. is_semantic_newtype() distinguishes meaningful NewTypes from pass-through aliases. Utilities: case_conversion (snake_case), docstring (cleaning and custom-docstring detection). 
--- .../schema/codegen/case_conversion.py | 41 ++ .../src/overture/schema/codegen/docstring.py | 46 ++ .../src/overture/schema/codegen/specs.py | 168 ++++++ .../overture/schema/codegen/type_analyzer.py | 310 ++++++++++ .../overture/schema/codegen/type_registry.py | 113 ++++ .../tests/test_naming.py | 23 + .../tests/test_type_analyzer.py | 563 ++++++++++++++++++ .../tests/test_type_registry.py | 143 +++++ 8 files changed, 1407 insertions(+) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/case_conversion.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/specs.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/type_registry.py create mode 100644 packages/overture-schema-codegen/tests/test_naming.py create mode 100644 packages/overture-schema-codegen/tests/test_type_analyzer.py create mode 100644 packages/overture-schema-codegen/tests/test_type_registry.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/case_conversion.py b/packages/overture-schema-codegen/src/overture/schema/codegen/case_conversion.py new file mode 100644 index 000000000..9d06341fb --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/case_conversion.py @@ -0,0 +1,41 @@ +"""PascalCase to snake_case conversion for code generation.""" + +import re + +__all__ = ["slug_filename", "to_snake_case"] + +# Insert _ between an acronym run and a capitalized word start (HTML|Parser) +_ACRONYM_BOUNDARY = re.compile(r"([A-Z]+)([A-Z][a-z])") +# Insert _ between a lowercase/digit and an uppercase letter (building|Part) +_CAMEL_BOUNDARY = re.compile(r"([a-z0-9])([A-Z])") + + +def to_snake_case(name: str) -> str: + """Convert PascalCase to snake_case. 
+ + Handles acronym runs correctly: "HTMLParser" becomes "html_parser", + not "h_t_m_l_parser". + + >>> to_snake_case("HTMLParser") + 'html_parser' + >>> to_snake_case("BuildingPart") + 'building_part' + >>> to_snake_case("simple") + 'simple' + """ + name = _ACRONYM_BOUNDARY.sub(r"\1_\2", name) + name = _CAMEL_BOUNDARY.sub(r"\1_\2", name) + return name.lower() + + +def slug_filename(name: str, ext: str = ".md") -> str: + """Convert a PascalCase type name to a snake_case filename. + + >>> slug_filename("HexColor") + 'hex_color.md' + >>> slug_filename("BuildingPart") + 'building_part.md' + >>> slug_filename("BuildingPart", ext=".json") + 'building_part.json' + """ + return f"{to_snake_case(name)}{ext}" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py b/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py new file mode 100644 index 000000000..865692ea0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py @@ -0,0 +1,46 @@ +"""Docstring extraction and cleaning utilities.""" + +import inspect +from enum import Enum +from typing import NewType + +__all__ = ["clean_docstring", "first_docstring_line", "is_custom_docstring"] + + +# Probe auto-generated docstrings so we can distinguish them from explicit ones. +# Both Enum and NewType generate default docstrings that vary by Python version; +# capturing at import time adapts automatically if the format changes. 
+class _DocstringProbeEnum(Enum): + pass + + +_ENUM_DEFAULT_DOCSTRING = _DocstringProbeEnum.__doc__ +del _DocstringProbeEnum +_NewtypeProbe = NewType("_NewtypeProbe", int) +_NEWTYPE_DEFAULT_DOCSTRING = _NewtypeProbe.__doc__ +del _NewtypeProbe + + +def clean_docstring(doc: str | None) -> str | None: + """Return cleaned docstring, or None if absent or whitespace-only.""" + if not doc: + return None + cleaned = inspect.cleandoc(doc) + return cleaned or None + + +def first_docstring_line(doc: str | None) -> str: + """Return the first line of a docstring, or empty string.""" + cleaned = clean_docstring(doc) + if not cleaned: + return "" + return cleaned.split("\n")[0] + + +def is_custom_docstring(doc: str | None, inherited_doc: str | None = None) -> bool: + """Check if a docstring was explicitly written, not auto-generated or inherited.""" + return bool(doc) and doc not in ( + _ENUM_DEFAULT_DOCSTRING, + _NEWTYPE_DEFAULT_DOCSTRING, + inherited_doc, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py new file mode 100644 index 000000000..bb5c3fcac --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py @@ -0,0 +1,168 @@ +"""Data types for extracted specifications.""" + +from __future__ import annotations + +import functools +from dataclasses import dataclass, field +from typing import Any, Protocol, TypeGuard, runtime_checkable + +from annotated_types import Interval +from pydantic import BaseModel + +from overture.schema.system.model_constraint import ModelConstraint + +from .type_analyzer import TypeInfo, TypeKind, UnsupportedUnionError, analyze_type + +__all__ = [ + "AnnotatedField", + "EnumMemberSpec", + "EnumSpec", + "FeatureSpec", + "FieldSpec", + "ModelSpec", + "NewTypeSpec", + "PrimitiveSpec", + "SupplementarySpec", + "filter_model_classes", + "is_model_class", + "is_union_alias", +] + + +@dataclass +class EnumMemberSpec: + 
"""Specification for an enum member.""" + + name: str + value: str + description: str | None + + +@dataclass +class EnumSpec: + """Specification for an Enum class.""" + + name: str + description: str | None + members: list[EnumMemberSpec] = field(default_factory=list) + source_type: type | None = None + + +@dataclass +class FieldSpec: + """Specification for a model field.""" + + name: str + type_info: TypeInfo + description: str | None + is_required: bool + model: ModelSpec | None = None + starts_cycle: bool = False + + +@runtime_checkable +class FeatureSpec(Protocol): + """Shared interface for feature-level specs (ModelSpec, UnionSpec).""" + + name: str + description: str | None + source_type: type[BaseModel] | None + entry_point: str | None + constraints: tuple[ModelConstraint, ...] + + @property + def fields(self) -> list[FieldSpec]: ... + + +@dataclass +class ModelSpec: + """Specification for a Pydantic model.""" + + name: str + description: str | None + fields: list[FieldSpec] = field(default_factory=list) + source_type: type[BaseModel] | None = None + entry_point: str | None = None + constraints: tuple[ModelConstraint, ...] = () + + +@dataclass +class AnnotatedField: + """A FieldSpec paired with union variant provenance.""" + + field_spec: FieldSpec + variant_sources: tuple[str, ...] | None + + +# eq=False: contains mutable lists and a cached_property, so +# dataclass-generated __eq__ would be unreliable. +@dataclass(eq=False) +class UnionSpec: + """Specification for a discriminated union type alias.""" + + name: str + description: str | None + annotated_fields: list[AnnotatedField] + members: list[type[BaseModel]] + discriminator_field: str | None + discriminator_mapping: dict[str, type[BaseModel]] | None + source_annotation: object + common_base: type[BaseModel] + source_type: type[BaseModel] | None = field(default=None, init=False) + entry_point: str | None = None + constraints: tuple[ModelConstraint, ...] 
= () + + @functools.cached_property + def fields(self) -> list[FieldSpec]: + """Plain field list for tree expansion and supplementary collection.""" + return [af.field_spec for af in self.annotated_fields] + + +@dataclass +class NewTypeSpec: + """Specification for a NewType.""" + + name: str + description: str | None + type_info: TypeInfo + source_type: object | None = None + + +@dataclass +class PrimitiveSpec: + """Extracted specification for a numeric primitive type.""" + + name: str + description: str + bounds: Interval = field(default_factory=Interval) + float_bits: int | None = None + + +SupplementarySpec = EnumSpec | NewTypeSpec | ModelSpec +"""Non-feature types referenced by feature models. + +Excludes PrimitiveSpec and geometry types, which are extracted +separately via dedicated functions. +""" + + +def is_model_class(obj: object) -> TypeGuard[type[BaseModel]]: + """Check whether *obj* is a concrete BaseModel subclass (not a type alias).""" + return isinstance(obj, type) and issubclass(obj, BaseModel) + + +def is_union_alias(obj: object) -> bool: + """Check whether *obj* is a discriminated union type alias of BaseModel subclasses.""" + try: + ti = analyze_type(obj) + except (TypeError, UnsupportedUnionError): + return False + return ti.kind == TypeKind.UNION + + +def filter_model_classes(models: dict[Any, Any]) -> list[type[BaseModel]]: + """Filter discovered models to concrete BaseModel subclasses. + + Excludes type aliases (like discriminated unions) and non-class entries. 
+ """ + return [v for v in models.values() if is_model_class(v)] diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py new file mode 100644 index 000000000..97c363c85 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -0,0 +1,310 @@ +"""Type analysis for Pydantic models.""" + +from __future__ import annotations + +import types +from dataclasses import dataclass, field +from enum import Enum, auto +from typing import Annotated, Any, Literal, Union, get_args, get_origin + +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from typing_extensions import Sentinel + +from .docstring import clean_docstring + +__all__ = [ + "ConstraintSource", + "TypeKind", + "TypeInfo", + "UnsupportedUnionError", + "analyze_type", + "is_newtype", + "single_literal_value", +] + + +class UnsupportedUnionError(TypeError): + """Raised when analyze_type encounters a multi-type union it cannot represent.""" + + +class TypeKind(Enum): + """Classification of type kinds.""" + + PRIMITIVE = auto() + LITERAL = auto() + ENUM = auto() + MODEL = auto() + UNION = auto() + + +@dataclass(slots=True) +class ConstraintSource: + """A constraint paired with the NewType that contributed it.""" + + source: str | None + constraint: object + + +@dataclass(slots=True) +class TypeInfo: + """Information about a type annotation.""" + + base_type: str + kind: TypeKind + is_optional: bool = False + is_list: bool = False + is_dict: bool = False + dict_key_type: TypeInfo | None = None + dict_value_type: TypeInfo | None = None + constraints: tuple[ConstraintSource, ...] = () + literal_value: object | None = None + source_type: type | None = None + newtype_name: str | None = None + newtype_ref: object | None = None + union_members: tuple[type[BaseModel], ...] 
| None = None + description: str | None = None + + +def is_newtype(annotation: object) -> bool: + """Check if annotation is a typing.NewType. + + NewType creates a callable with a __supertype__ attribute pointing + to the wrapped type. No public API exists for this check. + """ + return callable(annotation) and hasattr(annotation, "__supertype__") + + +def _is_union(origin: object) -> bool: + """Check if an origin represents a union type (X | Y or Union[X, Y]).""" + return origin in (types.UnionType, Union) + + +@dataclass(slots=True) +class _UnwrapState: + """Accumulated state from iterative type unwrapping. + + Tracks two NewType names during unwrapping: + - ``outermost_newtype_name`` / ``outermost_newtype_ref``: the first + NewType encountered, exposed as ``TypeInfo.newtype_name`` / ``newtype_ref``. + - ``last_newtype_name``: the most recently entered NewType, used both + as constraint provenance (which NewType contributed each constraint) + and as the resolved ``base_type`` for the terminal type. + """ + + is_optional: bool = False + is_list: bool = False + is_dict: bool = False + dict_key_type: TypeInfo | None = None + dict_value_type: TypeInfo | None = None + _constraints: list[ConstraintSource] = field(default_factory=list) + outermost_newtype_name: str | None = None + outermost_newtype_ref: object | None = None + last_newtype_name: str | None = None + description: str | None = None + + def add_constraint(self, source: str | None, constraint: object) -> None: + self._constraints.append(ConstraintSource(source, constraint)) + + def build_type_info( + self, + *, + base_type: str, + kind: TypeKind, + literal_value: object | None = None, + source_type: type | None = None, + union_members: tuple[type[BaseModel], ...] 
def analyze_type(annotation: object) -> TypeInfo:
    """Analyze a type annotation and return TypeInfo.

    Iteratively unwraps type wrappers (Annotated, Optional, list, NewType) until
    reaching a terminal type.

    Raises TypeError for unsupported annotations (bare list/dict, non-type
    objects) and UnsupportedUnionError for unions it cannot represent.
    """
    state = _UnwrapState()

    while True:
        origin = get_origin(annotation)

        # Handle NewType (e.g., int32 = NewType("int32", Annotated[int, ...]))
        if is_newtype(annotation):
            name = annotation.__name__  # type: ignore[attr-defined]
            state.last_newtype_name = name
            # Only the first (outermost) NewType is exposed on the TypeInfo.
            if state.outermost_newtype_name is None:
                state.outermost_newtype_name = name
                state.outermost_newtype_ref = annotation
            annotation = annotation.__supertype__  # type: ignore[attr-defined]
            continue

        # Handle Annotated types (Annotated[X, metadata...])
        if origin is Annotated:
            args = get_args(annotation)
            annotation = args[0]
            for c in args[1:]:
                if isinstance(c, FieldInfo):
                    # Outermost description wins: only record when none set yet.
                    if c.description is not None and state.description is None:
                        state.description = clean_docstring(c.description)
                    for m in c.metadata:
                        state.add_constraint(state.last_newtype_name, m)
                else:
                    state.add_constraint(state.last_newtype_name, c)
            continue

        # Handle union types (X | None or Optional[X])
        if _is_union(origin):
            args = get_args(annotation)
            # Filter out None, Sentinel instances (e.g. Pydantic's ``MISSING``),
            # and Literal alternatives (e.g., HttpUrl | Literal[""] where the
            # Literal is a special-value sentinel, not the primary type).
            if any(a is types.NoneType for a in args):
                state.is_optional = True

            non_none_args = [
                a
                for a in args
                if a is not types.NoneType and not isinstance(a, Sentinel)
            ]

            # Only filter out Literal arms when a concrete (non-Literal) type
            # exists. Without this guard, Optional[Literal["x"]] would lose
            # all args because the Literal *is* the primary type.
            concrete_args = [a for a in non_none_args if get_origin(a) is not Literal]
            real_args = concrete_args if concrete_args else non_none_args

            if len(real_args) > 1:
                # Check if all real args are BaseModel subclasses
                # (unwrap Annotated wrappers to get the actual class)
                members: list[type[BaseModel]] = []
                for arg in real_args:
                    inner = arg
                    if get_origin(inner) is Annotated:
                        inner = get_args(inner)[0]
                    if isinstance(inner, type) and issubclass(inner, BaseModel):
                        members.append(inner)
                    else:
                        raise UnsupportedUnionError(
                            f"Multi-type unions not supported: {annotation}"
                        )
                return state.build_type_info(
                    base_type=members[0].__name__,
                    kind=TypeKind.UNION,
                    union_members=tuple(members),
                )

            if not real_args:
                raise UnsupportedUnionError(
                    f"Union with no concrete types: {annotation}"
                )

            annotation = real_args[0]
            continue

        # Handle list types (list[X])
        if origin is list:
            args = get_args(annotation)
            if not args:
                raise TypeError("Bare list without type argument is not supported")
            state.is_list = True
            annotation = args[0]
            continue

        # Handle dict types (dict[K, V])
        if origin is dict:
            args = get_args(annotation)
            if not args:
                raise TypeError("Bare dict without type arguments is not supported")
            state.is_dict = True
            # Key/value annotations are analyzed recursively with fresh state.
            state.dict_key_type = analyze_type(args[0])
            state.dict_value_type = analyze_type(args[1])
            base_type = state.last_newtype_name or "dict"
            return state.build_type_info(
                base_type=base_type,
                kind=TypeKind.PRIMITIVE,
                source_type=dict,
            )

        break

    return _classify_terminal(annotation, state)
_classify_terminal(annotation: object, state: _UnwrapState) -> TypeInfo: + """Classify a fully-unwrapped terminal type into a TypeInfo.""" + # typing.Any -- treat as an opaque primitive + if annotation is Any: + return state.build_type_info( + base_type="Any", + kind=TypeKind.PRIMITIVE, + ) + + # Literal types (e.g., Literal["value"]) + if get_origin(annotation) is Literal: + args = get_args(annotation) + # Only expose literal_value for single-value Literals, which + # represent fixed constants (theme="buildings"). Multi-value + # Literals (Literal["a", "b"]) are enum-like and have no + # single default. + value = args[0] if len(args) == 1 else None + return state.build_type_info( + base_type="Literal", + kind=TypeKind.LITERAL, + literal_value=value, + ) + + if not isinstance(annotation, type): + raise TypeError(f"Unsupported annotation type: {type(annotation)}") + + if issubclass(annotation, list): + raise TypeError("Bare list without type argument is not supported") + + if issubclass(annotation, dict): + raise TypeError("Bare dict without type arguments is not supported") + + # Determine kind from type hierarchy + if issubclass(annotation, Enum): + kind = TypeKind.ENUM + elif issubclass(annotation, BaseModel): + kind = TypeKind.MODEL + else: + kind = TypeKind.PRIMITIVE + + base_type = state.last_newtype_name or annotation.__name__ + + return state.build_type_info( + base_type=base_type, + kind=kind, + source_type=annotation, + ) + + +def single_literal_value(annotation: object) -> object | None: + """Extract a single literal value from a type annotation, or None. + + Delegates to analyze_type for all unwrapping, then checks + whether the result is a single-value Literal. 
+ """ + try: + ti = analyze_type(annotation) + except (TypeError, UnsupportedUnionError): + return None + if ti.kind == TypeKind.LITERAL: + return ti.literal_value + return None diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_registry.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_registry.py new file mode 100644 index 000000000..505657866 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_registry.py @@ -0,0 +1,113 @@ +"""Type registry mapping Python types to target representations.""" + +from dataclasses import dataclass + +from .type_analyzer import TypeInfo + +__all__ = [ + "TypeMapping", + "PRIMITIVE_TYPES", + "get_type_mapping", + "is_semantic_newtype", + "resolve_type_name", +] + + +@dataclass(frozen=True) +class TypeMapping: + """Maps a type to its representation in different targets.""" + + markdown: str + + def for_target(self, target: str) -> str: + """Get the type representation for a named target.""" + if target != "markdown": + raise ValueError(f"Unknown target {target!r}, expected 'markdown'") + return self.markdown + + +PRIMITIVE_TYPES: dict[str, TypeMapping] = { + # Signed integers + "int8": TypeMapping(markdown="int8"), + "int16": TypeMapping(markdown="int16"), + "int32": TypeMapping(markdown="int32"), + "int64": TypeMapping(markdown="int64"), + # Unsigned integers + "uint8": TypeMapping(markdown="uint8"), + "uint16": TypeMapping(markdown="uint16"), + "uint32": TypeMapping(markdown="uint32"), + # Floating point + "float32": TypeMapping(markdown="float32"), + "float64": TypeMapping(markdown="float64"), + # Basic types + "str": TypeMapping(markdown="string"), + "bool": TypeMapping(markdown="boolean"), + # Python builtins (aliases to their portable equivalents) + "int": TypeMapping(markdown="int64"), + "float": TypeMapping(markdown="float64"), + # Geometry types + "Geometry": TypeMapping(markdown="geometry"), + "BBox": TypeMapping(markdown="bbox"), +} + + +def 
is_semantic_newtype(type_info: TypeInfo) -> bool: + """Whether a type represents a semantic NewType that should be displayed by name. + + Returns True for unregistered NewTypes (HexColor, Sources) and NewTypes + that wrap a different base type (FeatureVersion wrapping int32, Id wrapping + NoWhitespaceString). Returns False for registered primitives (int32, Geometry). + """ + if type_info.newtype_name is None: + return False + if type_info.newtype_name != type_info.base_type: + return True + return get_type_mapping(type_info.base_type) is None + + +def get_type_mapping(type_name: str) -> TypeMapping | None: + """Look up a type mapping by name. + + Parameters + ---------- + type_name : str + The type name to look up (e.g., "int32", "str", "Geometry"). + Also accepts Python builtin names ("int" -> int64, "float" -> float64). + + Returns + ------- + TypeMapping or None + The TypeMapping for the type, or None if not found. + """ + return PRIMITIVE_TYPES.get(type_name) + + +def resolve_type_name(type_info: TypeInfo, target: str) -> str: + """Resolve a TypeInfo to the base type string for a given target. + + Looks up the type in the registry first (trying source_type if base_type + has no mapping). Falls back to the base_type name as-is. + + Parameters + ---------- + type_info : TypeInfo + The analyzed type information. + target : str + The output target ("markdown"). + + Returns + ------- + str + The resolved base type name string for the target. + """ + mapping = get_type_mapping(type_info.base_type) + if mapping is None and type_info.source_type is not None: + mapping = get_type_mapping(type_info.source_type.__name__) + if mapping is not None: + return mapping.for_target(target) + + # Semantic NewType wrapping an unregistered type (e.g., Sources wrapping + # SourceItem): use the underlying class name rather than the NewType alias. 
+ if type_info.newtype_name and type_info.source_type is not None: + return type_info.source_type.__name__ + return type_info.base_type diff --git a/packages/overture-schema-codegen/tests/test_naming.py b/packages/overture-schema-codegen/tests/test_naming.py new file mode 100644 index 000000000..aeb398a34 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_naming.py @@ -0,0 +1,23 @@ +"""Tests for PascalCase to snake_case conversion.""" + +import pytest +from overture.schema.codegen.case_conversion import to_snake_case + + +class TestToSnakeCase: + """Tests for snake_case conversion helper.""" + + @pytest.mark.parametrize( + ("input_name", "expected"), + [ + ("Building", "building"), + ("BuildingPart", "building_part"), + ("RoadSegment", "road_segment"), + ("Place", "place"), + ("simple", "simple"), # Already lowercase + ("HTTPServer", "http_server"), # Consecutive caps + ], + ) + def test_converts_pascal_to_snake(self, input_name: str, expected: str) -> None: + """PascalCase names should convert to snake_case.""" + assert to_snake_case(input_name) == expected diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py new file mode 100644 index 000000000..04c54340c --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -0,0 +1,563 @@ +"""Tests for type analysis.""" + +from enum import Enum +from typing import Annotated, Any, Literal, NewType, Optional + +import pytest +from annotated_types import Ge +from overture.schema.codegen.type_analyzer import ( + TypeInfo, + TypeKind, + UnsupportedUnionError, + analyze_type, +) +from overture.schema.system.primitive import float64, int32 +from overture.schema.system.ref import Id +from overture.schema.system.string import ( + HexColor, + NoWhitespaceConstraint, + SnakeCaseString, +) +from pydantic import BaseModel, Field, Tag + + +@pytest.fixture() +def id_type_info() -> TypeInfo: + return analyze_type(Id) + 
@pytest.fixture()
def hex_color_type_info() -> TypeInfo:
    # Single-layer NewType with Field(description=...) metadata; shared by
    # provenance and description tests below.
    return analyze_type(HexColor)


class TestAnalyzeTypePrimitives:
    """Tests for primitive type analysis."""

    @pytest.mark.parametrize("annotation", [str, int, float, bool])
    def test_builtin_returns_primitive_type_info(self, annotation: type) -> None:
        """Builtin type annotations return PRIMITIVE TypeInfo with matching base_type."""
        result = analyze_type(annotation)

        assert result.base_type == annotation.__name__
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is False
        assert result.is_list is False


class TestAnalyzeTypeSentinel:
    """Tests for Sentinel type filtering in unions.

    Pydantic uses ``typing_extensions.Sentinel`` instances (like ``MISSING``)
    in union types for optional fields. The type analyzer filters these out
    alongside ``None`` when processing unions.
    """

    @pytest.fixture()
    def missing_sentinel(self) -> object:
        from typing_extensions import Sentinel

        return Sentinel("MISSING")

    def test_sentinel_filtered_from_union(self, missing_sentinel: object) -> None:
        """Sentinel is filtered out, leaving the concrete type."""
        result = analyze_type(str | missing_sentinel)  # type: ignore[arg-type]

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is False

    def test_sentinel_with_none_sets_optional(self, missing_sentinel: object) -> None:
        """Sentinel + None both filtered; None triggers is_optional."""
        result = analyze_type(str | missing_sentinel | None)  # type: ignore[arg-type]

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is True


class TestAnalyzeTypeOptional:
    """Tests for Optional type analysis."""

    def test_pipe_none_sets_is_optional(self) -> None:
        """str | None returns TypeInfo with is_optional=True."""
        result = analyze_type(str | None)

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is True
        assert result.is_list is False

    def test_type_with_literal_alternative(self) -> None:
        """str | Literal[""] filters out the Literal and analyzes the concrete type."""
        result = analyze_type(str | Literal[""])

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is False

    def test_type_with_literal_and_none(self) -> None:
        """str | Literal[""] | None filters Literal and marks optional."""
        result = analyze_type(str | Literal[""] | None)

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is True

    def test_typing_optional_sets_is_optional(self) -> None:
        """Optional[str] from typing module returns TypeInfo with is_optional=True."""
        result = analyze_type(Optional[str])  # noqa: UP045

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is True
        assert result.is_list is False


class TestAnalyzeTypeList:
    """Tests for list type analysis."""

    def test_list_str_sets_is_list(self) -> None:
        """list[str] returns TypeInfo with is_list=True."""
        result = analyze_type(list[str])

        assert result.base_type == "str"
        assert result.kind == TypeKind.PRIMITIVE
        assert result.is_optional is False
        assert result.is_list is True


class TestAnalyzeTypeComposite:
    """Tests for composite/nested type analysis."""

    def test_list_optional_str(self) -> None:
        """list[str | None] sets both is_list and is_optional."""
        result = analyze_type(list[str | None])

        assert result.base_type == "str"
        assert result.is_list is True
        assert result.is_optional is True

    def test_optional_list_str(self) -> None:
        """list[str] | None sets both is_list and is_optional."""
        result = analyze_type(list[str] | None)

        assert result.base_type == "str"
        assert result.is_list is True
        assert result.is_optional is True

    def test_annotated_optional_str(self) -> None:
        """Annotated[str | None, ...] extracts constraints and sets is_optional."""
        result = analyze_type(Annotated[str | None, "description"])

        assert result.base_type == "str"
        assert result.is_optional is True
        assert len(result.constraints) == 1
        assert result.constraints[0].source is None
        assert result.constraints[0].constraint == "description"

    def test_annotated_list_str(self) -> None:
        """Annotated[list[str], ...] extracts constraints and sets is_list."""
        result = analyze_type(Annotated[list[str], Field(min_length=1)])

        assert result.base_type == "str"
        assert result.is_list is True
        assert len(result.constraints) == 1
        assert result.constraints[0].source is None
class TestAnalyzeTypeAnnotated:
    """Tests for Annotated type analysis."""

    def test_annotated_int_with_ge_extracts_constraint(self) -> None:
        """Annotated[int, Field(ge=0)] unpacks FieldInfo to extract Ge constraint."""
        result = analyze_type(Annotated[int, Field(ge=0)])

        assert result.base_type == "int"
        assert result.kind == TypeKind.PRIMITIVE
        assert len(result.constraints) == 1
        cs = result.constraints[0]
        # source is None: constraint came from a direct Annotated, not a NewType.
        assert cs.source is None
        assert isinstance(cs.constraint, Ge)
        assert cs.constraint.ge == 0

    def test_annotated_without_constraints(self) -> None:
        """Annotated[str, 'description'] extracts non-Field metadata."""
        result = analyze_type(Annotated[str, "just a description"])

        assert result.base_type == "str"
        assert len(result.constraints) == 1
        assert result.constraints[0].source is None
        assert result.constraints[0].constraint == "just a description"


class TestAnalyzeTypeLiteral:
    """Tests for Literal type analysis."""

    def test_literal_string_extracts_value(self) -> None:
        """Literal["value"] returns TypeInfo with literal_value="value"."""
        result = analyze_type(Literal["active"])

        assert result.kind == TypeKind.LITERAL
        assert result.literal_value == "active"

    def test_literal_int_extracts_value(self) -> None:
        """Literal[42] returns TypeInfo with literal_value=42."""
        result = analyze_type(Literal[42])

        assert result.kind == TypeKind.LITERAL
        assert result.literal_value == 42

    def test_optional_literal_extracts_value(self) -> None:
        """Optional[Literal["x"]] unwraps to Literal with is_optional set."""
        result = analyze_type(Literal["x"] | None)

        assert result.kind == TypeKind.LITERAL
        assert result.literal_value == "x"
        assert result.is_optional is True


class TestAnalyzeTypeEnum:
    """Tests for Enum type analysis."""

    def test_enum_subclass_returns_kind_enum(self) -> None:
        """Enum subclass returns TypeInfo with kind=ENUM."""

        class Color(Enum):
            RED = "red"
            GREEN = "green"

        result = analyze_type(Color)

        assert result.base_type == "Color"
        assert result.kind == TypeKind.ENUM


class TestAnalyzeTypeModel:
    """Tests for BaseModel type analysis."""

    def test_basemodel_subclass_returns_kind_model(self) -> None:
        """BaseModel subclass returns TypeInfo with kind=MODEL."""

        class Person(BaseModel):
            name: str

        result = analyze_type(Person)

        assert result.base_type == "Person"
        assert result.kind == TypeKind.MODEL


class TestAnalyzeTypeNewType:
    """Tests for NewType primitive analysis."""

    def test_int32_returns_newtype_name(self) -> None:
        """int32 NewType returns TypeInfo with base_type='int32'."""
        result = analyze_type(int32)

        assert result.base_type == "int32"
        assert result.kind == TypeKind.PRIMITIVE

    def test_float64_returns_newtype_name(self) -> None:
        """float64 NewType returns TypeInfo with base_type='float64'."""
        result = analyze_type(float64)

        assert result.base_type == "float64"
        assert result.kind == TypeKind.PRIMITIVE

    def test_optional_int32(self) -> None:
        """int32 | None sets is_optional and preserves base_type."""
        result = analyze_type(int32 | None)

        assert result.base_type == "int32"
        assert result.is_optional is True


class TestNewtypeName:
    """Tests for outermost NewType name tracking."""

    def test_single_layer_newtype(self) -> None:
        """Single NewType like int32 sets newtype_name to its name."""
        result = analyze_type(int32)

        assert result.newtype_name == "int32"
        assert result.base_type == "int32"

    def test_nested_newtype_preserves_outermost(self, id_type_info: TypeInfo) -> None:
        """Nested NewType chain uses outermost name for newtype_name."""
        assert id_type_info.newtype_name == "Id"
        assert id_type_info.base_type == "NoWhitespaceString"

    def test_plain_type_has_no_newtype_name(self) -> None:
        """Plain types without NewType wrapping have newtype_name=None."""
        result = analyze_type(str)

        assert result.newtype_name is None

    def test_newtype_ref_set_for_newtype(self, id_type_info: TypeInfo) -> None:
        """newtype_ref points to the outermost NewType callable."""
        assert id_type_info.newtype_ref is Id

    def test_newtype_ref_none_for_plain_type(self) -> None:
        """Plain types have newtype_ref=None."""
        result = analyze_type(str)

        assert result.newtype_ref is None


class TestNewtypeWrappingList:
    """Tests for NewType wrapping a list type."""

    def test_newtype_wrapping_list(self) -> None:
        """NewType wrapping a list sets is_list and preserves newtype_name."""
        TestSources = NewType("TestSources", Annotated[list[str], Field(min_length=1)])
        result = analyze_type(TestSources)

        assert result.is_list is True
        assert result.newtype_name == "TestSources"

    def test_scalar_newtype_is_not_list(self) -> None:
        """Scalar NewType like int32 has is_list=False."""
        result = analyze_type(int32)

        assert result.is_list is False

    def test_plain_list_has_no_newtype_name(self) -> None:
        """Plain list[str] without NewType has newtype_name=None."""
        result = analyze_type(list[str])

        assert result.newtype_name is None
        assert result.is_list is True
tracking.""" + + def test_nested_newtype_flattens_constraints(self, id_type_info: TypeInfo) -> None: + """Id -> NoWhitespaceString -> str flattens all constraints with sources.""" + sources = {cs.source for cs in id_type_info.constraints} + assert "Id" in sources + assert "NoWhitespaceString" in sources + + def test_nested_newtype_includes_inner_constraints( + self, id_type_info: TypeInfo + ) -> None: + """Inner NewType constraints are collected with provenance.""" + nws_constraints = [ + cs for cs in id_type_info.constraints if cs.source == "NoWhitespaceString" + ] + constraint_types = {type(cs.constraint) for cs in nws_constraints} + assert NoWhitespaceConstraint in constraint_types + + def test_direct_annotation_has_none_source(self) -> None: + """Constraints from direct Annotated (no NewType) have source=None.""" + result = analyze_type(Annotated[str, "direct"]) + + assert len(result.constraints) == 1 + assert result.constraints[0].source is None + assert result.constraints[0].constraint == "direct" + + def test_single_newtype_constraints_attributed( + self, hex_color_type_info: TypeInfo + ) -> None: + """HexColor constraints are attributed to HexColor.""" + assert all(cs.source == "HexColor" for cs in hex_color_type_info.constraints) + assert len(hex_color_type_info.constraints) > 0 + + def test_constraint_preserves_original_object( + self, hex_color_type_info: TypeInfo + ) -> None: + """ConstraintSource.constraint holds the original constraint object.""" + hcc = next( + cs + for cs in hex_color_type_info.constraints + if type(cs.constraint).__name__ == "HexColorConstraint" + ) + assert hcc.constraint.__class__.__name__ == "HexColorConstraint" + + +class TestTypeInfoDescription: + """Tests for TypeInfo.description from Field(description=...) 
metadata.""" + + def test_newtype_with_field_description( + self, hex_color_type_info: TypeInfo + ) -> None: + """Should extract Field description from HexColor.""" + assert hex_color_type_info.description is not None + assert "color" in hex_color_type_info.description.lower() + + def test_newtype_without_field_description(self) -> None: + """Should have None description for types without Field(description=...).""" + result = analyze_type(int) + assert result.description is None + + def test_plain_annotated_with_field_description(self) -> None: + """Should extract description from Annotated with Field(description=...).""" + MyType = Annotated[str, Field(description="A test description")] + result = analyze_type(MyType) + assert result.description == "A test description" + + def test_outermost_description_wins(self, id_type_info: TypeInfo) -> None: + """Outermost FieldInfo.description takes precedence in nested NewTypes.""" + assert id_type_info.description is not None + assert "unique identifier" in id_type_info.description.lower() + + def test_newtype_without_field_has_none_description(self) -> None: + """NewType with constraints but no Field(description=...) 
has None.""" + result = analyze_type(SnakeCaseString) + assert result.description is None + + +class TestAnalyzeTypeAny: + """Tests for typing.Any analysis.""" + + def test_any_returns_primitive(self) -> None: + """Any annotation returns TypeInfo with base_type='Any' and kind=PRIMITIVE.""" + result = analyze_type(Any) + + assert result.base_type == "Any" + assert result.kind == TypeKind.PRIMITIVE + + def test_dict_with_any_value(self) -> None: + """dict[str, Any] analyzes without error.""" + result = analyze_type(dict[str, Any]) + + assert result.is_dict is True + assert result.dict_value_type is not None + assert result.dict_value_type.base_type == "Any" + + +class TestAnalyzeTypeDict: + """Tests for dict type analysis.""" + + @pytest.fixture() + def dict_str_int(self) -> TypeInfo: + return analyze_type(dict[str, int]) + + def test_dict_str_int_sets_is_dict(self, dict_str_int: TypeInfo) -> None: + """dict[str, int] returns TypeInfo with is_dict=True.""" + assert dict_str_int.is_dict is True + assert dict_str_int.is_optional is False + assert dict_str_int.is_list is False + + def test_dict_key_type_analyzed(self, dict_str_int: TypeInfo) -> None: + """dict[str, int] has dict_key_type describing the key.""" + assert dict_str_int.dict_key_type is not None + assert dict_str_int.dict_key_type.base_type == "str" + assert dict_str_int.dict_key_type.kind == TypeKind.PRIMITIVE + + def test_dict_value_type_analyzed(self, dict_str_int: TypeInfo) -> None: + """dict[str, int] has dict_value_type describing the value.""" + assert dict_str_int.dict_value_type is not None + assert dict_str_int.dict_value_type.base_type == "int" + assert dict_str_int.dict_value_type.kind == TypeKind.PRIMITIVE + + def test_optional_dict(self) -> None: + """dict[str, str] | None sets is_dict and is_optional.""" + result = analyze_type(dict[str, str] | None) + + assert result.is_dict is True + assert result.is_optional is True + + def test_newtype_wrapping_dict(self) -> None: + """NewType wrapping 
class TestAnalyzeTypeErrors:
    """Tests for error handling."""

    def test_unsupported_annotation_raises_type_error(self) -> None:
        """Unsupported annotation type raises TypeError."""
        with pytest.raises(TypeError, match="Unsupported annotation type"):
            analyze_type("not a type")

    def test_multi_type_union_raises_clear_error(self) -> None:
        """Multi-type unions like str | int raise UnsupportedUnionError."""
        with pytest.raises(
            UnsupportedUnionError, match="Multi-type unions not supported"
        ):
            analyze_type(str | int)

    def test_multi_type_union_with_none_raises_clear_error(self) -> None:
        """Multi-type optional unions like str | int | None raise UnsupportedUnionError."""
        with pytest.raises(
            UnsupportedUnionError, match="Multi-type unions not supported"
        ):
            analyze_type(str | int | None)

    def test_bare_list_raises_type_error(self) -> None:
        """Bare list without type argument raises TypeError."""
        with pytest.raises(TypeError, match="Bare list without type argument"):
            analyze_type(list)


# Minimal model pair used by the union tests below.
class _UnionModelA(BaseModel):
    x: int


class _UnionModelB(BaseModel):
    y: str


class TestAnalyzeTypeUnion:
    """Tests for discriminated union analysis."""

    def test_all_model_union_returns_union_kind(self) -> None:
        """Annotated[Union of BaseModel subclasses] returns TypeKind.UNION."""
        union_type = Annotated[_UnionModelA | _UnionModelB, Field(description="test")]
        result = analyze_type(union_type)

        assert result.kind == TypeKind.UNION
        assert result.union_members is not None
        assert len(result.union_members) == 2
        assert _UnionModelA in result.union_members
        assert _UnionModelB in result.union_members

    def test_annotated_wrapped_members_unwrapped(self) -> None:
        """Union members wrapped in Annotated[X, Tag(...)] are unwrapped."""
        union_type = Annotated[
            Annotated[_UnionModelA, Tag("a")] | Annotated[_UnionModelB, Tag("b")],
            Field(description="disc"),
        ]
        result = analyze_type(union_type)

        assert result.kind == TypeKind.UNION
        assert result.union_members is not None
        assert len(result.union_members) == 2
        assert _UnionModelA in result.union_members
        assert _UnionModelB in result.union_members

    def test_mixed_model_nonmodel_union_still_raises(self) -> None:
        """Union of model + non-model types still raises UnsupportedUnionError."""
        with pytest.raises(UnsupportedUnionError):
            analyze_type(_UnionModelA | str)

    def test_non_model_multi_union_still_raises(self) -> None:
        """Multi-type union of non-models still raises UnsupportedUnionError."""
        with pytest.raises(UnsupportedUnionError):
            analyze_type(str | int)

    def test_union_base_type_is_first_member_name(self) -> None:
        """UNION TypeInfo base_type is the first member's class name."""
        result = analyze_type(
            Annotated[_UnionModelA | _UnionModelB, Field(description="test")]
        )
        assert result.base_type == "_UnionModelA"

    def test_optional_union_sets_is_optional(self) -> None:
        """Union with None among model members sets is_optional."""
        result = analyze_type(
            Annotated[_UnionModelA | _UnionModelB, Field(description="test")] | None
        )
        assert result.kind == TypeKind.UNION
        assert result.is_optional is True
TypeInfo, TypeKind +from overture.schema.codegen.type_registry import ( + PRIMITIVE_TYPES, + TypeMapping, + get_type_mapping, + resolve_type_name, +) + + +class TestTypeMapping: + """Tests for TypeMapping dataclass.""" + + def test_typemapping_accepts_markdown(self) -> None: + """TypeMapping should construct with markdown field.""" + mapping = TypeMapping(markdown="int32") + + assert mapping.markdown == "int32" + + def test_for_target_returns_markdown(self) -> None: + """for_target should return markdown representation for markdown target.""" + mapping = TypeMapping(markdown="int32") + + assert mapping.for_target("markdown") == "int32" + + def test_for_target_rejects_unknown_target(self) -> None: + """for_target should raise ValueError for unknown targets.""" + mapping = TypeMapping(markdown="int32") + + with pytest.raises(ValueError, match="Unknown target 'scala'"): + mapping.for_target("scala") + + +class TestPrimitiveTypes: + """Tests for PRIMITIVE_TYPES registry.""" + + def test_registry_contains_expected_types(self) -> None: + """Registry should contain all expected primitive types.""" + expected_types = { + "int8", + "int16", + "int32", + "int64", + "uint8", + "uint16", + "uint32", + "float32", + "float64", + "str", + "bool", + "int", + "float", + "Geometry", + "BBox", + } + + assert set(PRIMITIVE_TYPES.keys()) == expected_types + + def test_bbox_mapping(self) -> None: + """BBox should map to bbox.""" + bbox = PRIMITIVE_TYPES["BBox"] + + assert bbox.markdown == "bbox" + + +class TestGetTypeMapping: + """Tests for get_type_mapping function.""" + + def test_returns_mapping_for_known_type(self) -> None: + """Should return TypeMapping for known primitive type.""" + result = get_type_mapping("int32") + + assert result is not None + assert result.markdown == "int32" + + def test_returns_none_for_unknown_type(self) -> None: + """Should return None for unknown type names.""" + result = get_type_mapping("unknown_type") + + assert result is None + + def 
test_returns_mapping_for_builtin_int(self) -> None: + """Should map Python int to int64.""" + result = get_type_mapping("int") + + assert result is not None + assert result.markdown == "int64" + + def test_returns_mapping_for_builtin_float(self) -> None: + """Should map Python float to float64.""" + result = get_type_mapping("float") + + assert result is not None + assert result.markdown == "float64" + + +class TestResolveTypeNameNewTypeFallback: + """Tests for resolve_type_name with unregistered NewTypes.""" + + def test_unregistered_newtype_falls_back_to_source_type(self) -> None: + """Unregistered NewType resolves to source_type name.""" + ti = TypeInfo( + base_type="Sources", + kind=TypeKind.MODEL, + newtype_name="Sources", + source_type=type("SourceItem", (), {}), + ) + result = resolve_type_name(ti, "markdown") + + assert result == "SourceItem" + + def test_registered_newtype_unaffected(self) -> None: + """Registered NewType (int32) still resolves through the registry.""" + ti = TypeInfo( + base_type="int32", + kind=TypeKind.PRIMITIVE, + newtype_name="int32", + source_type=int, + ) + result = resolve_type_name(ti, "markdown") + + assert result == "int32" + + +class TestResolveTypeName: + """Tests for resolve_type_name with list/optional flags.""" + + def _make_type_info(self, **kwargs: object) -> TypeInfo: + defaults = {"base_type": "str", "kind": TypeKind.PRIMITIVE} + defaults.update(kwargs) + return TypeInfo(**defaults) # type: ignore[arg-type] + + def test_ignores_is_list(self) -> None: + """resolve_type_name returns the base type regardless of is_list.""" + ti = self._make_type_info(is_list=True) + assert resolve_type_name(ti, "markdown") == "string" + + def test_ignores_is_optional(self) -> None: + """resolve_type_name returns the base type regardless of is_optional.""" + ti = self._make_type_info(is_optional=True) + assert resolve_type_name(ti, "markdown") == "string" From 7c6a670815929713adcdf15168b5f9a7d896773a Mon Sep 17 00:00:00 2001 From: Seth 
Fitzsimmons Date: Tue, 24 Feb 2026 18:59:36 -0800 Subject: [PATCH 14/38] feat(codegen): add extraction modules Domain-specific extractors that consume analyze_type() and produce specs: - model_extraction: extract_model() for Pydantic models with MRO-aware field ordering, alias resolution, and recursive sub-model expansion via expand_model_tree() - enum_extraction: extract_enum() for DocumentedEnum classes - newtype_extraction: extract_newtype() for semantic NewTypes - primitive_extraction: extract_primitives() for numeric types with range and precision introspection - union_extraction: extract_union() with field merging across discriminated union variants Shared test fixtures in codegen_test_support.py. --- .../src/overture/schema/codegen/docstring.py | 6 +- .../schema/codegen/enum_extraction.py | 40 ++ .../schema/codegen/model_extraction.py | 208 +++++++ .../schema/codegen/newtype_extraction.py | 26 + .../schema/codegen/primitive_extraction.py | 101 ++++ .../src/overture/schema/codegen/specs.py | 2 +- .../schema/codegen/union_extraction.py | 163 ++++++ .../tests/codegen_test_support.py | 298 ++++++++++ .../overture-schema-codegen/tests/conftest.py | 61 ++ .../tests/test_enum_extraction.py | 148 +++++ .../tests/test_model_extractor.py | 546 ++++++++++++++++++ .../tests/test_newtype_extraction.py | 74 +++ .../tests/test_primitive_extraction.py | 71 +++ .../tests/test_specs.py | 226 ++++++++ .../tests/test_type_analyzer.py | 20 +- .../tests/test_union_extraction.py | 91 +++ 16 files changed, 2069 insertions(+), 12 deletions(-) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/enum_extraction.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/newtype_extraction.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py create mode 100644 
packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py create mode 100644 packages/overture-schema-codegen/tests/codegen_test_support.py create mode 100644 packages/overture-schema-codegen/tests/conftest.py create mode 100644 packages/overture-schema-codegen/tests/test_enum_extraction.py create mode 100644 packages/overture-schema-codegen/tests/test_model_extractor.py create mode 100644 packages/overture-schema-codegen/tests/test_newtype_extraction.py create mode 100644 packages/overture-schema-codegen/tests/test_primitive_extraction.py create mode 100644 packages/overture-schema-codegen/tests/test_specs.py create mode 100644 packages/overture-schema-codegen/tests/test_union_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py b/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py index 865692ea0..7dc2e112f 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py @@ -29,11 +29,11 @@ def clean_docstring(doc: str | None) -> str | None: return cleaned or None -def first_docstring_line(doc: str | None) -> str: - """Return the first line of a docstring, or empty string.""" +def first_docstring_line(doc: str | None) -> str | None: + """Return the first line of a docstring, or None if absent.""" cleaned = clean_docstring(doc) if not cleaned: - return "" + return None return cleaned.split("\n")[0] diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/enum_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/enum_extraction.py new file mode 100644 index 000000000..545979d66 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/enum_extraction.py @@ -0,0 +1,40 @@ +"""Enum extraction.""" + +from enum import Enum + +from .docstring import clean_docstring, is_custom_docstring +from .specs import 
EnumMemberSpec, EnumSpec + +__all__ = ["extract_enum"] + + +def extract_enum(enum_class: type[Enum]) -> EnumSpec: + """Extract enum specification from an Enum class. + + Handles both simple str Enums and DocumentedEnums where members + have per-value descriptions via the __doc__ attribute. + """ + class_doc = enum_class.__doc__ + description = clean_docstring(class_doc) if is_custom_docstring(class_doc) else None + + members: list[EnumMemberSpec] = [] + for member in enum_class: + member_doc = getattr(member, "__doc__", None) + member_description = ( + member_doc if is_custom_docstring(member_doc, class_doc) else None + ) + + members.append( + EnumMemberSpec( + name=member.name, + value=str(member.value), + description=member_description, + ) + ) + + return EnumSpec( + name=enum_class.__name__, + description=description, + members=members, + source_type=enum_class, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py new file mode 100644 index 000000000..4cb12c6f0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py @@ -0,0 +1,208 @@ +"""Model extraction and tree expansion.""" + +from __future__ import annotations + +import dataclasses + +from pydantic import BaseModel +from pydantic.fields import FieldInfo +from pydantic_core import PydanticUndefined + +from overture.schema.system.model_constraint import ModelConstraint + +from .docstring import clean_docstring +from .specs import FeatureSpec, FieldSpec, ModelSpec, is_model_class +from .type_analyzer import ConstraintSource, TypeInfo, TypeKind, analyze_type + +__all__ = [ + "expand_model_tree", + "extract_model", + "resolve_field_alias", +] + + +def resolve_field_alias(field_name: str, field_info: FieldInfo) -> str: + """Return the data-dict key for a Pydantic field. 
+ + Prefers ``validation_alias``, falls back to ``alias``, then the + Python field name. Only string aliases are supported; AliasPath + and AliasChoices are ignored. + """ + validation_alias = field_info.validation_alias + if isinstance(validation_alias, str): + return validation_alias + alias = field_info.alias + if isinstance(alias, str): + return alias + return field_name + + +def _merge_field_metadata(type_info: TypeInfo, field_info: FieldInfo) -> TypeInfo: + """Merge constraints from field_info.metadata into TypeInfo. + + Pydantic strips the Annotated wrapper from some fields (non-optional, + non-union) and moves the metadata to field_info.metadata. When this + happens, analyze_type sees a bare type and misses the constraints. + The two sets never overlap: field_info.metadata is empty when the + Annotated wrapper survives in the annotation. + """ + if not field_info.metadata: + return type_info + extra = tuple(ConstraintSource(None, m) for m in field_info.metadata) + return dataclasses.replace(type_info, constraints=type_info.constraints + extra) + + +def _is_field_required(field_info: FieldInfo, type_info: TypeInfo) -> bool: + """A field is required when it has no default and is not Optional.""" + has_default = ( + field_info.default is not PydanticUndefined + or field_info.default_factory is not None + ) + return not has_default and not type_info.is_optional + + +def _basemodel_bases(cls: type) -> list[type[BaseModel]]: + """Return direct BaseModel bases, excluding BaseModel itself.""" + return [b for b in cls.__bases__ if is_model_class(b) and b is not BaseModel] + + +def _class_order(model_class: type[BaseModel]) -> list[type]: + """Return MRO classes in documentation order, recursively. + + For single-inheritance: reversed MRO (base first, derived last). + For multiple-inheritance: primary chain → self → mixins, where + primary chain and each mixin are themselves recursively ordered. 
+ """ + bases = _basemodel_bases(model_class) + + if len(bases) <= 1: + return [ + cls + for cls in reversed(model_class.__mro__) + if isinstance(cls, type) + and issubclass(cls, BaseModel) + and cls is not BaseModel + ] + + primary = _class_order(bases[0]) + mixins = [cls for base in bases[1:] for cls in _class_order(base)] + return primary + [model_class] + mixins + + +def _field_order(model_class: type[BaseModel]) -> list[str]: + """Return model_fields keys in documentation order. + + Walks the class hierarchy recursively. At each level of multiple + inheritance, the first base is the "primary chain" and the rest + are "mixins." Primary chain and own fields come first, then mixin + fields in declaration order. Single-inheritance levels use + Pydantic's default reversed-MRO order. + """ + valid_names = set(model_class.model_fields.keys()) + result: list[str] = [] + seen: set[str] = set() + for cls in _class_order(model_class): + for name in getattr(cls, "__annotations__", {}): + if name not in seen and name in valid_names: + result.append(name) + seen.add(name) + return result + + +def extract_model( + model_class: type[BaseModel], + *, + entry_point: str | None = None, +) -> ModelSpec: + """Extract model specification from a Pydantic model class.""" + field_info_map = model_class.model_fields + ordered_keys = _field_order(model_class) + + fields: list[FieldSpec] = [] + for field_name in ordered_keys: + field_info = field_info_map[field_name] + output_name = resolve_field_alias(field_name, field_info) + + # Use field_info.annotation (resolved TypeVars) not get_type_hints + annotation = field_info.annotation + if annotation is None: + continue + + type_info = _merge_field_metadata(analyze_type(annotation), field_info) + + fields.append( + FieldSpec( + name=output_name, + type_info=type_info, + description=field_info.description or type_info.description, + is_required=_is_field_required(field_info, type_info), + ) + ) + + return ModelSpec( + 
name=model_class.__name__, + description=clean_docstring(model_class.__doc__), + fields=fields, + source_type=model_class, + entry_point=entry_point, + constraints=ModelConstraint.get_model_constraints(model_class), + ) + + +def expand_model_tree( + spec: FeatureSpec, + cache: dict[type, ModelSpec] | None = None, +) -> FeatureSpec: + """Populate model references on MODEL-kind fields, recursively. + + Walks *spec*'s fields and sets ``field.model`` for fields whose type + is a Pydantic model. Uses *cache* to reuse already-extracted ModelSpecs + and detect shared references. Marks fields whose model creates a cycle + in the ancestor chain with ``starts_cycle=True``. + + Mutates *spec* in place and returns it. + """ + if cache is None: + cache = {} + if spec.source_type is not None: + # Only ModelSpec has non-None source_type; UnionSpec.source_type is always None + assert isinstance(spec, ModelSpec) + cache[spec.source_type] = spec + ancestors = frozenset({spec.source_type}) if spec.source_type else frozenset() + _expand_fields(spec.fields, cache, ancestors) + return spec + + +def _expand_fields( + fields: list[FieldSpec], + cache: dict[type, ModelSpec], + ancestors: frozenset[type], +) -> None: + """Recursive helper for expand_model_tree. + + Cache insertion happens before recursion — cycle detection depends + on the ancestor's ModelSpec being in the cache when the back-edge + is encountered. + """ + for field_spec in fields: + ti = field_spec.type_info + source = ti.source_type + if ti.kind == TypeKind.UNION: + # Union fields have no single model to recurse into. + # The field row appears in the output; skip inline expansion. 
+ continue + if ti.kind != TypeKind.MODEL or source is None: + continue + + if source in ancestors: + # Cycle: reuse existing spec, mark the edge + field_spec.model = cache.get(source) + field_spec.starts_cycle = True + elif source in cache: + # Shared reference: reuse, not a cycle + field_spec.model = cache[source] + else: + sub_spec = extract_model(source) + cache[source] = sub_spec # insert BEFORE recursing + field_spec.model = sub_spec + _expand_fields(sub_spec.fields, cache, ancestors | {source}) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/newtype_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/newtype_extraction.py new file mode 100644 index 000000000..ff11c770a --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/newtype_extraction.py @@ -0,0 +1,26 @@ +"""NewType extraction.""" + +from .docstring import clean_docstring, is_custom_docstring +from .specs import NewTypeSpec +from .type_analyzer import analyze_type + +__all__ = ["extract_newtype"] + + +def extract_newtype(newtype_callable: object) -> NewTypeSpec: + """Extract NewType specification from a NewType callable.""" + type_info = analyze_type(newtype_callable) + doc = getattr(newtype_callable, "__doc__", None) + name = type_info.newtype_name or getattr(newtype_callable, "__name__", None) + if name is None: + msg = f"Cannot determine name for NewType: {newtype_callable!r}" + raise ValueError(msg) + description = ( + clean_docstring(doc) if is_custom_docstring(doc) else type_info.description + ) + return NewTypeSpec( + name=name, + description=description, + type_info=type_info, + source_type=newtype_callable, + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py new file mode 100644 index 000000000..351c5051c --- /dev/null +++ 
b/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py @@ -0,0 +1,101 @@ +"""Primitive extraction and partitioning.""" + +from annotated_types import Interval + +from .docstring import first_docstring_line +from .newtype_extraction import extract_newtype +from .specs import PrimitiveSpec +from .type_analyzer import TypeInfo, is_newtype + +__all__ = [ + "extract_numeric_bounds", + "extract_primitives", + "partition_primitive_and_geometry_names", +] + + +# Bound attribute names on annotated_types constraint objects (Ge, Gt, Le, +# Lt, Interval) used for numeric bound extraction. +# +# field_constraint_description.py has its own _BOUND_OPS for display formatting. +# The duplication is deliberate: these modules use the same attribute names +# for unrelated purposes (numeric extraction vs. prose rendering), and +# coupling them for four string literals adds a dependency without value. +_BOUND_ATTRS = ("ge", "gt", "le", "lt") + + +def extract_numeric_bounds(type_info: TypeInfo) -> Interval: + """Extract numeric bounds from a TypeInfo's constraints. + + Checks for ge, gt, le, and lt attributes on constraint objects. + Stops at the first constraint defining each bound. + """ + found: dict[str, int | float] = {} + for cs in type_info.constraints: + c = cs.constraint + for attr in _BOUND_ATTRS: + if attr not in found: + val = getattr(c, attr, None) + if val is not None: + found[attr] = val + return Interval(**found) + + +def extract_primitives( + primitive_names: list[str], + primitive_module: object, +) -> list[PrimitiveSpec]: + """Extract specifications for numeric primitive types. + + Resolves each name against the given module, extracts its NewType + spec, docstring, and numeric bounds. 
+ """ + specs: list[PrimitiveSpec] = [] + for name in primitive_names: + obj = getattr(primitive_module, name) + newtype_spec = extract_newtype(obj) + bounds = extract_numeric_bounds(newtype_spec.type_info) + description = first_docstring_line(getattr(obj, "__doc__", None)) + float_bits = _extract_float_bits(name) + specs.append( + PrimitiveSpec( + name=name, + description=description, + bounds=bounds, + float_bits=float_bits, + ) + ) + return specs + + +_FLOAT_BITS: dict[str, int] = { + "float32": 32, + "float64": 64, +} + + +def _extract_float_bits(name: str) -> int | None: + """Extract bit width from a float type name like 'float32'.""" + return _FLOAT_BITS.get(name) + + +def partition_primitive_and_geometry_names( + primitive_module: object, +) -> tuple[list[str], list[str]]: + """Discover primitive and geometry type names from a module's exports. + + NewType exports are numeric primitives. + Non-constraint class/enum exports are geometry types. + """ + module_all: list[str] = getattr(primitive_module, "__all__", []) + primitives: list[str] = [] + geometries: list[str] = [] + + for name in module_all: + obj = getattr(primitive_module, name) + if is_newtype(obj): + primitives.append(name) + elif isinstance(obj, type) and not name.endswith("Constraint"): + geometries.append(name) + + return primitives, geometries diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py index bb5c3fcac..0d81f8e1f 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py @@ -133,7 +133,7 @@ class PrimitiveSpec: """Extracted specification for a numeric primitive type.""" name: str - description: str + description: str | None bounds: Interval = field(default_factory=Interval) float_bits: int | None = None diff --git 
a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py new file mode 100644 index 000000000..0256c0589 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py @@ -0,0 +1,163 @@ +"""Union extraction and discriminator handling.""" + +from __future__ import annotations + +from typing import Annotated, get_args, get_origin + +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from overture.schema.system.feature import resolve_discriminator_field_name + +from .model_extraction import extract_model, resolve_field_alias +from .specs import AnnotatedField, UnionSpec, is_model_class +from .type_analyzer import TypeInfo, TypeKind, analyze_type, single_literal_value + +__all__ = ["extract_discriminator", "extract_union"] + + +def _extract_annotated_description(annotation: object) -> str | None: + """Extract description from Annotated metadata (FieldInfo).""" + if get_origin(annotation) is not Annotated: + return None + for metadata in get_args(annotation)[1:]: + if isinstance(metadata, FieldInfo) and metadata.description: + return metadata.description + return None + + +def _find_common_base(members: list[type[BaseModel]]) -> type[BaseModel]: + """Find the most-derived common BaseModel ancestor of all members.""" + filtered_mros = [ + [c for c in cls.__mro__ if is_model_class(c) and c is not BaseModel] + for cls in members + ] + common = set(filtered_mros[0]) + for mro in filtered_mros[1:]: + common &= set(mro) + if not common: + raise ValueError( + f"No common BaseModel ancestor for {[m.__name__ for m in members]}" + ) + + def max_mro_index(cls: type) -> int: + return max(mro.index(cls) for mro in filtered_mros) + + return min(common, key=max_mro_index) + + +def _discriminator_field_from_metadata(field_info: FieldInfo) -> str | None: + """Extract a discriminator field name from a FieldInfo's 
discriminator.""" + return resolve_discriminator_field_name(field_info.discriminator) + + +def _find_field_by_alias(model: type[BaseModel], alias: str) -> FieldInfo | None: + """Find a field in model_fields by alias-resolved name.""" + direct = model.model_fields.get(alias) + if direct is not None: + return direct + for py_name, fi in model.model_fields.items(): + if resolve_field_alias(py_name, fi) == alias: + return fi + return None + + +def extract_discriminator( + annotation: object, + members: list[type[BaseModel]], +) -> tuple[str | None, dict[str, type[BaseModel]] | None]: + """Extract discriminator field name and value-to-type mapping.""" + if get_origin(annotation) is not Annotated: + return None, None + + disc_field_name: str | None = None + for metadata in get_args(annotation)[1:]: + if isinstance(metadata, FieldInfo): + disc_field_name = _discriminator_field_from_metadata(metadata) + if disc_field_name is not None: + break + + if disc_field_name is None: + return None, None + + mapping: dict[str, type[BaseModel]] = {} + for member in members: + field_info = _find_field_by_alias(member, disc_field_name) + if field_info and field_info.annotation is not None: + lit_val = single_literal_value(field_info.annotation) + if lit_val is not None: + mapping[str(lit_val)] = member + + return disc_field_name, mapping or None + + +_TypeIdentity = tuple[str, TypeKind, bool, bool] +_FieldKey = tuple[str, _TypeIdentity] + + +def _type_identity(ti: TypeInfo) -> _TypeIdentity: + """Stable identity for dedup — excludes source_type which can vary across members.""" + return (ti.base_type, ti.kind, ti.is_optional, ti.is_list) + + +def extract_union( + name: str, + annotation: object, + *, + entry_point: str | None = None, +) -> UnionSpec: + """Extract a UnionSpec from a discriminated union type alias.""" + ti = analyze_type(annotation) + if ti.kind != TypeKind.UNION or ti.union_members is None: + raise TypeError(f"{name} is not a union type alias") + + members = 
list(ti.union_members) + common_base = _find_common_base(members) + + base_spec = extract_model(common_base) + shared_field_names = {f.name for f in base_spec.fields} + + member_specs = [(m, extract_model(m)) for m in members] + + annotated_fields: list[AnnotatedField] = [] + + # Shared fields first (from common base) + for fs in base_spec.fields: + annotated_fields.append(AnnotatedField(field_spec=fs, variant_sources=None)) + + # Variant-specific fields: collect by (name, type identity) for dedup + seen: dict[_FieldKey, AnnotatedField] = {} + + for member_cls, member_spec in member_specs: + for fs in member_spec.fields: + if fs.name in shared_field_names: + continue + key = (fs.name, _type_identity(fs.type_info)) + if key in seen: + existing = seen[key] + existing_sources = existing.variant_sources or () + seen[key] = AnnotatedField( + field_spec=fs, + variant_sources=(*existing_sources, member_cls.__name__), + ) + else: + seen[key] = AnnotatedField( + field_spec=fs, + variant_sources=(member_cls.__name__,), + ) + + annotated_fields.extend(seen.values()) + + disc_field, disc_mapping = extract_discriminator(annotation, members) + + return UnionSpec( + name=name, + description=_extract_annotated_description(annotation), + annotated_fields=annotated_fields, + members=members, + discriminator_field=disc_field, + discriminator_mapping=disc_mapping, + source_annotation=annotation, + common_base=common_base, + entry_point=entry_point, + ) diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py new file mode 100644 index 000000000..e573153ad --- /dev/null +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -0,0 +1,298 @@ +"""Shared test support for overture-schema-codegen tests. + +Provides reusable model fixtures and helpers. Pytest fixtures are in conftest.py. 
+""" + +from __future__ import annotations + +from difflib import unified_diff +from enum import Enum +from pathlib import Path +from typing import Annotated, Generic, Literal, NewType, TypeVar + +import pytest +from overture.schema.codegen.model_extraction import extract_model +from overture.schema.codegen.specs import ( + AnnotatedField, + FieldSpec, + ModelSpec, + UnionSpec, + is_model_class, +) +from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind +from overture.schema.core.discovery import discover_models +from overture.schema.system.doc import DocumentedEnum +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.model_constraint import require_any_of +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, + float64, + int32, +) +from overture.schema.system.ref import Id, Identified, Reference, Relationship +from overture.schema.system.string import HexColor, LanguageTag, StrippedString +from pydantic import BaseModel, Field + +STR_TYPE = TypeInfo(base_type="str", kind=TypeKind.PRIMITIVE) + +ThemeT = TypeVar("ThemeT") +TypeT = TypeVar("TypeT") + + +class SimpleModel(BaseModel): + """A simple model.""" + + name: str + + +class FeatureBase(BaseModel, Generic[ThemeT, TypeT]): + """Base class mimicking OvertureFeature pattern for tests.""" + + theme: ThemeT + type: TypeT + + +# Separate TypeVars from ThemeT/TypeT: IdentifiedFeature models a +# non-Overture user building on Identified with their own nomenclature. 
+CategoryT = TypeVar("CategoryT") +KindT = TypeVar("KindT") + + +class IdentifiedFeature(Identified, Generic[CategoryT, KindT]): + """Feature with identity and typed category/kind.""" + + category: CategoryT + kind: KindT + + +class InstrumentFamily(str, DocumentedEnum): + """Classification by sound production method.""" + + STRING = "string", "Sound from vibrating strings" + WIND = "wind", "Sound from vibrating air column" + PERCUSSION = "percussion" + + +class SimpleKind(str, Enum): + SMALL = "small" + LARGE = "large" + + +class Instrument( + IdentifiedFeature[Literal["music"], Literal["instrument"]], +): + """A musical instrument. + + Instruments produce sound through vibration. They are classified + by how sound is produced. + """ + + name: str = Field(description="Common name") + tuning: float64 | None = Field( + None, + description=("Concert pitch in Hz.\n\nStandard tuning is 440 Hz."), + ) + num_strings: int32 | None = Field(None) + family: InstrumentFamily | None = None + color: HexColor | None = Field(None, description="Body color") + tags: Annotated[list[str], UniqueItemsConstraint()] | None = None + + +@require_any_of("name", "description") +class Venue( + IdentifiedFeature[Literal["music"], Literal["venue"]], +): + """A concert venue. + + A location where musical performances take place. 
+ """ + + name: str | None = Field(None, description="Venue name") + description: str | None = None + geometry: Annotated[ + Geometry, + GeometryTypeConstraint(GeometryType.POINT, GeometryType.POLYGON), + ] + capacity: Annotated[int, Field(ge=1)] | None = None + resident_ensemble: ( + Annotated[Id, Reference(Relationship.BELONGS_TO, Instrument)] | None + ) = None + + +class SourceItem(BaseModel): + """A source data reference.""" + + dataset: str = Field(description="Source dataset name") + + +Sources = NewType( + "Sources", + Annotated[ + list[SourceItem], + Field(min_length=1, description="Source data references"), + UniqueItemsConstraint(), + ], +) + + +class FeatureWithSources( + FeatureBase[Literal["test"], Literal["sourced"]], +): + """A feature with a Sources field.""" + + name: str = Field(description="Feature name") + sources: Sources | None = None + + +class Address(BaseModel): + """A mailing address.""" + + street: str = Field(description="Street name") + city: str = Field(description="City name") + zip_code: str | None = Field(None, description="Postal code") + + +class FeatureWithAddress( + FeatureBase[Literal["test"], Literal["addressed"]], +): + """A feature with an address field.""" + + title: str = Field(description="Feature title") + address: Address + + +class TreeNode(BaseModel): + """A recursive tree node.""" + + label: str = Field(description="Node label") + parent: TreeNode | None = None + + +class Widget(BaseModel): + active: bool + label: str = Field(description="Display label") + + +CommonNames = NewType("CommonNames", dict[LanguageTag, StrippedString]) + + +class FeatureWithDict( + FeatureBase[Literal["test"], Literal["dictfeat"]], +): + """A feature with dict fields.""" + + name: str = Field(description="Feature name") + names: CommonNames | None = Field(None, description="Localized names") + alt_names: dict[LanguageTag, StrippedString] | None = Field( + None, description="Alternate localized names" + ) + tags: dict[str, str] | None = 
Field(None, description="Arbitrary tags") + metadata: dict[str, int] = Field(description="Numeric metadata") + + +class SegmentBase(BaseModel): + """Common base for test segments.""" + + geometry: str + subtype: str + + +class RoadSegment(SegmentBase): + subtype: Literal["road"] + class_: Annotated[str, Field(alias="class")] + speed_limit: int | None = None + + +class RailSegment(SegmentBase): + subtype: Literal["rail"] + class_: Annotated[int, Field(alias="class")] + rail_gauge: float | None = None + + +class WaterSegment(SegmentBase): + subtype: Literal["water"] + + +TestSegment = Annotated[ + RoadSegment | RailSegment | WaterSegment, + Field(description="Test segment union"), +] + + +def make_union_spec( + name: str = "TestUnion", + *, + description: str | None = None, + annotated_fields: list[AnnotatedField] | None = None, + members: list[type[BaseModel]] | None = None, + source_annotation: object = None, + common_base: type[BaseModel] | None = None, + entry_point: str | None = None, +) -> UnionSpec: + """Build a UnionSpec with sensible defaults for tests.""" + return UnionSpec( + name=name, + description=description, + annotated_fields=annotated_fields or [], + members=members or [], + discriminator_field=None, + discriminator_mapping=None, + source_annotation=source_annotation, + common_base=common_base or BaseModel, + entry_point=entry_point, + ) + + +def find_field(spec: ModelSpec, name: str) -> FieldSpec: + """Find a field by name in a ModelSpec, raising if missing.""" + return next(f for f in spec.fields if f.name == name) + + +def assert_literal_field( + spec: ModelSpec, field_name: str, expected_value: object +) -> None: + """Assert a field is a single-value Literal with the expected value.""" + field = find_field(spec, field_name) + assert field.type_info.kind == TypeKind.LITERAL + assert field.type_info.literal_value == expected_value + + +def flat_specs_from_discovery( + theme: str | None = None, +) -> list[ModelSpec]: + """Build a flat list of 
ModelSpecs from discovery, with entry_point set.""" + models = discover_models() + if theme: + models = {k: v for k, v in models.items() if k.theme == theme} + result = [] + for key, cls in models.items(): + if not is_model_class(cls): + continue + result.append(extract_model(cls, entry_point=key.entry_point)) + return result + + +def assert_golden(actual: str, golden_path: Path, *, update: bool) -> None: + """Compare rendered output against a golden file. + + When update is True, writes actual content to the golden file + instead of comparing. + """ + if update: + golden_path.parent.mkdir(parents=True, exist_ok=True) + golden_path.write_text(actual) + return + expected = golden_path.read_text() + if actual != expected: + diff = "\n".join( + unified_diff( + expected.splitlines(), + actual.splitlines(), + fromfile=str(golden_path), + tofile="actual", + lineterm="", + ) + ) + pytest.fail(f"Golden file mismatch:\n{diff}") diff --git a/packages/overture-schema-codegen/tests/conftest.py b/packages/overture-schema-codegen/tests/conftest.py new file mode 100644 index 000000000..f95e74d46 --- /dev/null +++ b/packages/overture-schema-codegen/tests/conftest.py @@ -0,0 +1,61 @@ +"""Shared pytest fixtures for overture-schema-codegen tests.""" + +import pytest +from overture.schema.codegen.model_extraction import extract_model +from overture.schema.codegen.specs import ModelSpec +from overture.schema.core.discovery import discover_models +from pydantic import BaseModel + + +def pytest_addoption(parser: pytest.Parser) -> None: + parser.addoption( + "--update-golden", + action="store_true", + default=False, + help="Regenerate golden files instead of comparing against them", + ) + + +@pytest.fixture +def update_golden(request: pytest.FixtureRequest) -> bool: + return bool(request.config.getoption("--update-golden")) + + +def _find_model_class(name: str, models: dict[object, object]) -> type[BaseModel]: + """Find a discovered model class by name.""" + matches = [v for v in 
models.values() if getattr(v, "__name__", None) == name] + assert matches, f"{name} model not found" + match = matches[0] + assert isinstance(match, type) + assert issubclass(match, BaseModel) + return match + + +@pytest.fixture +def all_discovered_models() -> dict: + """Discover and return all registered Overture models.""" + return discover_models() + + +@pytest.fixture +def building_class(all_discovered_models: dict) -> type[BaseModel]: + """Get the Building model class.""" + return _find_model_class("Building", all_discovered_models) + + +@pytest.fixture +def building_spec(building_class: type[BaseModel]) -> ModelSpec: + """Extract the Building model spec.""" + return extract_model(building_class) + + +@pytest.fixture +def place_class(all_discovered_models: dict) -> type[BaseModel]: + """Get the Place model class.""" + return _find_model_class("Place", all_discovered_models) + + +@pytest.fixture +def division_class(all_discovered_models: dict) -> type[BaseModel]: + """Get the Division model class.""" + return _find_model_class("Division", all_discovered_models) diff --git a/packages/overture-schema-codegen/tests/test_enum_extraction.py b/packages/overture-schema-codegen/tests/test_enum_extraction.py new file mode 100644 index 000000000..f51f7f707 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_enum_extraction.py @@ -0,0 +1,148 @@ +"""Tests for enum extraction.""" + +from enum import Enum + +from overture.schema.codegen.enum_extraction import extract_enum +from overture.schema.codegen.specs import EnumMemberSpec, EnumSpec +from overture.schema.system.doc import DocumentedEnum + + +class TestEnumMemberSpec: + """Tests for EnumMemberSpec dataclass.""" + + def test_stores_name_value_description(self) -> None: + """EnumMemberSpec should store name, value, and description.""" + member = EnumMemberSpec( + name="GABLED", value="gabled", description="A gabled roof" + ) + + assert member.name == "GABLED" + assert member.value == "gabled" + assert 
member.description == "A gabled roof" + + def test_description_can_be_none(self) -> None: + """EnumMemberSpec description should be optional.""" + member = EnumMemberSpec(name="FLAT", value="flat", description=None) + + assert member.description is None + + +class TestEnumSpec: + """Tests for EnumSpec dataclass.""" + + def test_stores_name_description_members(self) -> None: + """EnumSpec should store name, description, and members list.""" + members = [ + EnumMemberSpec(name="A", value="a", description=None), + EnumMemberSpec(name="B", value="b", description="The letter B"), + ] + + spec = EnumSpec( + name="Letters", description="A collection of letters", members=members + ) + + assert spec.name == "Letters" + assert spec.description == "A collection of letters" + assert len(spec.members) == 2 + + +class TestExtractEnumSimple: + """Tests for extract_enum with simple str Enum classes.""" + + def test_extracts_simple_str_enum(self) -> None: + """Should extract name, description, and members from simple str Enum.""" + + class RoofShape(str, Enum): + """The shape of the roof.""" + + FLAT = "flat" + GABLED = "gabled" + DOMED = "dome" + + result = extract_enum(RoofShape) + + assert result.name == "RoofShape" + assert result.description == "The shape of the roof." 
+ assert len(result.members) == 3 + + # Check member extraction + flat = next(m for m in result.members if m.name == "FLAT") + assert flat.value == "flat" + assert flat.description is None + + gabled = next(m for m in result.members if m.name == "GABLED") + assert gabled.value == "gabled" + + def test_enum_without_docstring(self) -> None: + """Should handle enum without docstring.""" + + class SimpleEnum(str, Enum): + A = "a" + B = "b" + + result = extract_enum(SimpleEnum) + + assert result.name == "SimpleEnum" + assert result.description is None + + +class TestExtractEnumDocumented: + """Tests for extract_enum with DocumentedEnum classes.""" + + def test_extracts_documented_enum_with_member_descriptions(self) -> None: + """Should extract per-member descriptions from DocumentedEnum.""" + + class Side(str, DocumentedEnum): + """The side on which something appears.""" + + LEFT = ("left", "On the left side") + RIGHT = ("right", "On the right side") + + result = extract_enum(Side) + + assert result.name == "Side" + assert result.description == "The side on which something appears." 
+ assert len(result.members) == 2 + + left = next(m for m in result.members if m.name == "LEFT") + assert left.value == "left" + assert left.description == "On the left side" + + right = next(m for m in result.members if m.name == "RIGHT") + assert right.value == "right" + assert right.description == "On the right side" + + def test_documented_enum_with_mixed_documentation(self) -> None: + """DocumentedEnum can have some members documented and others not.""" + + class ConnectionState(str, DocumentedEnum): + """Connection states.""" + + CONNECTED = "connected" + DISCONNECTED = "disconnected" + QUIESCING = ("quiescing", "Gracefully shutting down") + + result = extract_enum(ConnectionState) + + connected = next(m for m in result.members if m.name == "CONNECTED") + assert connected.value == "connected" + assert connected.description is None + + quiescing = next(m for m in result.members if m.name == "QUIESCING") + assert quiescing.value == "quiescing" + assert quiescing.description == "Gracefully shutting down" + + +class TestEnumSpecSourceType: + """Tests for source_type on EnumSpec.""" + + def test_enum_spec_source_type_defaults_to_none(self) -> None: + spec = EnumSpec(name="Test", description=None) + assert spec.source_type is None + + def test_extract_enum_sets_source_type(self) -> None: + class Color(str, Enum): + RED = "red" + + spec = extract_enum(Color) + assert spec.source_type is Color diff --git a/packages/overture-schema-codegen/tests/test_model_extractor.py b/packages/overture-schema-codegen/tests/test_model_extractor.py new file mode 100644 index 000000000..e3ba788c6 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_model_extractor.py @@ -0,0 +1,546 @@ +"""Tests for model extraction.""" + +from typing import Annotated, Literal + +from codegen_test_support import ( + FeatureBase, + FeatureWithAddress, + Instrument, + SourceItem, + TreeNode, + Venue, + assert_literal_field, + find_field, +) +from overture.schema.codegen.model_extraction import 
expand_model_tree, extract_model +from overture.schema.codegen.specs import ModelSpec +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.model_constraint import ( + FieldEqCondition, + FieldGroupConstraint, + require_any_of, + require_if, +) +from overture.schema.system.primitive import ( + Geometry, + GeometryType, + GeometryTypeConstraint, +) +from overture.schema.system.string import HexColor +from pydantic import BaseModel, Field + + +class TestModelConstraints: + """Model-level constraint extraction.""" + + def test_unconstrained_model_has_empty_constraints(self) -> None: + """Models without decorators produce an empty constraints tuple.""" + + class Plain(BaseModel): + name: str + + spec = extract_model(Plain) + + assert spec.constraints == () + + def test_extracts_require_any_of(self) -> None: + """Should extract @require_any_of from a decorated model.""" + spec = extract_model(Venue) + + assert len(spec.constraints) == 1 + (constraint,) = spec.constraints + assert constraint.name == "@require_any_of" + assert isinstance(constraint, FieldGroupConstraint) + assert constraint.field_names == ("name", "description") + + def test_stacked_constraints_preserve_order(self) -> None: + """Multiple decorators extracted in stacking order (inner-first).""" + + @require_if(["bar"], FieldEqCondition("baz", "x")) + @require_any_of("foo", "bar") + class Stacked(BaseModel): + foo: str | None = None + bar: str | None = None + baz: str | None = None + + spec = extract_model(Stacked) + + assert len(spec.constraints) == 2 + assert spec.constraints[0].name == "@require_any_of" + assert spec.constraints[1].name == "@require_if" + + +class TestExtractModelSimple: + """Tests for extract_model with simple Pydantic models.""" + + def test_extract_simple_model(self) -> None: + """Should extract basic model information.""" + + class SimpleModel(BaseModel): + """A simple test model.""" + + name: str + + result = 
extract_model(SimpleModel) + + assert result.name == "SimpleModel" + assert result.description == "A simple test model." + assert len(result.fields) == 1 + assert result.fields[0].name == "name" + assert result.fields[0].type_info.base_type == "str" + assert result.fields[0].is_required is True + + def test_extract_model_does_not_set_entry_point(self) -> None: + class M(BaseModel): + x: int + + result = extract_model(M) + assert result.entry_point is None + + def test_extract_model_with_optional_field(self) -> None: + """Should handle optional fields correctly.""" + + class ModelWithOptional(BaseModel): + """Model with optional field.""" + + name: str + nickname: str | None = None + + result = extract_model(ModelWithOptional) + + assert len(result.fields) == 2 + + name_field = find_field(result, "name") + assert name_field.is_required is True + + nickname_field = find_field(result, "nickname") + assert nickname_field.is_required is False + assert nickname_field.type_info.is_optional is True + + def test_extract_model_with_field_description(self) -> None: + """Should extract field descriptions from Field().""" + + class ModelWithDescription(BaseModel): + """Model with field descriptions.""" + + name: str = Field(description="The name of the entity") + + result = extract_model(ModelWithDescription) + + assert result.fields[0].description == "The name of the entity" + + def test_extract_model_with_list_field(self) -> None: + """Should handle list fields correctly.""" + + class ModelWithList(BaseModel): + """Model with list field.""" + + tags: list[str] + + result = extract_model(ModelWithList) + + tags_field = result.fields[0] + assert tags_field.name == "tags" + assert tags_field.type_info.is_list is True + assert tags_field.type_info.base_type == "str" + + +class TestExtractModelWithThemeType: + """Tests for extracting theme/type from Feature-like models.""" + + def test_extract_theme_and_type_from_generic(self) -> None: + """Should extract theme and type as Literal 
fields.""" + + class Place(FeatureBase[Literal["places"], Literal["place"]]): + """A place feature.""" + + name: str + + result = extract_model(Place) + assert_literal_field(result, "theme", "places") + assert_literal_field(result, "type", "place") + + def test_extract_different_theme_type(self) -> None: + """Should handle different theme/type values as Literal fields.""" + + class Building(FeatureBase[Literal["buildings"], Literal["building"]]): + """A building feature.""" + + height: float | None = None + + result = extract_model(Building) + assert_literal_field(result, "theme", "buildings") + assert_literal_field(result, "type", "building") + + def test_non_feature_model_has_no_theme_type(self) -> None: + """Regular models without Generic base should have no theme/type fields.""" + + class RegularModel(BaseModel): + """A regular model.""" + + value: int + + result = extract_model(RegularModel) + + field_names = [f.name for f in result.fields] + assert "theme" not in field_names + assert "type" not in field_names + + +class TestExtractModelFieldAlias: + """Tests for field alias handling in extract_model.""" + + def test_field_with_alias_uses_alias_name(self) -> None: + """Fields with alias should use alias as the field name, not Python attr name.""" + + class ModelWithAlias(BaseModel): + """Model with aliased field.""" + + class_: str | None = Field(default=None, alias="class") + + result = extract_model(ModelWithAlias) + + # Should use alias 'class', not Python name 'class_' + class_field = result.fields[0] + assert class_field.name == "class" + + def test_field_without_alias_uses_python_name(self) -> None: + """Fields without alias should use Python attribute name.""" + + class ModelWithoutAlias(BaseModel): + """Model without alias.""" + + name: str + + result = extract_model(ModelWithoutAlias) + + assert result.fields[0].name == "name" + + +class TestExtractModelDocstring: + """Tests for docstring extraction and cleaning.""" + + def 
test_multiline_docstring_has_indentation_stripped(self) -> None: + """Multi-line docstrings should have leading whitespace stripped. + + Docstrings defined in classes have leading whitespace on continuation + lines. This should be stripped so they render as normal paragraphs + in Markdown, not as code blocks. + """ + + class ModelWithMultilineDoc(BaseModel): + """A model with multi-line docstring. + + This is a second paragraph that would have leading + whitespace in the raw __doc__ attribute. + """ + + name: str + + result = extract_model(ModelWithMultilineDoc) + + # Description should NOT have leading whitespace on continuation lines + assert result.description is not None + assert "\n " not in result.description + # Should still have the content + assert "second paragraph" in result.description + + +class TestFieldOrderingWithMixins: + """Tests for field ordering when a model has multiple inheritance.""" + + def test_mixin_fields_come_after_primary_chain_and_own(self) -> None: + """Fields from mixin bases should appear after primary chain and own fields.""" + + class PrimaryBase(BaseModel): + base_field: str + + class MixinA(BaseModel): + a_field: str + + class MixinB(BaseModel): + b_field: str + + class Child(PrimaryBase, MixinA, MixinB): + """A child model with mixins.""" + + own_field: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + assert field_names == ["base_field", "own_field", "a_field", "b_field"] + + def test_single_inheritance_order_unchanged(self) -> None: + """Single-inheritance models should keep Pydantic's default order.""" + + class Parent(BaseModel): + parent_field: str + + class Child(Parent): + """A child model.""" + + child_field: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + assert field_names == ["parent_field", "child_field"] + + def test_mixin_fields_in_declaration_order(self) -> None: + """Mixin fields should appear in class declaration order, not 
reversed MRO.""" + + class Primary(BaseModel): + p: str + + class MixinFirst(BaseModel): + first: str + + class MixinSecond(BaseModel): + second: str + + class MixinThird(BaseModel): + third: str + + class Model(Primary, MixinFirst, MixinSecond, MixinThird): + """Model with three mixins.""" + + own: str + + result = extract_model(Model) + field_names = [f.name for f in result.fields] + + # Mixins in declaration order: First, Second, Third + assert field_names == ["p", "own", "first", "second", "third"] + + def test_deep_primary_chain_before_mixins(self) -> None: + """Fields from the entire primary chain should precede mixin fields.""" + + class GrandParent(BaseModel): + gp_field: str + + class Parent(GrandParent): + p_field: str + + class Mixin(BaseModel): + m_field: str + + class Child(Parent, Mixin): + """Child with deep primary chain.""" + + own_field: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + assert field_names == ["gp_field", "p_field", "own_field", "m_field"] + + def test_recursive_mixin_reordering(self) -> None: + """Mixins on primary-chain classes should also be reordered.""" + + class CoreBase(BaseModel): + core: str + + class ParentMixin(BaseModel): + pm: str + + class Parent(CoreBase, ParentMixin): + p: str + + class ChildMixin(BaseModel): + cm: str + + class Child(Parent, ChildMixin): + """Child where primary-chain parent has its own mixin.""" + + own: str + + result = extract_model(Child) + field_names = [f.name for f in result.fields] + + # CoreBase (Parent's primary) -> Parent own -> ParentMixin -> Child own -> ChildMixin + assert field_names == ["core", "p", "pm", "own", "cm"] + + +class TestExpandModelTree: + """Tests for expand_model_tree.""" + + def test_model_without_sub_models_unchanged(self) -> None: + """Fields without MODEL kind get model=None.""" + + class Simple(BaseModel): + name: str + count: int + + spec = extract_model(Simple) + expand_model_tree(spec) + + for f in spec.fields: + assert 
f.model is None + assert f.starts_cycle is False + + def test_nested_model_gets_expanded(self) -> None: + """MODEL-kind fields get their model populated.""" + spec = extract_model(FeatureWithAddress) + expand_model_tree(spec) + + addr_field = find_field(spec, "address") + assert addr_field.model is not None + assert addr_field.model.name == "Address" + assert addr_field.starts_cycle is False + + # Sub-model fields should exist + sub_names = [f.name for f in addr_field.model.fields] + assert "street" in sub_names + assert "city" in sub_names + + def test_cycle_detected_and_marked(self) -> None: + """Self-referential model gets starts_cycle=True.""" + spec = extract_model(TreeNode) + expand_model_tree(spec) + + parent_field = find_field(spec, "parent") + assert parent_field.model is not None + assert parent_field.model is spec # Same object -- cycle + assert parent_field.starts_cycle is True + + def test_shared_reference_not_marked_as_cycle(self) -> None: + """Two models referencing the same sub-model share it without cycle.""" + + class Shared(BaseModel): + value: str + + class ModelA(BaseModel): + ref: Shared + + class ModelB(BaseModel): + ref: Shared + + cache: dict[type, ModelSpec] = {} + spec_a = extract_model(ModelA) + expand_model_tree(spec_a, cache) + + spec_b = extract_model(ModelB) + expand_model_tree(spec_b, cache) + + ref_a = find_field(spec_a, "ref") + ref_b = find_field(spec_b, "ref") + + # Same ModelSpec object, neither is a cycle + assert ref_a.model is ref_b.model + assert ref_a.starts_cycle is False + assert ref_b.starts_cycle is False + + def test_list_of_model_gets_expanded(self) -> None: + """list[Model] fields also get their model populated.""" + + class HasList(BaseModel): + items: list[SourceItem] + + spec = extract_model(HasList) + expand_model_tree(spec) + + items_field = find_field(spec, "items") + assert items_field.model is not None + assert items_field.model.name == "SourceItem" + + +class TestFieldInfoMetadataConstraints: + 
"""Constraints from field_info.metadata are merged into TypeInfo. + + Pydantic strips the Annotated wrapper from some fields and moves the + metadata to field_info.metadata. extract_model merges these back into + TypeInfo.constraints so they aren't silently dropped. + """ + + def test_geometry_type_constraint_extracted(self) -> None: + """GeometryTypeConstraint on geometry field should appear in constraints.""" + spec = extract_model(Venue) + geometry_field = find_field(spec, "geometry") + + constraint_types = [ + type(cs.constraint) for cs in geometry_field.type_info.constraints + ] + assert GeometryTypeConstraint in constraint_types + + def test_geometry_type_constraint_has_null_source(self) -> None: + """Constraints from field_info.metadata have source=None (not from a NewType).""" + spec = extract_model(Venue) + geometry_field = find_field(spec, "geometry") + + geo_constraints = [ + cs + for cs in geometry_field.type_info.constraints + if isinstance(cs.constraint, GeometryTypeConstraint) + ] + assert len(geo_constraints) == 1 + assert geo_constraints[0].source is None + + def test_metadata_constraints_not_duplicated(self) -> None: + """Fields where Pydantic preserves Annotated don't get duplicate constraints. + + When field_info.metadata is empty (Pydantic kept the Annotated wrapper), + no extra constraints are added. 
+ """ + spec = extract_model(Instrument) + tags_field = find_field(spec, "tags") + + unique_constraints = [ + cs + for cs in tags_field.type_info.constraints + if isinstance(cs.constraint, UniqueItemsConstraint) + ] + assert len(unique_constraints) == 1 + + def test_standalone_annotated_field_extracts_metadata(self) -> None: + """Direct Annotated[Type, constraint] fields (non-optional, non-union) + get their constraints from field_info.metadata.""" + + class Model(BaseModel): + geo: Annotated[ + Geometry, + GeometryTypeConstraint(GeometryType.POINT), + ] + + spec = extract_model(Model) + geo_field = find_field(spec, "geo") + + constraint_types = [ + type(cs.constraint) for cs in geo_field.type_info.constraints + ] + assert GeometryTypeConstraint in constraint_types + + +class TestFieldDescriptionFallback: + """Tests for field description fallback from NewType Field metadata.""" + + def test_field_inherits_newtype_description(self) -> None: + """Field with no explicit description gets NewType's Field description.""" + + class TestModel(BaseModel): + color: HexColor + + spec = extract_model(TestModel) + field = find_field(spec, "color") + assert field.description is not None + assert "color" in field.description.lower() + + def test_explicit_description_not_overridden(self) -> None: + """Field with explicit description keeps its own, ignores NewType's.""" + + class TestModel(BaseModel): + color: HexColor = Field(description="Custom color description") + + spec = extract_model(TestModel) + field = find_field(spec, "color") + assert field.description == "Custom color description" + + def test_field_without_newtype_description_stays_none(self) -> None: + """Field typed as plain str (no NewType description) keeps None.""" + + class TestModel(BaseModel): + name: str + + spec = extract_model(TestModel) + field = find_field(spec, "name") + assert field.description is None diff --git a/packages/overture-schema-codegen/tests/test_newtype_extraction.py 
b/packages/overture-schema-codegen/tests/test_newtype_extraction.py new file mode 100644 index 000000000..d8b97259e --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_newtype_extraction.py @@ -0,0 +1,74 @@ +"""Tests for NewType extraction.""" + +from typing import Annotated, NewType + +from codegen_test_support import STR_TYPE +from overture.schema.codegen.newtype_extraction import extract_newtype +from overture.schema.codegen.specs import NewTypeSpec +from overture.schema.system.field_constraint import UniqueItemsConstraint +from overture.schema.system.ref import Id +from overture.schema.system.string import HexColor +from pydantic import BaseModel, Field + + +class TestExtractNewType: + """Tests for extract_newtype function.""" + + def test_extract_hex_color(self) -> None: + """Should extract HexColor NewType specification.""" + spec = extract_newtype(HexColor) + + assert spec.name == "HexColor" + assert spec.type_info.newtype_name == "HexColor" + + def test_extract_id(self) -> None: + """Should extract Id NewType with nested chain.""" + spec = extract_newtype(Id) + + assert spec.name == "Id" + assert spec.type_info.newtype_name == "Id" + assert spec.type_info.base_type == "NoWhitespaceString" + + def test_extract_newtype_wrapping_list(self) -> None: + """Should extract a list-wrapping NewType.""" + + class Item(BaseModel): + value: str + + TestSources = NewType( + "TestSources", Annotated[list[Item], UniqueItemsConstraint()] + ) + spec = extract_newtype(TestSources) + + assert spec.name == "TestSources" + assert spec.type_info.is_list is True + assert spec.type_info.newtype_name == "TestSources" + + def test_extract_newtype_without_doc_uses_field_description(self) -> None: + """NewType with Field(description=...) 
but no __doc__ uses Field description.""" + TestType = NewType( + "TestType", + Annotated[str, Field(description="A test type description")], + ) + spec = extract_newtype(TestType) + assert spec.description == "A test type description" + + def test_extract_newtype_with_doc_ignores_field_description(self) -> None: + """NewType with custom __doc__ uses docstring, not Field description.""" + spec = extract_newtype(HexColor) + # HexColor has both __doc__ and Field(description=...). + # __doc__ should win because is_custom_docstring returns True. + assert spec.description is not None + assert "example" in spec.description.lower() or "#" in spec.description + + +class TestNewTypeSpecSourceType: + """Tests for source_type on NewTypeSpec.""" + + def test_newtype_spec_source_type_defaults_to_none(self) -> None: + spec = NewTypeSpec(name="Test", description=None, type_info=STR_TYPE) + assert spec.source_type is None + + def test_extract_newtype_sets_source_type(self) -> None: + spec = extract_newtype(HexColor) + assert spec.source_type is HexColor diff --git a/packages/overture-schema-codegen/tests/test_primitive_extraction.py b/packages/overture-schema-codegen/tests/test_primitive_extraction.py new file mode 100644 index 000000000..8ed54261d --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_primitive_extraction.py @@ -0,0 +1,71 @@ +"""Tests for primitive extraction and numeric bounds.""" + +from typing import Annotated, NewType + +from overture.schema.codegen.newtype_extraction import extract_newtype +from overture.schema.codegen.primitive_extraction import extract_numeric_bounds +from overture.schema.codegen.type_analyzer import analyze_type +from overture.schema.system.primitive import float32, int32, int64, uint8 +from pydantic import Field + + +class TestExtractNumericBounds: + """Tests for extract_numeric_bounds function.""" + + def test_signed_integer_bounds(self) -> None: + """Should extract ge/le from a constrained integer NewType.""" + spec = 
extract_newtype(int32) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge == -(2**31) + assert bounds.le == 2**31 - 1 + + def test_unsigned_integer_bounds(self) -> None: + """Should extract 0-based bounds from unsigned NewType.""" + spec = extract_newtype(uint8) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge == 0 + assert bounds.le == 255 + + def test_int64_bounds(self) -> None: + """Should extract large bounds from int64.""" + spec = extract_newtype(int64) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge == -(2**63) + assert bounds.le == 2**63 - 1 + + def test_unconstrained_type(self) -> None: + """Should return empty Interval for types without numeric constraints.""" + spec = extract_newtype(float32) + bounds = extract_numeric_bounds(spec.type_info) + + assert bounds.ge is None + assert bounds.gt is None + assert bounds.le is None + assert bounds.lt is None + + def test_exclusive_bounds(self) -> None: + """Should extract gt/lt from constraints using exclusive bounds.""" + ExclusiveBounded = NewType( + "ExclusiveBounded", Annotated[int, Field(gt=0, lt=100)] + ) + type_info = analyze_type(ExclusiveBounded) + bounds = extract_numeric_bounds(type_info) + + assert bounds.gt == 0 + assert bounds.lt == 100 + assert bounds.ge is None + assert bounds.le is None + + def test_mixed_bounds(self) -> None: + """Should extract a mix of inclusive and exclusive bounds.""" + MixedBounded = NewType("MixedBounded", Annotated[int, Field(ge=0, lt=256)]) + type_info = analyze_type(MixedBounded) + bounds = extract_numeric_bounds(type_info) + + assert bounds.ge == 0 + assert bounds.lt == 256 + assert bounds.gt is None + assert bounds.le is None diff --git a/packages/overture-schema-codegen/tests/test_specs.py b/packages/overture-schema-codegen/tests/test_specs.py new file mode 100644 index 000000000..0b47187ed --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_specs.py @@ -0,0 +1,226 @@ +"""Tests for spec data 
structures and predicates.""" + +from typing import Annotated + +from codegen_test_support import STR_TYPE, make_union_spec +from overture.schema.codegen.specs import ( + AnnotatedField, + FeatureSpec, + FieldSpec, + ModelSpec, + is_union_alias, +) +from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind +from pydantic import BaseModel, Field + + +class TestFeatureSpecProtocol: + """Tests for FeatureSpec protocol compliance.""" + + def test_model_spec_satisfies_feature_spec(self) -> None: + """ModelSpec satisfies the FeatureSpec protocol.""" + + class Simple(BaseModel): + name: str + + from overture.schema.codegen.model_extraction import extract_model + + spec = extract_model(Simple) + # Protocol compliance check + assert isinstance(spec, FeatureSpec) + # Verify protocol attributes + assert spec.name == "Simple" + assert isinstance(spec.fields, list) + assert spec.source_type is Simple + + +class TestFieldSpec: + """Tests for FieldSpec dataclass.""" + + def test_fieldspec_stores_basic_attributes(self) -> None: + """FieldSpec should store name, type_info, description, is_required.""" + field_spec = FieldSpec( + name="test_field", + type_info=STR_TYPE, + description="A test field", + is_required=True, + ) + + assert field_spec.name == "test_field" + assert field_spec.type_info == STR_TYPE + assert field_spec.description == "A test field" + assert field_spec.is_required is True + + def test_fieldspec_optional_field(self) -> None: + """FieldSpec should handle optional fields.""" + optional_str = TypeInfo( + base_type="str", kind=TypeKind.PRIMITIVE, is_optional=True + ) + + field_spec = FieldSpec( + name="optional_field", + type_info=optional_str, + description=None, + is_required=False, + ) + + assert field_spec.is_required is False + assert field_spec.description is None + + +class TestModelSpec: + """Tests for ModelSpec dataclass.""" + + def test_modelspec_stores_basic_attributes(self) -> None: + """ModelSpec should store name, description, fields.""" + 
field = FieldSpec( + name="id", + type_info=STR_TYPE, + description="Unique identifier", + is_required=True, + ) + + model_spec = ModelSpec( + name="TestModel", + description="A test model", + fields=[field], + ) + + assert model_spec.name == "TestModel" + assert model_spec.description == "A test model" + assert len(model_spec.fields) == 1 + assert model_spec.fields[0].name == "id" + + def test_entry_point_defaults_to_none(self) -> None: + spec = ModelSpec(name="M", description=None) + assert spec.entry_point is None + + +class TestAnnotatedField: + """Tests for AnnotatedField wrapper.""" + + def test_stores_field_and_variant_sources(self) -> None: + """AnnotatedField pairs a FieldSpec with variant provenance.""" + fs = FieldSpec(name="x", type_info=STR_TYPE, description=None, is_required=True) + af = AnnotatedField(field_spec=fs, variant_sources=("RoadSegment",)) + assert af.field_spec is fs + assert af.variant_sources == ("RoadSegment",) + + def test_none_variant_sources_means_shared(self) -> None: + """variant_sources=None indicates a shared field.""" + fs = FieldSpec(name="x", type_info=STR_TYPE, description=None, is_required=True) + af = AnnotatedField(field_spec=fs, variant_sources=None) + assert af.variant_sources is None + + +class TestFieldSpecModelTree: + """Tests for FieldSpec model and starts_cycle fields.""" + + def test_model_defaults_to_none(self) -> None: + field_spec = FieldSpec( + name="test", type_info=STR_TYPE, description=None, is_required=True + ) + assert field_spec.model is None + + def test_starts_cycle_defaults_to_false(self) -> None: + field_spec = FieldSpec( + name="test", type_info=STR_TYPE, description=None, is_required=True + ) + assert field_spec.starts_cycle is False + + def test_model_can_hold_model_spec(self) -> None: + type_info = TypeInfo(base_type="Address", kind=TypeKind.MODEL) + sub = ModelSpec(name="Address", description=None) + field_spec = FieldSpec( + name="address", + type_info=type_info, + description=None, + 
is_required=True, + model=sub, + ) + assert field_spec.model is sub + + def test_starts_cycle_can_be_set(self) -> None: + type_info = TypeInfo(base_type="Node", kind=TypeKind.MODEL) + sub = ModelSpec(name="Node", description=None) + field_spec = FieldSpec( + name="parent", + type_info=type_info, + description=None, + is_required=False, + model=sub, + starts_cycle=True, + ) + assert field_spec.starts_cycle is True + assert field_spec.model is sub + + def test_starts_cycle_without_model_is_nonsensical(self) -> None: + """starts_cycle=True with model=None is expressible but invalid. + + expand_model_tree never produces this combination -- starts_cycle + is only set when model points to the cycle-causing ModelSpec. + Document the invariant so violations stand out. + """ + type_info = TypeInfo(base_type="Node", kind=TypeKind.MODEL) + field_spec = FieldSpec( + name="parent", + type_info=type_info, + description=None, + is_required=False, + starts_cycle=True, + ) + # Expressible but meaningless: cycle to nowhere + assert field_spec.starts_cycle is True + assert field_spec.model is None + + +class TestIsUnionAlias: + """Tests for is_union_alias predicate.""" + + def test_annotated_union_of_models_returns_true(self) -> None: + """Annotated[Union of BaseModels] is a union alias.""" + + class A(BaseModel): + x: int + + class B(BaseModel): + y: str + + union_type = Annotated[A | B, Field(description="test")] + assert is_union_alias(union_type) is True + + def test_model_class_returns_false(self) -> None: + """A concrete BaseModel class is not a union alias.""" + + class A(BaseModel): + x: int + + assert is_union_alias(A) is False + + def test_plain_string_returns_false(self) -> None: + """A plain string is not a union alias.""" + assert is_union_alias("not a type") is False + + def test_non_model_union_returns_false(self) -> None: + """A union of non-model types is not a union alias.""" + assert is_union_alias(str | int) is False + + +class TestUnionSpec: + """Tests for 
UnionSpec data structure.""" + + def test_fields_property_returns_plain_field_specs(self) -> None: + """UnionSpec.fields property returns list[FieldSpec] from annotated_fields.""" + fs1 = FieldSpec( + name="a", type_info=STR_TYPE, description=None, is_required=True + ) + fs2 = FieldSpec( + name="b", type_info=STR_TYPE, description=None, is_required=False + ) + spec = make_union_spec( + annotated_fields=[ + AnnotatedField(field_spec=fs1, variant_sources=None), + AnnotatedField(field_spec=fs2, variant_sources=("X",)), + ], + ) + assert spec.fields == [fs1, fs2] diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index 04c54340c..8f1e11e13 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -88,14 +88,6 @@ def test_pipe_none_sets_is_optional(self) -> None: assert result.is_optional is True assert result.is_list is False - def test_type_with_literal_alternative(self) -> None: - """str | Literal[""] filters out the Literal and analyzes the concrete type.""" - result = analyze_type(str | Literal[""]) - - assert result.base_type == "str" - assert result.kind == TypeKind.PRIMITIVE - assert result.is_optional is False - def test_type_with_literal_and_none(self) -> None: """str | Literal[""] | None filters Literal and marks optional.""" result = analyze_type(str | Literal[""] | None) @@ -114,6 +106,18 @@ def test_typing_optional_sets_is_optional(self) -> None: assert result.is_list is False +class TestAnalyzeTypeUnionLiteralFiltering: + """Tests for filtering Literal arms out of unions.""" + + def test_type_with_literal_alternative(self) -> None: + """str | Literal[""] filters out the Literal and analyzes the concrete type.""" + result = analyze_type(str | Literal[""]) + + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + assert result.is_optional is False + + class 
TestAnalyzeTypeList: """Tests for list type analysis.""" diff --git a/packages/overture-schema-codegen/tests/test_union_extraction.py b/packages/overture-schema-codegen/tests/test_union_extraction.py new file mode 100644 index 000000000..3cfe0b423 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_union_extraction.py @@ -0,0 +1,91 @@ +"""Tests for union extraction.""" + +import pytest +from codegen_test_support import ( + RailSegment, + RoadSegment, + SegmentBase, + TestSegment, + WaterSegment, +) +from overture.schema.codegen.specs import FieldSpec, UnionSpec +from overture.schema.codegen.union_extraction import extract_union + + +class TestExtractUnion: + """Tests for extract_union function.""" + + @pytest.fixture + def segment_spec(self) -> UnionSpec: + return extract_union("TestSegment", TestSegment) + + def test_extracts_name_and_description(self, segment_spec: UnionSpec) -> None: + """UnionSpec captures the union name and docstring.""" + assert segment_spec.name == "TestSegment" + assert segment_spec.description == "Test segment union" + + def test_finds_common_base(self, segment_spec: UnionSpec) -> None: + """Identifies SegmentBase as the common base class.""" + assert segment_spec.common_base is SegmentBase + + def test_shared_fields_first(self, segment_spec: UnionSpec) -> None: + """Shared fields from common base come first with variant_sources=None.""" + shared = [ + af for af in segment_spec.annotated_fields if af.variant_sources is None + ] + shared_names = [af.field_spec.name for af in shared] + assert "geometry" in shared_names + assert "subtype" in shared_names + # Shared fields are at the start + first_variant_idx = next( + ( + i + for i, af in enumerate(segment_spec.annotated_fields) + if af.variant_sources is not None + ), + len(segment_spec.annotated_fields), + ) + for af in segment_spec.annotated_fields[:first_variant_idx]: + assert af.variant_sources is None + + def test_variant_specific_fields_have_sources( + self, segment_spec: 
UnionSpec + ) -> None: + """Variant-only fields carry their source class names.""" + speed = next( + af + for af in segment_spec.annotated_fields + if af.field_spec.name == "speed_limit" + ) + assert speed.variant_sources == ("RoadSegment",) + gauge = next( + af + for af in segment_spec.annotated_fields + if af.field_spec.name == "rail_gauge" + ) + assert gauge.variant_sources == ("RailSegment",) + + def test_heterogeneous_same_name_produces_separate_rows( + self, segment_spec: UnionSpec + ) -> None: + """class_ in Road (str) vs Rail (int): separate rows, not merged.""" + class_fields = [ + af for af in segment_spec.annotated_fields if af.field_spec.name == "class" + ] + assert len(class_fields) == 2 + sources = {af.variant_sources for af in class_fields} + assert ("RoadSegment",) in sources + assert ("RailSegment",) in sources + + def test_members_lists_all_member_classes(self, segment_spec: UnionSpec) -> None: + """UnionSpec.members contains all union member classes.""" + assert set(segment_spec.members) == {RoadSegment, RailSegment, WaterSegment} + + def test_source_annotation_preserved(self, segment_spec: UnionSpec) -> None: + """source_annotation holds the original Annotated[Union[...]].""" + assert segment_spec.source_annotation is TestSegment + + def test_fields_property_returns_plain_list(self, segment_spec: UnionSpec) -> None: + """spec.fields returns list[FieldSpec] without provenance.""" + for f in segment_spec.fields: + assert isinstance(f, FieldSpec) From 86ef93d39de74f51b0b8eaf11758abfd5dc2e4a3 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:00:26 -0800 Subject: [PATCH 15/38] feat(codegen): add constraint description modules Generate prose from extracted constraint data: - field_constraint_description: describe field-level constraints (ranges, patterns, unique items, hex colors) as human-readable notes with NewType source attribution - model_constraint_description: describe model-level constraints (@require_any_of, 
@radio_group, @min_fields_set, @require_if, @forbid_if) as prose, with consolidation of same-field conditional constraints --- .../codegen/field_constraint_description.py | 151 +++++++ .../codegen/model_constraint_description.py | 227 ++++++++++ .../schema/codegen/model_extraction.py | 4 +- .../overture/schema/codegen/type_analyzer.py | 6 +- .../schema/codegen/union_extraction.py | 18 +- .../tests/test_constraint_description.py | 400 ++++++++++++++++++ .../tests/test_enum_extraction.py | 17 +- .../tests/test_specs.py | 3 +- .../tests/test_type_analyzer.py | 24 +- 9 files changed, 812 insertions(+), 38 deletions(-) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py create mode 100644 packages/overture-schema-codegen/tests/test_constraint_description.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py new file mode 100644 index 000000000..96f2ca48b --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py @@ -0,0 +1,151 @@ +"""Convert field-level constraints to display text. + +Handles constraints from Annotated metadata and NewType wrappers: +Ge, Gt, Interval, Le, Lt, MaxLen, MinLen, GeometryTypeConstraint, +Reference, and custom constraint classes. 
+""" + +from __future__ import annotations + +from collections.abc import Callable + +from annotated_types import Ge, Gt, Interval, Le, Lt, MaxLen, MinLen + +from overture.schema.system.primitive import GeometryTypeConstraint +from overture.schema.system.ref import Reference + +from .docstring import first_docstring_line +from .type_analyzer import ConstraintSource + +__all__ = [ + "constraint_display_text", + "constraint_pattern", + "describe_field_constraint", +] + +# Bound attribute names paired with display operators. Each entry maps an +# annotated_types constraint attribute (Ge, Gt, Le, Lt, Interval) to its +# mathematical symbol for prose rendering. +# +# primitive_extraction.py has its own _BOUND_ATTRS for numeric extraction. The +# duplication is deliberate: these modules use the same attribute names for +# unrelated purposes (display formatting vs. numeric bound extraction), and +# coupling them for four string literals adds a dependency without value. +_BOUND_OPS: tuple[tuple[str, str], ...] 
= ( + ("ge", "≥"), + ("gt", ">"), + ("le", "≤"), + ("lt", "<"), +) + + +def _first_bound(obj: object) -> str | None: + """Return backticked notation for the first set bound, or None.""" + for attr, op in _BOUND_OPS: + val = getattr(obj, attr, None) + if val is not None: + return f"`{op} {val}`" + return None + + +def _describe_interval(iv: Interval) -> str: + """Format an Interval as readable bound notation.""" + lower_val = iv.ge if iv.ge is not None else iv.gt + lower_op = "≤" if iv.ge is not None else "<" + upper_val = iv.le if iv.le is not None else iv.lt + upper_op = "≤" if iv.le is not None else "<" + + if lower_val is not None and upper_val is not None: + return f"`{lower_val} {lower_op} x {upper_op} {upper_val}`" + + return _first_bound(iv) or "" + + +def _is_opaque_constraint(constraint: object) -> bool: + """Check whether the constraint has no custom __repr__ (renders as just its class name).""" + return type(constraint).__repr__ is object.__repr__ + + +def _geometry_type_label(value: str) -> str: + """Convert a GeometryType value to PascalCase display name. + + >>> _geometry_type_label("line_string") + 'LineString' + """ + return "".join(part.title() for part in value.split("_")) + + +def describe_field_constraint( + constraint: object, + link_fn: Callable[[str], str] | None = None, +) -> str: + """Return a display string for a field-level constraint object. + + *link_fn* resolves a type name to a markdown link string (e.g. + `` [`Name`](path) ``). When None, names render as inline code. 
+ """ + if isinstance(constraint, GeometryTypeConstraint): + labels = ", ".join( + _geometry_type_label(gt.value) for gt in constraint.allowed_types + ) + return f"Allowed geometry types: {labels}" + if isinstance(constraint, Reference): + rel_value: str = constraint.relationship.value # type: ignore[assignment] + rel_label = rel_value.replace("_", " ") + target = constraint.relatee.__name__ + target_str = link_fn(target) if link_fn else f"`{target}`" + return f"References {target_str} ({rel_label})" + if isinstance(constraint, Interval): + desc = _describe_interval(constraint) + if desc: + return desc + elif isinstance(constraint, (Ge, Gt, Le, Lt)): + result = _first_bound(constraint) + if result is not None: + return result + if isinstance(constraint, MinLen): + return f"`minimum length: {constraint.min_length}`" + if isinstance(constraint, MaxLen): + return f"`maximum length: {constraint.max_length}`" + + if _is_opaque_constraint(constraint): + return f"`{type(constraint).__name__}`" + return f"`{constraint}`" + + +def _constraint_class_description(constraint: object) -> str | None: + """Extract the first docstring line from a custom constraint class. + + Returns None for builtins and classes without docstrings. + """ + constraint_type = type(constraint) + if constraint_type.__module__ == "builtins": + return None + line = first_docstring_line(constraint_type.__doc__) + return line or None + + +def constraint_pattern(constraint: object) -> str | None: + """Extract the regex pattern string from a constraint, if present. + + Traverses two levels: constraint.pattern is a compiled re.Pattern + object, and re.Pattern.pattern is the raw string. 
+ """ + compiled = getattr(constraint, "pattern", None) + return getattr(compiled, "pattern", None) + + +def constraint_display_text( + cs: ConstraintSource, + link_fn: Callable[[str], str] | None = None, +) -> str: + """Build display text for a constraint, combining description/pattern when available.""" + description = _constraint_class_description(cs.constraint) + if _is_opaque_constraint(cs.constraint) and description: + cls_name = type(cs.constraint).__name__ + pattern = constraint_pattern(cs.constraint) + if pattern: + return f"{description} (`{cls_name}`, pattern: `{pattern}`)" + return f"{description} (`{cls_name}`)" + + return describe_field_constraint(cs.constraint, link_fn=link_fn) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py new file mode 100644 index 000000000..4401a777a --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py @@ -0,0 +1,227 @@ +"""Convert model-level constraints to human-readable prose. + +Handles RequireAnyOf, RadioGroup, ForbidIf, RequireIf, and other +ModelConstraint types. Produces descriptions and per-field notes for +documentation rendering. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass + +from overture.schema.system.model_constraint import ( + FieldEqCondition, + ForbidIfConstraint, + MinFieldsSetConstraint, + ModelConstraint, + NoExtraFieldsConstraint, + Not, + RadioGroupConstraint, + RequireAnyOfConstraint, + RequireIfConstraint, +) + +__all__ = ["analyze_model_constraints"] + +_ConditionalConstraint = RequireIfConstraint | ForbidIfConstraint + + +@dataclass(frozen=True) +class _ConstraintEntry: + """A constraint description paired with the field names it affects.""" + + description: str + field_names: frozenset[str] + + +def _format_field_list(names: tuple[str, ...]) -> str: + """Format field names as backtick-quoted, comma-separated list.""" + return ", ".join(f"`{n}`" for n in names) + + +def _conditional_verb(constraint: _ConditionalConstraint) -> str: + """Return 'required' or 'forbidden' based on constraint type.""" + return "required" if isinstance(constraint, RequireIfConstraint) else "forbidden" + + +def _plural_verb(names: tuple[str, ...]) -> str: + """Return 'is' or 'are' based on field count.""" + return "are" if len(names) > 1 else "is" + + +def _unwrap_field_eq(condition: object) -> tuple[FieldEqCondition, bool] | None: + """Extract the FieldEqCondition from a condition, with negation flag. + + Returns (field_eq, is_negated) or None for unrecognized conditions. 
+ """ + if isinstance(condition, Not) and isinstance(condition.inner, FieldEqCondition): + return condition.inner, True + if isinstance(condition, FieldEqCondition): + return condition, False + return None + + +def _describe_condition(condition: object) -> str: + """Render a Condition as human-readable text.""" + unwrapped = _unwrap_field_eq(condition) + if unwrapped is not None: + field_eq, negated = unwrapped + op = "≠" if negated else "=" + return f"`{field_eq.field_name}` {op} `{field_eq.value}`" + return str(condition) + + +def _describe_conditional(constraint: _ConditionalConstraint) -> str: + """Describe a require_if or forbid_if constraint.""" + fields = _format_field_list(constraint.field_names) + verb = _conditional_verb(constraint) + cond = _describe_condition(constraint.condition) + return f"{fields} {_plural_verb(constraint.field_names)} {verb} when {cond}" + + +def _consolidation_key( + constraint: _ConditionalConstraint, +) -> tuple[type, tuple[str, ...], str] | None: + """Return a grouping key if the constraint is consolidatable, else None. + + Consolidatable: same type, same field_names, plain FieldEqCondition + (not negated) on the same condition field. + """ + cond = constraint.condition + if not isinstance(cond, FieldEqCondition): + return None + return (type(constraint), constraint.field_names, cond.field_name) + + +def _as_field_eq(constraint: _ConditionalConstraint) -> FieldEqCondition: + """Narrow a conditional constraint's condition to FieldEqCondition. + + Only called on constraints that passed _consolidation_key, which + rejects non-FieldEqCondition conditions. 
+ """ + cond = constraint.condition + assert isinstance(cond, FieldEqCondition) + return cond + + +def _describe_consolidated( + constraints: list[_ConditionalConstraint], +) -> str: + """Describe a group of consolidated conditional constraints.""" + first = constraints[0] + fields = _format_field_list(first.field_names) + verb = _conditional_verb(first) + cond_field = _as_field_eq(first).field_name + values = ", ".join(f"`{_as_field_eq(c).value}`" for c in constraints) + return ( + f"{fields} {_plural_verb(first.field_names)} {verb} " + f"when `{cond_field}` is one of: {values}" + ) + + +def _condition_field_names(condition: object) -> frozenset[str]: + """Extract field names referenced by a condition.""" + unwrapped = _unwrap_field_eq(condition) + if unwrapped is not None: + return frozenset({unwrapped[0].field_name}) + return frozenset() + + +def _affected_field_names(constraint: ModelConstraint) -> frozenset[str]: + """Return all field names referenced by a constraint. + + Includes both constrained field_names and condition trigger fields. + Returns empty set for constraints that don't reference specific fields + (NoExtraFieldsConstraint, MinFieldsSetConstraint). 
+ """ + if isinstance(constraint, (NoExtraFieldsConstraint, MinFieldsSetConstraint)): + return frozenset() + if isinstance(constraint, (RequireIfConstraint, ForbidIfConstraint)): + return frozenset(constraint.field_names) | _condition_field_names( + constraint.condition + ) + if isinstance(constraint, (RequireAnyOfConstraint, RadioGroupConstraint)): + return frozenset(constraint.field_names) + return frozenset() + + +def _describe_one(constraint: ModelConstraint) -> str | None: + """Describe a single constraint, or None to skip it.""" + if isinstance(constraint, NoExtraFieldsConstraint): + return None + if isinstance(constraint, RequireAnyOfConstraint): + return ( + f"At least one of {_format_field_list(constraint.field_names)} must be set" + ) + if isinstance(constraint, RadioGroupConstraint): + return f"Exactly one of {_format_field_list(constraint.field_names)} must be `true`" + if isinstance(constraint, MinFieldsSetConstraint): + return f"At least {constraint.count} fields must be set" + if isinstance(constraint, (RequireIfConstraint, ForbidIfConstraint)): + return _describe_conditional(constraint) + return f"`{constraint.name}`" + + +def _analyze_constraints( + constraints: tuple[ModelConstraint, ...], +) -> list[_ConstraintEntry]: + """Analyze constraints into descriptions paired with affected fields. + + Handles consolidation and filtering, preserving original declaration order. 
+ """ + groups: dict[ + tuple[type, tuple[str, ...], str], list[tuple[int, _ConditionalConstraint]] + ] = {} + standalone: list[tuple[int, ModelConstraint]] = [] + + for i, c in enumerate(constraints): + if isinstance(c, (RequireIfConstraint, ForbidIfConstraint)): + key = _consolidation_key(c) + if key is not None: + groups.setdefault(key, []).append((i, c)) + continue + standalone.append((i, c)) + + entries: list[tuple[int, _ConstraintEntry]] = [] + + for group_items in groups.values(): + first_idx = group_items[0][0] + group_constraints = [c for _, c in group_items] + all_fields = frozenset[str]().union( + *(_affected_field_names(c) for c in group_constraints) + ) + if len(group_constraints) == 1: + desc = _describe_one(group_constraints[0]) + else: + desc = _describe_consolidated(group_constraints) + if desc is not None: + entries.append((first_idx, _ConstraintEntry(desc, all_fields))) + + for idx, c in standalone: + desc = _describe_one(c) + if desc is not None: + entries.append((idx, _ConstraintEntry(desc, _affected_field_names(c)))) + + entries.sort(key=lambda e: e[0]) + return [entry for _, entry in entries] + + +def analyze_model_constraints( + constraints: tuple[ModelConstraint, ...], +) -> tuple[list[str], dict[str, list[str]]]: + """Analyze constraints into descriptions and per-field notes in one pass. + + Returns (descriptions, field_notes) where descriptions is the list of + human-readable constraint strings and field_notes maps field names to + constraint descriptions that reference them. 
+ """ + entries = _analyze_constraints(constraints) + + descriptions = [entry.description for entry in entries] + + field_notes: dict[str, list[str]] = {} + for entry in entries: + for name in entry.field_names: + field_notes.setdefault(name, []).append(entry.description) + + return descriptions, field_notes diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py index 4cb12c6f0..7f122e868 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py @@ -164,9 +164,7 @@ def expand_model_tree( """ if cache is None: cache = {} - if spec.source_type is not None: - # Only ModelSpec has non-None source_type; UnionSpec.source_type is always None - assert isinstance(spec, ModelSpec) + if isinstance(spec, ModelSpec) and spec.source_type is not None: cache[spec.source_type] = spec ancestors = frozenset({spec.source_type}) if spec.source_type else frozenset() _expand_fields(spec.fields, cache, ancestors) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index 97c363c85..cedae8506 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -97,14 +97,14 @@ class _UnwrapState: is_dict: bool = False dict_key_type: TypeInfo | None = None dict_value_type: TypeInfo | None = None - _constraints: list[ConstraintSource] = field(default_factory=list) + constraints: list[ConstraintSource] = field(default_factory=list) outermost_newtype_name: str | None = None outermost_newtype_ref: object | None = None last_newtype_name: str | None = None description: str | None = None def add_constraint(self, source: str | None, 
constraint: object) -> None: - self._constraints.append(ConstraintSource(source, constraint)) + self.constraints.append(ConstraintSource(source, constraint)) def build_type_info( self, @@ -123,7 +123,7 @@ def build_type_info( is_dict=self.is_dict, dict_key_type=self.dict_key_type, dict_value_type=self.dict_value_type, - constraints=tuple(self._constraints), + constraints=tuple(self.constraints), literal_value=literal_value, source_type=source_type, newtype_name=self.outermost_newtype_name, diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py index 0256c0589..c826206cc 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py @@ -133,18 +133,12 @@ def extract_union( if fs.name in shared_field_names: continue key = (fs.name, _type_identity(fs.type_info)) - if key in seen: - existing = seen[key] - existing_sources = existing.variant_sources or () - seen[key] = AnnotatedField( - field_spec=fs, - variant_sources=(*existing_sources, member_cls.__name__), - ) - else: - seen[key] = AnnotatedField( - field_spec=fs, - variant_sources=(member_cls.__name__,), - ) + existing = seen.get(key) + prior_sources = existing.variant_sources or () if existing else () + seen[key] = AnnotatedField( + field_spec=fs, + variant_sources=(*prior_sources, member_cls.__name__), + ) annotated_fields.extend(seen.values()) diff --git a/packages/overture-schema-codegen/tests/test_constraint_description.py b/packages/overture-schema-codegen/tests/test_constraint_description.py new file mode 100644 index 000000000..9579f6147 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_constraint_description.py @@ -0,0 +1,400 @@ +"""Tests for constraint description (model-level and field-level).""" + +from annotated_types import Ge, Gt, Interval, Le, Lt, 
MaxLen, MinLen +from overture.schema.codegen.field_constraint_description import ( + describe_field_constraint, +) +from overture.schema.codegen.model_constraint_description import ( + analyze_model_constraints, +) +from overture.schema.system.model_constraint import ( + FieldEqCondition, + ForbidIfConstraint, + MinFieldsSetConstraint, + ModelConstraint, + NoExtraFieldsConstraint, + Not, + RadioGroupConstraint, + RequireAnyOfConstraint, + RequireIfConstraint, +) +from overture.schema.system.primitive import GeometryType, GeometryTypeConstraint +from overture.schema.system.ref import Reference, Relationship +from overture.schema.system.ref.id import Identified + + +def describe_model_constraints( + constraints: tuple[ModelConstraint, ...], +) -> list[str]: + descriptions, _ = analyze_model_constraints(constraints) + return descriptions + + +def field_constraint_notes( + constraints: tuple[ModelConstraint, ...], +) -> dict[str, list[str]]: + _, field_notes = analyze_model_constraints(constraints) + return field_notes + + +class TestDescribeSingleConstraint: + """Each constraint type produces readable prose.""" + + def test_require_any_of(self) -> None: + constraint = RequireAnyOfConstraint._create_internal( + "@require_any_of", "name", "description" + ) + result = describe_model_constraints((constraint,)) + + assert result == ["At least one of `name`, `description` must be set"] + + def test_radio_group(self) -> None: + constraint = RadioGroupConstraint._create_internal( + "@radio_group", "is_land", "is_territorial" + ) + result = describe_model_constraints((constraint,)) + + assert result == ["Exactly one of `is_land`, `is_territorial` must be `true`"] + + def test_min_fields_set(self) -> None: + constraint = MinFieldsSetConstraint._create_internal("@min_fields_set", 3) + result = describe_model_constraints((constraint,)) + + assert result == ["At least 3 fields must be set"] + + def test_require_if_field_eq(self) -> None: + constraint = 
RequireIfConstraint._create_internal( + "@require_if", ["admin_level"], FieldEqCondition("subtype", "country") + ) + result = describe_model_constraints((constraint,)) + + assert result == ["`admin_level` is required when `subtype` = `country`"] + + def test_require_if_negated_condition(self) -> None: + """Not(FieldEqCondition) uses not-equal sign.""" + constraint = RequireIfConstraint._create_internal( + "@require_if", + ["parent_division_id"], + Not(FieldEqCondition("subtype", "country")), + ) + result = describe_model_constraints((constraint,)) + + assert result == ["`parent_division_id` is required when `subtype` ≠ `country`"] + + def test_forbid_if_field_eq(self) -> None: + constraint = ForbidIfConstraint._create_internal( + "@forbid_if", + ["parent_division_id"], + FieldEqCondition("subtype", "country"), + ) + result = describe_model_constraints((constraint,)) + + assert result == [ + "`parent_division_id` is forbidden when `subtype` = `country`" + ] + + def test_multi_field_uses_plural_verb(self) -> None: + """Multiple field names produce 'are required', not 'is required'.""" + constraint = RequireIfConstraint._create_internal( + "@require_if", + ["foo", "bar"], + FieldEqCondition("flag", "on"), + ) + result = describe_model_constraints((constraint,)) + + assert result == ["`foo`, `bar` are required when `flag` = `on`"] + + +class TestDescribeFiltering: + """Filtering and fallback behavior.""" + + def test_no_extra_fields_filtered_out(self) -> None: + """@no_extra_fields produces no output.""" + constraint = NoExtraFieldsConstraint._create_internal("@no_extra_fields") + result = describe_model_constraints((constraint,)) + + assert result == [] + + def test_unknown_constraint_uses_name_fallback(self) -> None: + """Unrecognized constraint type falls back to constraint.name.""" + + class FutureConstraint(ModelConstraint): + pass + + constraint = FutureConstraint("@future_thing") + result = describe_model_constraints((constraint,)) + + assert result == 
["`@future_thing`"] + + +class TestConsolidation: + """Consolidation of same-field conditional constraints.""" + + def test_consolidate_require_if_same_field(self) -> None: + """Multiple @require_if with same fields, different FieldEqCondition values, merge.""" + constraints = tuple( + RequireIfConstraint._create_internal( + "@require_if", + ["admin_level"], + FieldEqCondition("subtype", val), + ) + for val in ("country", "dependency", "macroregion") + ) + result = describe_model_constraints(constraints) + + assert result == [ + "`admin_level` is required when `subtype` is one of: " + "`country`, `dependency`, `macroregion`" + ] + + def test_no_consolidation_for_different_fields(self) -> None: + """@require_if with different field_names are not consolidated.""" + c1 = RequireIfConstraint._create_internal( + "@require_if", ["foo"], FieldEqCondition("flag", "a") + ) + c2 = RequireIfConstraint._create_internal( + "@require_if", ["bar"], FieldEqCondition("flag", "b") + ) + result = describe_model_constraints((c1, c2)) + + assert len(result) == 2 + + def test_no_consolidation_for_negated_conditions(self) -> None: + """Negated conditions are not consolidated.""" + c1 = RequireIfConstraint._create_internal( + "@require_if", ["foo"], Not(FieldEqCondition("flag", "a")) + ) + c2 = RequireIfConstraint._create_internal( + "@require_if", ["foo"], Not(FieldEqCondition("flag", "b")) + ) + result = describe_model_constraints((c1, c2)) + + assert len(result) == 2 + + def test_consolidate_forbid_if_same_field(self) -> None: + """Multiple @forbid_if with same fields also consolidate.""" + constraints = tuple( + ForbidIfConstraint._create_internal( + "@forbid_if", + ["secret"], + FieldEqCondition("role", val), + ) + for val in ("guest", "anonymous") + ) + result = describe_model_constraints(constraints) + + assert result == [ + "`secret` is forbidden when `role` is one of: `guest`, `anonymous`" + ] + + +class TestMixedConstraints: + """End-to-end with mixed constraint types.""" + + 
def test_division_like_model(self) -> None: + """Mixed constraints render in declaration order with consolidation.""" + constraints = ( + RequireAnyOfConstraint._create_internal("@require_any_of", "foo", "bar"), + ForbidIfConstraint._create_internal( + "@forbid_if", + ["parent_id"], + FieldEqCondition("subtype", "country"), + ), + RequireIfConstraint._create_internal( + "@require_if", + ["parent_id"], + Not(FieldEqCondition("subtype", "country")), + ), + RequireIfConstraint._create_internal( + "@require_if", + ["level"], + FieldEqCondition("subtype", "country"), + ), + RequireIfConstraint._create_internal( + "@require_if", + ["level"], + FieldEqCondition("subtype", "region"), + ), + RadioGroupConstraint._create_internal("@radio_group", "is_land", "is_sea"), + ) + result = describe_model_constraints(constraints) + + assert result == [ + "At least one of `foo`, `bar` must be set", + "`parent_id` is forbidden when `subtype` = `country`", + "`parent_id` is required when `subtype` ≠ `country`", + "`level` is required when `subtype` is one of: `country`, `region`", + "Exactly one of `is_land`, `is_sea` must be `true`", + ] + + +class TestFieldConstraintNotes: + """field_constraint_notes maps field names to their constraint descriptions.""" + + def test_require_any_of_maps_all_fields(self) -> None: + """RequireAnyOfConstraint maps each field name to the description.""" + constraint = RequireAnyOfConstraint._create_internal( + "@require_any_of", "name", "description" + ) + result = field_constraint_notes((constraint,)) + + expected = "At least one of `name`, `description` must be set" + assert result == {"name": [expected], "description": [expected]} + + def test_require_if_includes_condition_field(self) -> None: + """RequireIfConstraint includes both constrained and condition fields.""" + constraint = RequireIfConstraint._create_internal( + "@require_if", ["admin_level"], FieldEqCondition("subtype", "country") + ) + result = field_constraint_notes((constraint,)) + + 
expected = "`admin_level` is required when `subtype` = `country`" + assert result["admin_level"] == [expected] + assert result["subtype"] == [expected] + + def test_forbid_if_with_negated_condition_includes_condition_field(self) -> None: + """ForbidIfConstraint with Not(FieldEqCondition) includes condition field.""" + constraint = ForbidIfConstraint._create_internal( + "@forbid_if", + ["parent_id"], + Not(FieldEqCondition("subtype", "country")), + ) + result = field_constraint_notes((constraint,)) + + expected = "`parent_id` is forbidden when `subtype` ≠ `country`" + assert result["parent_id"] == [expected] + assert result["subtype"] == [expected] + + def test_consolidated_constraints_map_all_fields(self) -> None: + """Consolidated constraints map to all participating fields.""" + constraints = tuple( + RequireIfConstraint._create_internal( + "@require_if", + ["admin_level"], + FieldEqCondition("subtype", val), + ) + for val in ("country", "dependency") + ) + result = field_constraint_notes(constraints) + + expected = ( + "`admin_level` is required when `subtype` is one of: " + "`country`, `dependency`" + ) + assert result["admin_level"] == [expected] + assert result["subtype"] == [expected] + + def test_no_extra_fields_produces_no_annotations(self) -> None: + """NoExtraFieldsConstraint produces no field annotations.""" + constraint = NoExtraFieldsConstraint._create_internal("@no_extra_fields") + result = field_constraint_notes((constraint,)) + + assert result == {} + + def test_min_fields_set_produces_no_annotations(self) -> None: + """MinFieldsSetConstraint produces no field annotations.""" + constraint = MinFieldsSetConstraint._create_internal("@min_fields_set", 3) + result = field_constraint_notes((constraint,)) + + assert result == {} + + def test_radio_group_maps_all_fields(self) -> None: + """RadioGroupConstraint maps each field name to the description.""" + constraint = RadioGroupConstraint._create_internal( + "@radio_group", "is_land", "is_sea" + ) + 
result = field_constraint_notes((constraint,)) + + expected = "Exactly one of `is_land`, `is_sea` must be `true`" + assert result == {"is_land": [expected], "is_sea": [expected]} + + def test_multiple_constraints_on_one_field(self) -> None: + """Field appearing in multiple constraints gets all descriptions.""" + c1 = RequireAnyOfConstraint._create_internal( + "@require_any_of", "name", "description" + ) + c2 = RequireIfConstraint._create_internal( + "@require_if", ["name"], FieldEqCondition("subtype", "venue") + ) + result = field_constraint_notes((c1, c2)) + + assert len(result["name"]) == 2 + + +class TestDescribeFieldConstraint: + """Tests for describe_field_constraint readable output.""" + + def test_ge(self) -> None: + assert describe_field_constraint(Ge(ge=0)) == "`≥ 0`" + + def test_le(self) -> None: + assert describe_field_constraint(Le(le=100)) == "`≤ 100`" + + def test_gt(self) -> None: + assert describe_field_constraint(Gt(gt=0)) == "`> 0`" + + def test_lt(self) -> None: + assert describe_field_constraint(Lt(lt=100)) == "`< 100`" + + def test_min_len(self) -> None: + assert describe_field_constraint(MinLen(min_length=1)) == "`minimum length: 1`" + + def test_max_len(self) -> None: + assert ( + describe_field_constraint(MaxLen(max_length=10)) == "`maximum length: 10`" + ) + + def test_interval_closed(self) -> None: + assert describe_field_constraint(Interval(ge=0, le=100)) == "`0 ≤ x ≤ 100`" + + def test_interval_open(self) -> None: + assert describe_field_constraint(Interval(gt=0, lt=100)) == "`0 < x < 100`" + + def test_interval_half_open(self) -> None: + assert describe_field_constraint(Interval(ge=0, lt=100)) == "`0 ≤ x < 100`" + + def test_interval_lower_only(self) -> None: + assert describe_field_constraint(Interval(ge=0)) == "`≥ 0`" + + def test_interval_upper_only(self) -> None: + assert describe_field_constraint(Interval(le=100)) == "`≤ 100`" + + def test_geometry_type_single(self) -> None: + constraint = 
GeometryTypeConstraint(GeometryType.POINT) + assert describe_field_constraint(constraint) == "Allowed geometry types: Point" + + def test_geometry_type_multiple(self) -> None: + constraint = GeometryTypeConstraint(GeometryType.POINT, GeometryType.POLYGON) + assert ( + describe_field_constraint(constraint) + == "Allowed geometry types: Point, Polygon" + ) + + def test_geometry_type_all_types(self) -> None: + constraint = GeometryTypeConstraint( + GeometryType.POINT, + GeometryType.LINE_STRING, + GeometryType.POLYGON, + ) + assert ( + describe_field_constraint(constraint) + == "Allowed geometry types: LineString, Point, Polygon" + ) + + def test_reference_belongs_to(self) -> None: + class Target(Identified): + pass + + constraint = Reference(Relationship.BELONGS_TO, Target) + assert ( + describe_field_constraint(constraint) == "References `Target` (belongs to)" + ) + + def test_reference_connects_to(self) -> None: + class Other(Identified): + pass + + constraint = Reference(Relationship.CONNECTS_TO, Other) + assert ( + describe_field_constraint(constraint) == "References `Other` (connects to)" + ) diff --git a/packages/overture-schema-codegen/tests/test_enum_extraction.py b/packages/overture-schema-codegen/tests/test_enum_extraction.py index f51f7f707..7cdf7bcef 100644 --- a/packages/overture-schema-codegen/tests/test_enum_extraction.py +++ b/packages/overture-schema-codegen/tests/test_enum_extraction.py @@ -7,6 +7,11 @@ from overture.schema.system.doc import DocumentedEnum +def find_member(spec: EnumSpec, name: str) -> EnumMemberSpec: + """Find a member by name in an EnumSpec, raising if missing.""" + return next(m for m in spec.members if m.name == name) + + class TestEnumMemberSpec: """Tests for EnumMemberSpec dataclass.""" @@ -66,11 +71,11 @@ class RoofShape(str, Enum): assert len(result.members) == 3 # Check member extraction - flat = next(m for m in result.members if m.name == "FLAT") + flat = find_member(result, "FLAT") assert flat.value == "flat" assert 
flat.description is None - gabled = next(m for m in result.members if m.name == "GABLED") + gabled = find_member(result, "GABLED") assert gabled.value == "gabled" def test_enum_without_docstring(self) -> None: @@ -104,11 +109,11 @@ class Side(str, DocumentedEnum): assert result.description == "The side on which something appears." assert len(result.members) == 2 - left = next(m for m in result.members if m.name == "LEFT") + left = find_member(result, "LEFT") assert left.value == "left" assert left.description == "On the left side" - right = next(m for m in result.members if m.name == "RIGHT") + right = find_member(result, "RIGHT") assert right.value == "right" assert right.description == "On the right side" @@ -124,11 +129,11 @@ class ConnectionState(str, DocumentedEnum): result = extract_enum(ConnectionState) - connected = next(m for m in result.members if m.name == "CONNECTED") + connected = find_member(result, "CONNECTED") assert connected.value == "connected" assert connected.description is None - quiescing = next(m for m in result.members if m.name == "QUIESCING") + quiescing = find_member(result, "QUIESCING") assert quiescing.value == "quiescing" assert quiescing.description == "Gracefully shutting down" diff --git a/packages/overture-schema-codegen/tests/test_specs.py b/packages/overture-schema-codegen/tests/test_specs.py index 0b47187ed..258fbbfd9 100644 --- a/packages/overture-schema-codegen/tests/test_specs.py +++ b/packages/overture-schema-codegen/tests/test_specs.py @@ -3,6 +3,7 @@ from typing import Annotated from codegen_test_support import STR_TYPE, make_union_spec +from overture.schema.codegen.model_extraction import extract_model from overture.schema.codegen.specs import ( AnnotatedField, FeatureSpec, @@ -23,8 +24,6 @@ def test_model_spec_satisfies_feature_spec(self) -> None: class Simple(BaseModel): name: str - from overture.schema.codegen.model_extraction import extract_model - spec = extract_model(Simple) # Protocol compliance check assert 
isinstance(spec, FeatureSpec) diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index 8f1e11e13..6759e32e8 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -505,11 +505,11 @@ def test_bare_list_raises_type_error(self) -> None: analyze_type(list) -class _UnionModelA(BaseModel): +class UnionModelA(BaseModel): x: int -class _UnionModelB(BaseModel): +class UnionModelB(BaseModel): y: str @@ -518,19 +518,19 @@ class TestAnalyzeTypeUnion: def test_all_model_union_returns_union_kind(self) -> None: """Annotated[Union of BaseModel subclasses] returns TypeKind.UNION.""" - union_type = Annotated[_UnionModelA | _UnionModelB, Field(description="test")] + union_type = Annotated[UnionModelA | UnionModelB, Field(description="test")] result = analyze_type(union_type) assert result.kind == TypeKind.UNION assert result.union_members is not None assert len(result.union_members) == 2 - assert _UnionModelA in result.union_members - assert _UnionModelB in result.union_members + assert UnionModelA in result.union_members + assert UnionModelB in result.union_members def test_annotated_wrapped_members_unwrapped(self) -> None: """Union members wrapped in Annotated[X, Tag(...)] are unwrapped.""" union_type = Annotated[ - Annotated[_UnionModelA, Tag("a")] | Annotated[_UnionModelB, Tag("b")], + Annotated[UnionModelA, Tag("a")] | Annotated[UnionModelB, Tag("b")], Field(description="disc"), ] result = analyze_type(union_type) @@ -538,13 +538,13 @@ def test_annotated_wrapped_members_unwrapped(self) -> None: assert result.kind == TypeKind.UNION assert result.union_members is not None assert len(result.union_members) == 2 - assert _UnionModelA in result.union_members - assert _UnionModelB in result.union_members + assert UnionModelA in result.union_members + assert UnionModelB in result.union_members def 
test_mixed_model_nonmodel_union_still_raises(self) -> None: """Union of model + non-model types still raises UnsupportedUnionError.""" with pytest.raises(UnsupportedUnionError): - analyze_type(_UnionModelA | str) + analyze_type(UnionModelA | str) def test_non_model_multi_union_still_raises(self) -> None: """Multi-type union of non-models still raises UnsupportedUnionError.""" @@ -554,14 +554,14 @@ def test_non_model_multi_union_still_raises(self) -> None: def test_union_base_type_is_first_member_name(self) -> None: """UNION TypeInfo base_type is the first member's class name.""" result = analyze_type( - Annotated[_UnionModelA | _UnionModelB, Field(description="test")] + Annotated[UnionModelA | UnionModelB, Field(description="test")] ) - assert result.base_type == "_UnionModelA" + assert result.base_type == "UnionModelA" def test_optional_union_sets_is_optional(self) -> None: """Union with None among model members sets is_optional.""" result = analyze_type( - Annotated[_UnionModelA | _UnionModelB, Field(description="test")] | None + Annotated[UnionModelA | UnionModelB, Field(description="test")] | None ) assert result.kind == TypeKind.UNION assert result.is_optional is True From 065fef58bbb4c2dbe59b8e7b6e794f94c36e981e Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:01:20 -0800 Subject: [PATCH 16/38] feat(codegen): add output layout modules Determine what artifacts to generate and where they go: - module_layout: compute output directories for entry points, map Python module paths to filesystem output paths via compute_output_dir - path_assignment: build_placement_registry maps types to output file paths. 
Feature models get {theme}/{slug}/, shared types get types/{subsystem}/, theme-local types nest under their feature or sit flat at theme level - type_collection: discover supplementary types (enums, NewTypes, sub-models) by walking expanded feature trees - link_computation: relative_link() computes cross-page links, LinkContext holds page path + registry for resolving links during rendering --- .../schema/codegen/link_computation.py | 54 ++++++ .../codegen/model_constraint_description.py | 3 +- .../overture/schema/codegen/module_layout.py | 150 +++++++++++++++ .../schema/codegen/path_assignment.py | 101 ++++++++++ .../schema/codegen/type_collection.py | 118 ++++++++++++ .../schema/codegen/union_extraction.py | 19 +- .../tests/test_module_layout.py | 175 +++++++++++++++++ .../tests/test_type_analyzer.py | 25 +++ .../tests/test_type_collection.py | 50 +++++ .../tests/test_type_placement.py | 178 ++++++++++++++++++ 10 files changed, 855 insertions(+), 18 deletions(-) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py create mode 100644 packages/overture-schema-codegen/tests/test_module_layout.py create mode 100644 packages/overture-schema-codegen/tests/test_type_collection.py create mode 100644 packages/overture-schema-codegen/tests/test_type_placement.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py b/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py new file mode 100644 index 000000000..22aae5b0f --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py @@ -0,0 +1,54 @@ +"""Relative link computation between 
rendered output files.""" + +from dataclasses import dataclass +from pathlib import PurePosixPath + +__all__ = ["LinkContext", "relative_link"] + + +@dataclass +class LinkContext: + """Placement context for resolving cross-directory markdown links.""" + + page_path: PurePosixPath + registry: dict[str, PurePosixPath] + + def resolve_link(self, name: str) -> str | None: + """Resolve *name* to a relative link if it exists in the registry.""" + if name in self.registry: + return relative_link(self.page_path, self.registry[name]) + return None + + +def _is_normalized(path: PurePosixPath) -> bool: + """True when the path contains no '..' or '.' components (except root '.').""" + return ".." not in path.parts and path.parts.count(".") <= 1 + + +def relative_link(source: PurePosixPath, target: PurePosixPath) -> str: + """Compute a relative path from source file to target file. + + Both paths must be normalized (no ``..`` components) and relative + to the same output root. + """ + assert _is_normalized(source), f"Source path not normalized: {source}" + assert _is_normalized(target), f"Target path not normalized: {target}" + source_dir = source.parent + # Count how many levels up from source_dir to common ancestor, + # then descend to target. PurePosixPath doesn't have os.path.relpath, + # so compute manually. + source_parts = source_dir.parts + target_parts = target.parts + + # Find common prefix length + common = 0 + for s, t in zip(source_parts, target_parts, strict=False): + if s != t: + break + common += 1 + + ups = len(source_parts) - common + downs = target_parts[common:] + + parts = [".."] * ups + list(downs) + return "/".join(parts) if parts else "." 
diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py index 4401a777a..8e60a2d37 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py @@ -100,7 +100,8 @@ def _as_field_eq(constraint: _ConditionalConstraint) -> FieldEqCondition: rejects non-FieldEqCondition conditions. """ cond = constraint.condition - assert isinstance(cond, FieldEqCondition) + if not isinstance(cond, FieldEqCondition): + raise TypeError(f"Expected FieldEqCondition, got {type(cond).__name__}") return cond diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py b/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py new file mode 100644 index 000000000..27eb02057 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py @@ -0,0 +1,150 @@ +"""Output directory layout from Python module paths. + +Translates dotted module paths into output directory paths by mirroring +the source package structure. +""" + +from __future__ import annotations + +import sys +from collections.abc import Iterable, Mapping +from pathlib import PurePosixPath + +__all__ = [ + "ROOT_DIR", + "compute_output_dir", + "compute_schema_root", + "entry_point_class", + "entry_point_module", + "is_package_module", + "module_relpath", + "output_dir_for_entry_point", +] + +ROOT_DIR = PurePosixPath(".") + + +def _split_entry_point(entry_point_path: str) -> tuple[str, str]: + """Split ``"module.path:ClassName"`` into its two parts. 
+ + >>> _split_entry_point("overture.schema.buildings:Building") + ('overture.schema.buildings', 'Building') + """ + if ":" not in entry_point_path: + msg = f"Expected 'module:Class' format, got {entry_point_path!r}" + raise ValueError(msg) + module, cls = entry_point_path.split(":", 1) + return module, cls + + +def entry_point_module(entry_point_path: str) -> str: + """Extract module path from entry-point-style path. + + >>> entry_point_module("overture.schema.buildings:Building") + 'overture.schema.buildings' + """ + return _split_entry_point(entry_point_path)[0] + + +def entry_point_class(entry_point_path: str) -> str: + """Extract class name from entry-point-style path. + + >>> entry_point_class("overture.schema.buildings:Building") + 'Building' + """ + return _split_entry_point(entry_point_path)[1] + + +def compute_schema_root(module_paths: Iterable[str]) -> str: + """Find the longest common dotted prefix of module paths. + + Deduplicates inputs first. For a single unique path, drops the + last component (the module itself). + """ + paths = sorted(set(module_paths)) + if not paths: + msg = "No module paths provided" + raise ValueError(msg) + + segments = [p.split(".") for p in paths] + if len(segments) == 1: + return ".".join(segments[0][:-1]) + + common: list[str] = [] + for parts in zip(*segments, strict=False): + if len(set(parts)) == 1: + common.append(parts[0]) + else: + break + return ".".join(common) + + +def module_relpath(module: str, root: str) -> str: + """Strip the schema root prefix from a dotted module path.""" + if not root: + return module + if module == root: + return "" + prefix = root + "." + if not module.startswith(prefix): + msg = f"Module {module!r} does not start with root {root!r}" + raise ValueError(msg) + return module[len(prefix) :] + + +def is_package_module( + module: str, + module_registry: Mapping[str, object] | None = None, +) -> bool: + """Check whether a module is a package (directory) or a file module. 
+ + Packages have ``__path__``; file modules do not (PEP 302). + """ + registry: Mapping[str, object] = ( + module_registry if module_registry is not None else sys.modules + ) + mod = registry.get(module) + if mod is None: + msg = f"Module {module!r} not found in registry" + raise ValueError(msg) + return hasattr(mod, "__path__") + + +def output_dir_for_entry_point( + entry_point_path: str | None, + schema_root: str, + module_registry: Mapping[str, object] | None = None, +) -> PurePosixPath: + """Compute output directory from an entry-point-style path. + + Raises ValueError if *entry_point_path* is None. + """ + if entry_point_path is None: + msg = "entry_point_path must not be None" + raise ValueError(msg) + module = entry_point_module(entry_point_path) + return compute_output_dir(module, schema_root, module_registry) + + +def compute_output_dir( + module: str, + schema_root: str, + module_registry: Mapping[str, object] | None = None, +) -> PurePosixPath: + """Compute output directory for a module, mirroring package structure. + + File modules drop their last component (the .py filename). + Packages keep all components. Returns ``PurePosixPath(".")`` for + the root directory. + """ + relpath = module_relpath(module, schema_root) + if not relpath: + return ROOT_DIR + + parts = relpath.split(".") + if not is_package_module(module, module_registry): + parts = parts[:-1] + + if not parts: + return ROOT_DIR + return PurePosixPath(*parts) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py new file mode 100644 index 000000000..020f6e44b --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py @@ -0,0 +1,101 @@ +"""Map types to markdown output file paths. + +Uses module-mirrored output directories: output paths derive from +the source Python module path relative to schema_root. 
+""" + +from collections.abc import Sequence +from pathlib import PurePosixPath + +from .case_conversion import slug_filename +from .module_layout import compute_output_dir, output_dir_for_entry_point +from .specs import FeatureSpec, SupplementarySpec + +__all__ = [ + "GEOMETRY_PAGE", + "PRIMITIVES_PAGE", + "build_placement_registry", + "resolve_output_path", +] + +# Aggregate page paths. +PRIMITIVES_PAGE = PurePosixPath("system/primitive/primitives.md") +GEOMETRY_PAGE = PurePosixPath("system/primitive/geometry.md") + + +def build_placement_registry( + feature_specs: Sequence[FeatureSpec], + all_specs: dict[str, SupplementarySpec], + primitive_names: list[str], + geometry_names: list[str], + schema_root: str, +) -> dict[str, PurePosixPath]: + """Build a mapping from type names to output file paths. + + Uses module-mirrored output directories: output paths derive from + the source Python module path relative to schema_root. + """ + registry: dict[str, PurePosixPath] = _aggregate_page_entries( + primitive_names, geometry_names + ) + + feature_dirs: set[PurePosixPath] = set() + for spec in feature_specs: + spec_dir = output_dir_for_entry_point(spec.entry_point, schema_root) + registry[spec.name] = _md_path(spec_dir, spec.name) + feature_dirs.add(spec_dir) + + for name, supp_spec in all_specs.items(): + if name in registry: + continue + source_module = getattr(supp_spec.source_type, "__module__", None) + if source_module is None: + continue + output_dir = compute_output_dir(source_module, schema_root) + output_dir = _nest_under_types(output_dir, feature_dirs) + registry[name] = _md_path(output_dir, name) + + return registry + + +def resolve_output_path( + type_name: str, + registry: dict[str, PurePosixPath] | None, +) -> PurePosixPath: + """Look up a type's output path from the registry, with flat-file fallback.""" + if registry is not None and type_name in registry: + return registry[type_name] + return PurePosixPath(slug_filename(type_name)) + + +def 
_aggregate_page_entries( + primitive_names: list[str], + geometry_names: list[str], +) -> dict[str, PurePosixPath]: + """Pre-populate registry entries for types documented on aggregate pages.""" + entries: dict[str, PurePosixPath] = dict.fromkeys(primitive_names, PRIMITIVES_PAGE) + entries.update(dict.fromkeys(geometry_names, GEOMETRY_PAGE)) + return entries + + +def _nest_under_types( + output_dir: PurePosixPath, feature_dirs: set[PurePosixPath] +) -> PurePosixPath: + """Insert ``types/`` after the feature directory portion. + + If *output_dir* equals or is a subdirectory of a feature directory, + returns a path with ``types/`` inserted after the feature directory. + Otherwise returns *output_dir* unchanged. + """ + for fd in sorted(feature_dirs, key=lambda p: len(p.parts), reverse=True): + try: + relative = output_dir.relative_to(fd) + except ValueError: + continue + return fd / "types" / relative + return output_dir + + +def _md_path(directory: PurePosixPath, name: str) -> PurePosixPath: + """Build a .md file path from a directory and a PascalCase type name.""" + return directory / slug_filename(name) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py new file mode 100644 index 000000000..1e2a137b4 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py @@ -0,0 +1,118 @@ +"""Supplementary type discovery by walking expanded feature trees. + +Walks FieldSpec.model references for sub-models (already extracted), +and extracts enums and NewTypes on first encounter. 
+""" + +from collections.abc import Sequence +from typing import Annotated, get_args, get_origin + +from .enum_extraction import extract_enum +from .model_extraction import extract_model +from .newtype_extraction import extract_newtype +from .specs import FeatureSpec, FieldSpec, ModelSpec, SupplementarySpec +from .type_analyzer import TypeInfo, TypeKind, analyze_type, is_newtype +from .type_registry import is_semantic_newtype + +__all__ = ["collect_all_supplementary_types"] + + +def collect_all_supplementary_types( + feature_specs: Sequence[FeatureSpec], +) -> dict[str, SupplementarySpec]: + """Collect supplementary types by walking expanded feature trees. + + Requires that expand_model_tree has been called on all feature specs + first. Walks FieldSpec.model references for sub-models (already + extracted), and extracts enums and NewTypes on first encounter. + + Returns a dict mapping type names to extracted specs. + """ + feature_names = {spec.name for spec in feature_specs} + all_specs: dict[str, SupplementarySpec] = {} + visited_models: set[str] = set() + + def _collect_from_model(model_spec: ModelSpec) -> None: + if model_spec.name in visited_models or model_spec.name in feature_names: + return + visited_models.add(model_spec.name) + all_specs[model_spec.name] = model_spec + _collect_from_fields(model_spec.fields) + + def _collect_inner_newtypes(newtype_ref: object) -> None: + """Walk a NewType's __supertype__ chain for intermediate semantic NewTypes.""" + annotation = getattr(newtype_ref, "__supertype__", None) + while annotation is not None: + if get_origin(annotation) is Annotated: + annotation = get_args(annotation)[0] + continue + if is_newtype(annotation): + inner_ti = analyze_type(annotation) + if ( + inner_ti.newtype_name is not None + and is_semantic_newtype(inner_ti) + and inner_ti.newtype_name not in all_specs + ): + all_specs[inner_ti.newtype_name] = extract_newtype(annotation) + annotation = getattr(annotation, "__supertype__", None) + continue + 
break + + def _collect_from_type_info(ti: TypeInfo) -> None: + """Collect supplementary types from a single TypeInfo.""" + if ti.kind == TypeKind.UNION: + if not ti.union_members: + return + # Walk each member's fields for supplementary types. + # Members that are also top-level feature specs are skipped + # by the feature_names guard in _collect_from_model. + for member_cls in ti.union_members: + member_spec = extract_model(member_cls) + _collect_from_model(member_spec) + return + if ti.kind == TypeKind.ENUM and ti.source_type is not None: + name = ti.source_type.__name__ + if name not in all_specs: + all_specs[name] = extract_enum(ti.source_type) + + # Semantic NewTypes always get extracted, including intermediate + # NewTypes in the wrapping chain (e.g., Id wraps NoWhitespaceString + # wraps str — both Id and NoWhitespaceString get pages). + if ( + ti.newtype_ref is not None + and ti.newtype_name is not None + and is_semantic_newtype(ti) + and ti.newtype_name not in all_specs + ): + all_specs[ti.newtype_name] = extract_newtype(ti.newtype_ref) + _collect_inner_newtypes(ti.newtype_ref) + + # Dict key/value types can also reference supplementary types + if ti.dict_key_type is not None: + _collect_from_type_info(ti.dict_key_type) + if ti.dict_value_type is not None: + _collect_from_type_info(ti.dict_value_type) + + def _collect_from_fields(fields: list[FieldSpec]) -> None: + # A single field can match multiple conditions (e.g., Sources is both + # a semantic NewType and wraps a MODEL-kind type), so checks are + # independent `if` statements, not `elif`. 
+ for field_spec in fields: + ti = field_spec.type_info + _collect_from_type_info(ti) + + # MODEL-kind fields (whether direct or via NewType wrapper) get expanded + if ti.kind == TypeKind.MODEL and ti.source_type is not None: + if field_spec.model is None: + msg = ( + f"MODEL-kind field {field_spec.name!r} has source_type " + f"but model=None — call expand_model_tree first" + ) + raise RuntimeError(msg) + if not field_spec.starts_cycle: + _collect_from_model(field_spec.model) + + for spec in feature_specs: + _collect_from_fields(spec.fields) + + return all_specs diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py index c826206cc..67ab5549a 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py @@ -16,16 +16,6 @@ __all__ = ["extract_discriminator", "extract_union"] -def _extract_annotated_description(annotation: object) -> str | None: - """Extract description from Annotated metadata (FieldInfo).""" - if get_origin(annotation) is not Annotated: - return None - for metadata in get_args(annotation)[1:]: - if isinstance(metadata, FieldInfo) and metadata.description: - return metadata.description - return None - - def _find_common_base(members: list[type[BaseModel]]) -> type[BaseModel]: """Find the most-derived common BaseModel ancestor of all members.""" filtered_mros = [ @@ -46,11 +36,6 @@ def max_mro_index(cls: type) -> int: return min(common, key=max_mro_index) -def _discriminator_field_from_metadata(field_info: FieldInfo) -> str | None: - """Extract a discriminator field name from a FieldInfo's discriminator.""" - return resolve_discriminator_field_name(field_info.discriminator) - - def _find_field_by_alias(model: type[BaseModel], alias: str) -> FieldInfo | None: """Find a field in model_fields by alias-resolved 
name.""" direct = model.model_fields.get(alias) @@ -73,7 +58,7 @@ def extract_discriminator( disc_field_name: str | None = None for metadata in get_args(annotation)[1:]: if isinstance(metadata, FieldInfo): - disc_field_name = _discriminator_field_from_metadata(metadata) + disc_field_name = resolve_discriminator_field_name(metadata.discriminator) if disc_field_name is not None: break @@ -146,7 +131,7 @@ def extract_union( return UnionSpec( name=name, - description=_extract_annotated_description(annotation), + description=ti.description, annotated_fields=annotated_fields, members=members, discriminator_field=disc_field, diff --git a/packages/overture-schema-codegen/tests/test_module_layout.py b/packages/overture-schema-codegen/tests/test_module_layout.py new file mode 100644 index 000000000..8bb2daa85 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_module_layout.py @@ -0,0 +1,175 @@ +"""Tests for module_layout: output directory layout from module paths.""" + +from pathlib import PurePosixPath + +import pytest +from overture.schema.codegen.module_layout import ( + compute_output_dir, + compute_schema_root, + entry_point_class, + entry_point_module, + is_package_module, + module_relpath, +) + + +class TestComputeSchemaRoot: + def test_multiple_paths_common_prefix(self) -> None: + paths = [ + "overture.schema.buildings", + "overture.schema.places", + "overture.schema.divisions", + ] + assert compute_schema_root(paths) == "overture.schema" + + def test_single_path_drops_last_component(self) -> None: + assert compute_schema_root(["overture.schema.buildings"]) == "overture.schema" + + def test_mixed_depth_paths(self) -> None: + paths = [ + "overture.schema.buildings", + "overture.schema.core.names.primary_name", + ] + assert compute_schema_root(paths) == "overture.schema" + + def test_divergent_namespaces(self) -> None: + paths = ["overture.schema.buildings", "acme.transit"] + assert compute_schema_root(paths) == "" + + def test_empty_raises(self) -> None: 
+ with pytest.raises(ValueError): + compute_schema_root([]) + + def test_single_component_path(self) -> None: + assert compute_schema_root(["buildings"]) == "" + + def test_identical_paths_deduplicated(self) -> None: + paths = ["overture.schema.buildings", "overture.schema.buildings"] + assert compute_schema_root(paths) == "overture.schema" + + +class TestEntryPointModule: + def test_extracts_module(self) -> None: + assert entry_point_module("overture.schema.buildings:Building") == ( + "overture.schema.buildings" + ) + + def test_missing_colon_raises(self) -> None: + with pytest.raises(ValueError): + entry_point_module("no_colon") + + def test_multiple_colons_splits_on_first(self) -> None: + assert entry_point_module("mod:A:B") == "mod" + + +class TestEntryPointClass: + def test_extracts_class(self) -> None: + assert entry_point_class("overture.schema.buildings:Building") == "Building" + + def test_missing_colon_raises(self) -> None: + with pytest.raises(ValueError): + entry_point_class("no_colon") + + def test_colon_at_end_returns_empty(self) -> None: + assert entry_point_class("mod:") == "" + + def test_multiple_colons_splits_on_first(self) -> None: + assert entry_point_class("mod:A:B") == "A:B" + + +class TestModuleRelpath: + def test_strips_root_prefix(self) -> None: + assert ( + module_relpath("overture.schema.buildings", "overture.schema") + == "buildings" + ) + + def test_deep_path(self) -> None: + assert ( + module_relpath("overture.schema.core.names.primary_name", "overture.schema") + == "core.names.primary_name" + ) + + def test_module_equals_root(self) -> None: + assert module_relpath("overture.schema", "overture.schema") == "" + + def test_empty_root(self) -> None: + assert module_relpath("buildings", "") == "buildings" + + def test_nonmatching_raises(self) -> None: + with pytest.raises(ValueError): + module_relpath("acme.transit", "overture.schema") + + +def _make_registry(*entries: tuple[str, bool]) -> dict[str, object]: + """Build a synthetic module 
registry. + + Each entry is (module_path, is_package). Packages get __path__; + file modules do not. + """ + registry: dict[str, object] = {} + for mod_path, is_pkg in entries: + if is_pkg: + registry[mod_path] = type("pkg", (), {"__path__": ["/fake"]})() + else: + registry[mod_path] = type("mod", (), {})() + return registry + + +class TestIsPackageModule: + def test_package_has_path(self) -> None: + registry = _make_registry(("my.package", True)) + assert is_package_module("my.package", registry) is True + + def test_file_module_no_path(self) -> None: + registry = _make_registry(("my.module", False)) + assert is_package_module("my.module", registry) is False + + def test_missing_module_raises(self) -> None: + with pytest.raises(ValueError): + is_package_module("nonexistent", {}) + + +class TestComputeOutputDir: + def test_package_keeps_all_parts(self) -> None: + reg = _make_registry(("overture.schema.buildings", True)) + result = compute_output_dir("overture.schema.buildings", "overture.schema", reg) + assert result == PurePosixPath("buildings") + + def test_file_module_drops_last(self) -> None: + reg = _make_registry(("overture.schema.core.names.primary_name", False)) + result = compute_output_dir( + "overture.schema.core.names.primary_name", "overture.schema", reg + ) + assert result == PurePosixPath("core/names") + + def test_deep_package(self) -> None: + reg = _make_registry(("overture.schema.core.names", True)) + result = compute_output_dir( + "overture.schema.core.names", "overture.schema", reg + ) + assert result == PurePosixPath("core/names") + + def test_file_module_in_theme(self) -> None: + reg = _make_registry(("overture.schema.buildings.enums", False)) + result = compute_output_dir( + "overture.schema.buildings.enums", "overture.schema", reg + ) + assert result == PurePosixPath("buildings") + + def test_file_module_deep(self) -> None: + reg = _make_registry(("overture.schema.divisions.division.models", False)) + result = compute_output_dir( + 
"overture.schema.divisions.division.models", "overture.schema", reg + ) + assert result == PurePosixPath("divisions/division") + + def test_root_module_returns_dot(self) -> None: + reg = _make_registry(("overture.schema", True)) + result = compute_output_dir("overture.schema", "overture.schema", reg) + assert result == PurePosixPath(".") + + def test_file_module_one_level_returns_dot(self) -> None: + reg = _make_registry(("overture.schema.types", False)) + result = compute_output_dir("overture.schema.types", "overture.schema", reg) + assert result == PurePosixPath(".") diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index 6759e32e8..a858e3f9c 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -10,6 +10,7 @@ TypeKind, UnsupportedUnionError, analyze_type, + single_literal_value, ) from overture.schema.system.primitive import float64, int32 from overture.schema.system.ref import Id @@ -565,3 +566,27 @@ def test_optional_union_sets_is_optional(self) -> None: ) assert result.kind == TypeKind.UNION assert result.is_optional is True + + +class TestSingleLiteralValue: + """Tests for single_literal_value extraction.""" + + def test_single_value_literal(self) -> None: + """Literal["x"] returns the literal value.""" + assert single_literal_value(Literal["x"]) == "x" + + def test_single_int_literal(self) -> None: + """Literal[42] returns the integer value.""" + assert single_literal_value(Literal[42]) == 42 + + def test_multi_value_literal_returns_none(self) -> None: + """Multi-value Literal returns None (no single default).""" + assert single_literal_value(Literal["a", "b"]) is None + + def test_non_literal_returns_none(self) -> None: + """Non-Literal types return None.""" + assert single_literal_value(str) is None + + def test_unsupported_type_returns_none(self) -> None: + """Types that raise 
during analysis return None.""" + assert single_literal_value("not a type") is None diff --git a/packages/overture-schema-codegen/tests/test_type_collection.py b/packages/overture-schema-codegen/tests/test_type_collection.py new file mode 100644 index 000000000..78ec75695 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_collection.py @@ -0,0 +1,50 @@ +"""Tests for type collection module.""" + +from codegen_test_support import FeatureWithAddress, FeatureWithSources, Instrument +from overture.schema.codegen.model_extraction import expand_model_tree, extract_model +from overture.schema.codegen.specs import ( + EnumSpec, + ModelSpec, + NewTypeSpec, + SupplementarySpec, +) +from overture.schema.codegen.type_collection import collect_all_supplementary_types + + +class TestCollectAllSupplementarySpecs: + """Tests for collect_all_supplementary_types returning specs from expanded trees.""" + + @staticmethod + def _expanded_supplementary( + model_class: type, + ) -> dict[str, SupplementarySpec]: + spec = extract_model(model_class) + expand_model_tree(spec) + return collect_all_supplementary_types([spec]) + + def test_returns_enum_specs(self) -> None: + result = self._expanded_supplementary(Instrument) + + assert "InstrumentFamily" in result + assert isinstance(result["InstrumentFamily"], EnumSpec) + + def test_returns_newtype_specs(self) -> None: + result = self._expanded_supplementary(Instrument) + + assert "HexColor" in result + assert isinstance(result["HexColor"], NewTypeSpec) + + def test_returns_model_specs_from_expanded_tree(self) -> None: + result = self._expanded_supplementary(FeatureWithAddress) + + assert "Address" in result + assert isinstance(result["Address"], ModelSpec) + + def test_collects_transitive_types(self) -> None: + """Types referenced by sub-models are also collected.""" + result = self._expanded_supplementary(FeatureWithSources) + + # Sources is a semantic NewType; SourceItem is a sub-model + # referenced transitively via the 
expanded tree + assert "Sources" in result + assert "SourceItem" in result diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py new file mode 100644 index 000000000..aaa7c5fb3 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -0,0 +1,178 @@ +"""Tests for type placement module.""" + +from collections.abc import Sequence +from pathlib import PurePosixPath + +import overture.schema.system.primitive as _system_primitive +from codegen_test_support import STR_TYPE, flat_specs_from_discovery, make_union_spec +from overture.schema.codegen.link_computation import relative_link +from overture.schema.codegen.model_extraction import expand_model_tree +from overture.schema.codegen.path_assignment import ( + GEOMETRY_PAGE, + PRIMITIVES_PAGE, + build_placement_registry, +) +from overture.schema.codegen.primitive_extraction import ( + partition_primitive_and_geometry_names, +) +from overture.schema.codegen.specs import ( + AnnotatedField, + FeatureSpec, + FieldSpec, + ModelSpec, + SupplementarySpec, +) +from overture.schema.codegen.type_collection import collect_all_supplementary_types + +_PRIMITIVE_NAMES, _GEOMETRY_NAMES = partition_primitive_and_geometry_names( + _system_primitive +) + +_SCHEMA_ROOT = "overture.schema" + + +def _build_registry( + feature_specs: list[ModelSpec], +) -> tuple[dict[str, PurePosixPath], dict[str, SupplementarySpec]]: + """Build placement registry with standard aggregate names.""" + cache: dict[type, ModelSpec] = {} + for spec in feature_specs: + expand_model_tree(spec, cache) + all_specs = collect_all_supplementary_types(feature_specs) + registry = build_placement_registry( + feature_specs, all_specs, _PRIMITIVE_NAMES, _GEOMETRY_NAMES, _SCHEMA_ROOT + ) + return registry, all_specs + + +class TestRelativeLink: + """Test relative path computation between pages.""" + + def test_same_directory(self) -> None: + source = 
PurePosixPath("buildings/building.md") + target = PurePosixPath("buildings/facade_material.md") + assert relative_link(source, target) == "facade_material.md" + + def test_sibling_directory(self) -> None: + source = PurePosixPath("buildings/building.md") + target = PurePosixPath("core/names/names.md") + assert relative_link(source, target) == "../core/names/names.md" + + def test_within_core(self) -> None: + source = PurePosixPath("core/names/names.md") + target = PurePosixPath("core/sources/sources.md") + assert relative_link(source, target) == "../sources/sources.md" + + def test_to_aggregate_page(self) -> None: + source = PurePosixPath("core/names/names.md") + target = PurePosixPath("system/primitive/primitives.md") + assert relative_link(source, target) == "../../system/primitive/primitives.md" + + +class TestBuildPlacementRegistry: + """Test the full placement registry builder with module-mirrored paths.""" + + def test_features_at_theme_level(self) -> None: + """Features land directly in their theme directory.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + assert registry["Building"] == PurePosixPath("buildings/building.md") + assert registry["BuildingPart"] == PurePosixPath("buildings/building_part.md") + + def test_shared_types_mirror_source_modules(self) -> None: + """Core/system types land in directories matching their module path.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + if "Names" in registry: + assert str(registry["Names"]).startswith("core/") + + def test_no_duplicate_paths(self) -> None: + """No two individual types share an output path.""" + specs = flat_specs_from_discovery() + registry, _ = _build_registry(specs) + + aggregate_pages = { + PurePosixPath("system/primitive/primitives.md"), + PurePosixPath("system/primitive/geometry.md"), + } + individual = [p for p in registry.values() if p not in aggregate_pages] + assert len(individual) == 
len(set(individual)), ( + "Duplicate output paths detected" + ) + + def test_aggregate_pages_at_system_primitive(self) -> None: + """Primitive and geometry aggregate pages under system/primitive/.""" + assert PRIMITIVES_PAGE == PurePosixPath("system/primitive/primitives.md") + assert GEOMETRY_PAGE == PurePosixPath("system/primitive/geometry.md") + + def test_supplementary_types_nested_under_types(self) -> None: + """Supplementary types in a feature directory go under types/.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + # BuildingClass is a supplementary type from the buildings module + assert registry["BuildingClass"] == PurePosixPath( + "buildings/types/building_class.md" + ) + + def test_submodule_supplementary_types_nested_under_types(self) -> None: + """Supplementary types in a feature subdirectory go under types/.""" + specs = flat_specs_from_discovery("divisions") + registry, _ = _build_registry(specs) + + # AreaClass is from overture.schema.divisions.division_area.enums, + # a subdirectory of the divisions feature directory. 
+ assert registry["AreaClass"] == PurePosixPath( + "divisions/types/division_area/area_class.md" + ) + + def test_shared_types_not_nested(self) -> None: + """Core/system supplementary types stay at their module-mirrored path.""" + specs = flat_specs_from_discovery("buildings") + registry, _ = _build_registry(specs) + + # Names is from overture.schema.core -- no features there, no nesting + if "Names" in registry: + path = str(registry["Names"]) + assert path.startswith("core/") + assert "/types/" not in path + + +class TestPlacementWithUnionSpec: + """Tests for placement registry with UnionSpec.""" + + def test_union_spec_gets_placement(self) -> None: + """UnionSpec is placed alongside ModelSpec in the registry.""" + from pydantic import BaseModel + + class Base(BaseModel): + name: str + + class A(Base): + x: int + + union_spec = make_union_spec( + annotated_fields=[ + AnnotatedField( + field_spec=FieldSpec( + name="name", + type_info=STR_TYPE, + description=None, + is_required=True, + ), + variant_sources=None, + ), + ], + members=[A], + common_base=Base, + entry_point="test.package:TestUnion", + ) + + feature_specs: Sequence[FeatureSpec] = [union_spec] + all_specs = collect_all_supplementary_types(feature_specs) + registry = build_placement_registry( + feature_specs, all_specs, [], [], "test.package" + ) + assert "TestUnion" in registry From 1e0ce225654da30762c0e1eb238bae41b4aebb51 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 20:29:11 -0800 Subject: [PATCH 17/38] feat(codegen): add example data to theme pyproject.toml files Embed JSON example features in [tool.overture-schema.examples] sections. Each example is a complete GeoJSON Feature matching the theme's Pydantic model, used by the codegen example_loader to render example tables in documentation. 
--- .../pyproject.toml | 33 +++ .../overture-schema-base-theme/pyproject.toml | 207 ++++++++++++++++++ .../pyproject.toml | 78 +++++++ .../codegen/model_constraint_description.py | 2 +- .../schema/codegen/model_extraction.py | 6 +- .../overture/schema/codegen/type_analyzer.py | 2 +- .../tests/codegen_test_support.py | 17 ++ .../overture-schema-codegen/tests/conftest.py | 17 +- .../tests/test_enum_extraction.py | 6 +- .../tests/test_type_placement.py | 3 +- .../pyproject.toml | 135 ++++++++++++ .../pyproject.toml | 57 +++++ .../pyproject.toml | 83 +++++++ 13 files changed, 620 insertions(+), 26 deletions(-) diff --git a/packages/overture-schema-addresses-theme/pyproject.toml b/packages/overture-schema-addresses-theme/pyproject.toml index b8ab65afa..26f12cf4d 100644 --- a/packages/overture-schema-addresses-theme/pyproject.toml +++ b/packages/overture-schema-addresses-theme/pyproject.toml @@ -30,3 +30,36 @@ testpaths = ["tests"] [project.entry-points."overture.models"] "overture:addresses:address" = "overture.schema.addresses:Address" + +[[examples.Address]] +id = "416ab01c-d836-4c4f-aedc-2f30941ce94d" +geometry = "POINT (-176.5637854 -43.9471955)" +country = "NZ" +postcode = "null" +street = "Tikitiki Hill Road" +number = "54" +unit = "null" +postal_city = "null" +version = 1 +theme = "addresses" +type = "address" + +[examples.Address.bbox] +xmin = -176.56381225585938 +xmax = -176.56378173828125 +ymin = -43.94719696044922 +ymax = -43.94718933105469 + +[[examples.Address.address_levels]] +value = "Chatham Islands" + +[[examples.Address.address_levels]] +value = "Chatham Island" + +[[examples.Address.sources]] +property = "" +dataset = "OpenAddresses/LINZ" +record_id = "null" +update_time = "null" +confidence = "null" +between = "null" diff --git a/packages/overture-schema-base-theme/pyproject.toml b/packages/overture-schema-base-theme/pyproject.toml index d71b20f82..24f4525c7 100644 --- a/packages/overture-schema-base-theme/pyproject.toml +++ 
b/packages/overture-schema-base-theme/pyproject.toml @@ -31,3 +31,210 @@ packages = ["src/overture"] "overture:base:land_cover" = "overture.schema.base:LandCover" "overture:base:land_use" = "overture.schema.base:LandUse" "overture:base:water" = "overture.schema.base:Water" + +[[examples.Bathymetry]] +id = "5d40bd6c-db14-5492-b29f-5e25a59032bc" +geometry = "MULTIPOLYGON (((-170.71296928 -76.744313428, -170.719841483 -76.757076376, -170.731061124 -76.761566192, -170.775652756 -76.76338726, -170.853616381 -76.76253958, -170.918562293 -76.755380155, -170.970490492 -76.741908984, -170.998699301 -76.729180777, -171.003188718 -76.717195533, -170.990421551 -76.703765214, -170.960397802 -76.68888982, -170.940748072 -76.674697941, -170.931472364 -76.661189576, -170.927114414 -76.637296658, -170.927674224 -76.603019188, -170.939335393 -76.574637428, -170.962097922 -76.552151379, -170.999015387 -76.535715361, -171.050087788 -76.525329373, -171.079133298 -76.50751024, -171.086151917 -76.482257963, -171.098653755 -76.462747286, -171.11663881 -76.448978211, -171.146691397 -76.437601179, -171.188811514 -76.428616191, -171.296181785 -76.4228609, -171.468802209 -76.420335306, -171.566055241 -76.41501101, -171.587940879 -76.406888013, -171.59004284 -76.387987744, -171.572361122 -76.358310204, -171.549343725 -76.334488281, -171.520990649 -76.316521976, -171.453759127 -76.301763636, -171.347649159 -76.290213262, -171.30597166 -76.267707269, -171.328726628 -76.234245658, -171.36676019 -76.195627518, -171.420072345 -76.151852851, -171.444766298 -76.12494912, -171.44084205 -76.114916326, -171.378107286 -76.099627787, -171.256562007 -76.079083503, -171.228218647 -76.058825682, -171.293077208 -76.038854322, -171.421365419 -76.023534207, -171.613083278 -76.012865337, -171.76411833 -75.99938969, -171.874470572 -75.983107266, -172.121928361 -75.958403596, -172.506491695 -75.925278679, -172.744527804 -75.899736153, -172.836036689 -75.88177602, -172.904681746 -75.862406785, -172.950462974 
-75.841628448, -173.000855857 -75.830396498, -173.055860393 -75.828710933, -173.177561398 -75.810743709, -173.365958872 -75.776494827, -173.493573084 -75.759370386, -173.560404033 -75.759370386, -173.620925776 -75.77158365, -173.675138312 -75.796010178, -173.733786206 -75.808642966, -173.796869456 -75.809482015, -173.847216433 -75.805553449, -173.884827135 -75.79685727, -173.90475244 -75.789177124, -173.906992347 -75.782513013, -173.881736947 -75.76894365, -173.828986239 -75.748469035, -173.797974615 -75.732298475, -173.788702075 -75.72043197, -173.82491541 -75.701013882, -173.90661462 -75.674044211, -173.977087913 -75.656066882, -174.03633529 -75.647081894, -174.150190099 -75.643010485, -174.31865234 -75.643852655, -174.444433211 -75.652836726, -174.527532713 -75.669962696, -174.581709229 -75.687086831, -174.606962758 -75.704209131, -174.631095834 -75.708279163, -174.654108458 -75.699296928, -174.688637451 -75.699296928, -174.734682816 -75.708279163, -174.797846917 -75.708699866, -174.878129754 -75.700559037, -174.939903816 -75.70870181, -174.9831691 -75.733128185, -175.025841122 -75.746602837, -175.06791988 -75.749125768, -175.09922327 -75.755318987, -175.119751293 -75.765182495, -175.127900229 -75.775197415, -175.123670077 -75.785363749, -175.111718372 -75.791289392, -175.092045112 -75.792974345, -175.049907399 -75.780622976, -174.985305232 -75.754235285, -174.935355308 -75.74552996, -174.900057628 -75.754507001, -174.886060973 -75.766815613, -174.893365345 -75.782455795, -174.907537393 -75.791536245, -174.928577117 -75.794056963, -174.971105378 -75.818213107, -175.035122174 -75.864004677, -175.060941949 -75.892403254, -175.048564703 -75.903408839, -175.020469049 -75.909193043, -174.976654988 -75.909755867, -174.944760829 -75.90482541, -174.924786572 -75.894401673, -174.92111336 -75.881479168, -174.933741192 -75.866057897, -174.900484967 -75.857513625, -174.821344686 -75.855846351, -174.752433709 -75.839289534, -174.693752038 -75.807843172, -174.652894268 
-75.780747792, -174.629860399 -75.758003392, -174.571227588 -75.745793709, -174.476995837 -75.744118743, -174.398722205 -75.751841803, -174.336406693 -75.768962888, -174.300477946 -75.783262828, -174.290935964 -75.794741623, -174.28812912 -75.812412878, -174.292057414 -75.836276591, -174.289237223 -75.852155302, -174.279668547 -75.860049012, -174.205113931 -75.879998026, -174.065573375 -75.912002343, -173.957779122 -75.924071248, -173.881731171 -75.916204739, -173.846521251 -75.926706189, -173.852149361 -75.955575598, -173.845408416 -75.979439305, -173.826298414 -75.99829731, -173.76424232 -76.018956172, -173.659240133 -76.041415889, -173.560434089 -76.057698465, -173.467824188 -76.067803901, -173.404678836 -76.077625909, -173.370998032 -76.087164489, -173.332530272 -76.106814524, -173.289275555 -76.136576014, -173.231864101 -76.154545405, -173.160295911 -76.1607227, -173.093917454 -76.17278471, -173.032728732 -76.190731436, -173.009710709 -76.205560908, -173.024863387 -76.217273124, -173.048718935 -76.225374126, -173.081277354 -76.229863912, -173.219658797 -76.237442552, -173.463863265 -76.248110046, -173.60352174 -76.25793895, -173.638634223 -76.266929265, -173.658723482 -76.274676093, -173.663789516 -76.281179435, -173.661403366 -76.289363255, -173.651565032 -76.299227554, -173.627282775 -76.313843189, -173.588556596 -76.33321016, -173.575369172 -76.355231445, -173.587720504 -76.379907046, -173.573965869 -76.402499893, -173.53410527 -76.423009985, -173.518376226 -76.437156259, -173.526778738 -76.444938715, -173.559015515 -76.446303683, -173.615086557 -76.441251162, -173.686785609 -76.421600788, -173.774112673 -76.387352563, -173.854573513 -76.372333877, -173.928168128 -76.37654473, -173.968906731 -76.383732772, -173.97678932 -76.393898005, -173.979325549 -76.410884215, -173.976515417 -76.434691403, -174.000646474 -76.454452818, -174.051718722 -76.470168462, -174.08231827 -76.482963711, -174.092445119 -76.492838563, -174.075053216 -76.514344245, -174.030142562 
-76.547480757, -174.016669929 -76.575274601, -174.034635317 -76.597725777, -174.037021169 -76.62030279, -174.023827484 -76.64300564, -174.034634583 -76.661942018, -174.069442464 -76.677111923, -174.086843964 -76.690616859, -174.086839082 -76.702456825, -174.080513222 -76.712456309, -174.067866385 -76.72061531, -174.036259441 -76.725116584, -173.98569239 -76.725960131, -173.93723318 -76.720486558, -173.89088181 -76.708695864, -173.780274695 -76.695221211, -173.605411835 -76.6800626, -173.487930602 -76.662096294, -173.427830996 -76.641322294, -173.370307559 -76.630935294, -173.315360292 -76.630935294, -173.249406002 -76.637251344, -173.17244469 -76.649883444, -173.110795196 -76.653532162, -173.06445752 -76.648197497, -173.029349452 -76.637355272, -173.005470993 -76.621005486, -173.01753216 -76.605236858, -173.065532955 -76.590049388, -173.096548505 -76.576599032, -173.11057881 -76.564885791, -173.108053605 -76.552301955, -173.08897289 -76.538847523, -173.051362225 -76.527628807, -172.99522161 -76.518645807, -172.891534181 -76.516119525, -172.740299938 -76.52004996, -172.648684331 -76.524540794, -172.61668736 -76.529592027, -172.584268588 -76.541098757, -172.551428016 -76.559060982, -172.533042741 -76.576141146, -172.529112765 -76.592339249, -172.540195073 -76.604524646, -172.566289666 -76.612697339, -172.576243291 -76.621303431, -172.570055947 -76.630342924, -172.555183534 -76.636123529, -172.531626051 -76.638645245, -172.517040304 -76.643518276, -172.511426292 -76.650742621, -172.551848294 -76.672312544, -172.63830631 -76.708228042, -172.701431121 -76.728711408, -172.741222726 -76.733762641, -172.81460886 -76.72534004, -172.921589524 -76.703443605, -173.006960733 -76.697273314, -173.070722487 -76.706829166, -173.101615682 -76.719791531, -173.099640316 -76.736160408, -173.033958817 -76.759064999, -172.904571183 -76.788505304, -172.847033841 -76.810916113, -172.861346791 -76.826297424, -172.924787296 -76.856444925, -173.037355356 -76.901358615, -173.149640378 
-76.935043659, -173.26164236 -76.957500057, -173.354942309 -76.968728255, -173.429540223 -76.968728255, -173.487771718 -76.964657535, -173.529636796 -76.956516094, -173.572768938 -76.955559014, -173.617168145 -76.961786296, -173.614655836 -76.97446809, -173.565232013 -76.993604396, -173.461502424 -77.006682128, -173.303467069 -77.013701287, -173.163373388 -77.02787859, -173.041221382 -77.049214037, -172.918094542 -77.059179951, -172.793992869 -77.057776334, -172.720418717 -77.044861043, -172.697372088 -77.020434079, -172.675885915 -77.003730799, -172.655960197 -76.994751205, -172.60882792 -76.987594764, -172.534489083 -76.982261476, -172.480072837 -76.983094424, -172.445579184 -76.990093609, -172.428332542 -76.998610734, -172.428332911 -77.008645799, -172.435068344 -77.018150822, -172.448538839 -77.027125803, -172.490777829 -77.039613708, -172.561785312 -77.055614535, -172.628175119 -77.080598263, -172.68994725 -77.114564892, -172.751818039 -77.133793765, -172.813787485 -77.138284883, -172.900229764 -77.131828165, -173.011144875 -77.114423613, -173.119679588 -77.128474884, -173.2258339 -77.17398198, -173.273849553 -77.202664633, -173.263726547 -77.214522842, -173.165895559 -77.239681117, -172.980356589 -77.278139457, -172.880291531 -77.312658914, -172.865700386 -77.343239487, -172.867667457 -77.371126102, -172.886192744 -77.39631876, -172.999732531 -77.429966955, -173.208286817 -77.472070689, -173.335454668 -77.509278677, -173.381236082 -77.541590921, -173.403703936 -77.570407724, -173.40285823 -77.595729086, -173.378288408 -77.634921, -173.329994472 -77.687983467, -173.241287742 -77.735563094, -173.112168219 -77.777659882, -173.054064387 -77.81089869, -173.066976248 -77.835279519, -173.063736051 -77.854657976, -173.044343797 -77.869034061, -172.890349983 -77.896435115, -172.60175461 -77.936861139, -172.376181212 -77.961986812, -172.213629791 -77.971812135, -172.023427102 -77.967320559, -171.805573145 -77.948512083, -171.581263004 -77.918894833, -171.350496677 
-77.87846881, -171.217147208 -77.851799157, -171.181214596 -77.838885875, -171.160572341 -77.826074082, -171.155220441 -77.813363779, -171.178789134 -77.790158543, -171.231278422 -77.756458375, -171.27338337 -77.70988804, -171.305103978 -77.65044754, -171.293875473 -77.602346602, -171.239697854 -77.565585227, -171.168401509 -77.532887375, -171.079986438 -77.504253044, -171.028614514 -77.483042244, -171.014285737 -77.469254974, -171.016677114 -77.456576914, -171.035788644 -77.445008064, -171.086879845 -77.431646501, -171.169950715 -77.416492226, -171.216537864 -77.403175691, -171.226641293 -77.391696895, -171.228607057 -77.378968685, -171.222435157 -77.364991059, -171.168824693 -77.334840949, -171.067775664 -77.288518355, -171.000402018 -77.24121644, -170.966703754 -77.192935206, -170.894838531 -77.157002595, -170.784806349 -77.133418606, -170.725150821 -77.11627156, -170.715871945 -77.105561456, -170.710674146 -77.077210652, -170.709557424 -77.031219147, -170.697909144 -76.992502178, -170.675729304 -76.961059744, -170.654536164 -76.940848729, -170.634329723 -76.931869135, -170.581564681 -76.922044903, -170.496241038 -76.911376032, -170.429709562 -76.893409727, -170.381970254 -76.868145986, -170.285260999 -76.838950739, -170.139581798 -76.805823986, -170.061542334 -76.78431495, -170.051142608 -76.77442363, -170.076677284 -76.763148845, -170.138146365 -76.750490597, -170.192753568 -76.731526593, -170.240498896 -76.706256833, -170.315896371 -76.686462585, -170.418945993 -76.67214385, -170.498267121 -76.665405567, -170.553859754 -76.666247738, -170.609039198 -76.673409769, -170.663805452 -76.68689166, -170.695686968 -76.698414281, -170.704683743 -76.70797763, -170.710444514 -76.723277346, -170.71296928 -76.744313428), (-172.46185717 -77.485683162, -172.491725041 -77.49003391, -172.535448064 -77.490594163, -172.566986057 -77.488349711, -172.586339021 -77.483300552, -172.598540475 -77.476173053, -172.60359042 -77.466967216, -172.601627836 -77.458872071, -172.592652724 
-77.451887618, -172.556765055 -77.448396429, -172.49396483 -77.448398503, -172.453726685 -77.452881992, -172.436050621 -77.461846897, -172.429868964 -77.468114837, -172.435181715 -77.47168581, -172.44584445 -77.477541919, -172.46185717 -77.485683162), (-172.812798475 -76.363628771, -172.855573928 -76.365453015, -172.885037626 -76.36040045, -172.90720433 -76.351027386, -172.92207404 -76.337333821, -172.9168827 -76.324750727, -172.89163031 -76.313278104, -172.862193885 -76.307261221, -172.828573425 -76.30670008, -172.792121028 -76.311189877, -172.752836694 -76.320730613, -172.732062811 -76.331770033, -172.729799379 -76.344308139, -172.756711267 -76.354927718, -172.812798475 -76.363628771), (-171.932998671 -76.183124002, -172.010021088 -76.180457336, -172.070931389 -76.166984091, -172.113033554 -76.150312062, -172.136327583 -76.130441248, -172.133522137 -76.111120124, -172.104617217 -76.092348689, -172.06028165 -76.080296327, -172.000515436 -76.074963039, -171.918725408 -76.076928027, -171.814911566 -76.086191292, -171.745182124 -76.097695899, -171.709537083 -76.111441849, -171.696346087 -76.126554541, -171.705609136 -76.143033974, -171.731004713 -76.156183802, -171.77253282 -76.166004024, -171.83986414 -76.174984091, -171.932998671 -76.183124002), (-173.16885937 -76.066345013, -173.199147981 -76.070696107, -173.23950163 -76.071257052, -173.269213382 -76.065813298, -173.288283234 -76.054364845, -173.2799961 -76.038973879, -173.244351978 -76.0196404, -173.207608446 -76.007588038, -173.169765504 -76.002816794, -173.139490241 -76.003094691, -173.116782658 -76.008421729, -173.104589039 -76.016938854, -173.102909386 -76.028646065, -173.111183172 -76.03940804, -173.129410398 -76.049224779, -173.148635798 -76.05820377, -173.16885937 -76.066345013)))" +version = 0 +depth = 500 +theme = "base" +type = "bathymetry" + +[examples.Bathymetry.bbox] +xmin = -175.12791442871094 +xmax = -170.05111694335938 +ymin = -77.9718246459961 +ymax = -75.64299774169922 + 
+[[examples.Bathymetry.sources]] +property = "" +dataset = "ETOPO/GLOBathy" +record_id = "2024-12-09T00:00:00.000Z" +update_time = "null" +confidence = "null" +between = "null" + +[examples.Bathymetry.cartography] +prominence = "null" +min_zoom = "null" +max_zoom = "null" +sort_key = 12 + +[[examples.Infrastructure]] +id = "e9e3d506-89c0-3473-8cee-5e5ac6596d6c" +geometry = "POINT (-179.9999994 -82.42408)" +version = 0 +level = "null" +subtype = "pedestrian" +class = "information" +height = "null" +surface = "null" +wikidata = "Q800558" +theme = "base" +type = "infrastructure" + +[examples.Infrastructure.bbox] +xmin = -180.0 +xmax = -179.99998474121094 +ymin = -82.42408752441406 +ymax = -82.42407989501953 + +[[examples.Infrastructure.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n7674174803@2" +update_time = "2023-04-07T17:37:48.000Z" +confidence = "null" +between = "null" + +[examples.Infrastructure.names] +primary = "1306 km to South Pole" +common = "null" +rules = "null" + +[examples.Infrastructure.source_tags] +description = "1036 km to South Pole." 
+information = "route_marker" +note = "The road continue in west side of the map" +start_date = "2007" +tourism = "information" +wikipedia = "en:South Pole Traverse" + +[[examples.Land]] +id = "70fc3596-a987-3fea-820c-c016c0a2f0da" +geometry = "POINT (-178.7 -85.45)" +version = 0 +level = "null" +subtype = "physical" +class = "cliff" +surface = "null" +wikidata = "Q5282342" +elevation = "null" +theme = "base" +type = "land" + +[examples.Land.bbox] +xmin = -178.7000274658203 +xmax = -178.6999969482422 +ymin = -85.45001220703125 +ymax = -85.44999694824219 + +[[examples.Land.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n11693475112@1" +update_time = "2024-03-05T09:23:39.000Z" +confidence = "null" +between = "null" + +[examples.Land.names] +primary = "Dismal Buttress" +common = "null" +rules = "null" + +[examples.Land.source_tags] +natural = "cliff" +"ref:linz:place_id" = "12318" +wikipedia = "en:Dismal Buttress" + +[[examples.LandCover]] +id = "c347312d-012b-5e73-8bd3-a10d04b2981d" +geometry = "POLYGON ((-179.99877531181616 65.95172539425603, -179.99740705536922 65.95265577758867, -179.99751722434937 65.9532545912543, -179.9974078443441 65.9541507615366, -179.9965398649702 65.95451215813897, -179.99644396804533 65.95493010632842, -179.99602533095998 65.95502533095993, -179.99468737767813 65.95677071067811, -179.9933586639601 65.9576086639598, -179.99313729490444 65.95812767174695, -179.99314866836227 65.95857649030111, -179.99370507548738 65.95907781410224, -179.99372539425596 65.95947468818369, -179.99395850935272 65.95967260714353, -179.99410866395988 65.96030800303998, -179.99534017576838 65.96101799736452, -179.99575621846904 65.96104928900519, -179.9959057775888 65.96150961146397, -179.9965950523775 65.96161426988128, -179.99663895270027 65.96216619349144, -179.99807649030126 65.96218466463768, -179.99819891654494 65.96189707483568, -179.99799883949768 65.96160842248709, -179.99825961146388 65.96142755541139, -179.99830761159433 
65.9610635173197, -179.99936104612706 65.9609995273612, -179.9993797906372 65.96051410937864, -179.99964133604004 65.96039133604008, -179.9997 65.96016912258357, -179.99936104729989 65.95958380650865, -179.99900447103303 65.95954329910117, -179.9987608894112 65.95924038853603, -179.99806463264497 65.95902716440592, -179.99798856507215 65.95838313921075, -179.99834294463088 65.95801088941111, -179.9983374593203 65.9575910941953, -179.99855761159426 65.95723018431977, -179.99921013502978 65.95698784186104, -179.99931463264488 65.95663950159415, -179.99990450886096 65.95637680202988, -179.99997427859432 65.9560635173197, -180.00019127274402 65.9558913550169, -180.00019127274405 65.95544197881631, -180.0000389948438 65.9553039610106, -179.9996246090062 65.9553159274193, -179.99935793918766 65.95327531026125, -179.9988434361254 65.95288259953995, -179.99885243016726 65.95244253241113, -179.9991661934914 65.95227771429981, -179.9991960389287 65.95187767174694, -179.99877531181616 65.95172539425603))" +version = 0 +subtype = "barren" +theme = "base" +type = "land_cover" + +[examples.LandCover.bbox] +xmin = -180.0001983642578 +xmax = -179.99313354492188 +ymin = 65.95172119140625 +ymax = 65.96218872070312 + +[[examples.LandCover.sources]] +property = "" +dataset = "ESA WorldCover" +record_id = "null" +update_time = "2024-11-07T00:00:00.000Z" +confidence = "null" +between = "null" + +[examples.LandCover.cartography] +prominence = "null" +min_zoom = 8 +max_zoom = 15 +sort_key = 3 + +[[examples.LandUse]] +id = "1e1f6095-5bd2-3fdb-a422-41351b848e9d" +geometry = "POLYGON ((-176.5623454 -43.9567812, -176.5627644 -43.9561272, -176.5626898 -43.9557432, -176.5624297 -43.9553592, -176.562679 -43.9551603, -176.5629058 -43.9552064, -176.5631441 -43.9551769, -176.5632428 -43.9550676, -176.5633066 -43.9548702, -176.5634402 -43.9548071, -176.5639052 -43.9546682, -176.5642479 -43.9544118, -176.5647302 -43.9542142, -176.5651547 -43.954277, -176.5658293 -43.9545243, -176.5659454 -43.9543521, 
-176.566934 -43.9547987, -176.5669179 -43.955018, -176.5682465 -43.9553205, -176.5671004 -43.9579593, -176.5662034 -43.9600044, -176.5655366 -43.9597247, -176.5646109 -43.9595326, -176.564467 -43.9592563, -176.5639885 -43.9589226, -176.5637013 -43.9586925, -176.563223 -43.9586237, -176.5623454 -43.9567812))" +version = 0 +level = "null" +subtype = "golf" +class = "golf_course" +surface = "null" +wikidata = "null" +elevation = "null" +theme = "base" +type = "land_use" + +[examples.LandUse.bbox] +xmin = -176.56825256347656 +xmax = -176.56231689453125 +ymin = -43.96001052856445 +ymax = -43.95420837402344 + +[[examples.LandUse.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w56117029@3" +update_time = "2010-04-24T22:35:13.000Z" +confidence = "null" +between = "null" + +[examples.LandUse.names] +primary = "Chatham Islands Golf Club" +common = "null" +rules = "null" + +[examples.LandUse.source_tags] +"LINZ:source_version" = "V16" +attribution = "http://wiki.osm.org/wiki/Attribution#LINZ" +leisure = "golf_course" +source_ref = "http://www.linz.govt.nz/topography/topo-maps/" + +[[examples.Water]] +id = "6bbb5fe5-bf26-3efa-b120-0a7079b60840" +geometry = "POINT (-177.031799 -84.934793)" +version = 0 +level = "null" +subtype = "physical" +class = "cape" +wikidata = "Q33140589" +is_salt = "null" +is_intermittent = "null" +theme = "base" +type = "water" + +[examples.Water.bbox] +xmin = -177.03179931640625 +xmax = -177.0317840576172 +ymin = -84.93480682373047 +ymax = -84.9347915649414 + +[[examples.Water.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "n11109190647@2" +update_time = "2024-02-11T05:52:05.000Z" +confidence = "null" +between = "null" + +[examples.Water.names] +primary = "Thanksgiving Point" +common = "null" +rules = "null" + +[examples.Water.source_tags] +natural = "cape" +"ref:linz:place_id" = "13433" diff --git a/packages/overture-schema-buildings-theme/pyproject.toml b/packages/overture-schema-buildings-theme/pyproject.toml 
index e9766fc61..8df34b884 100644 --- a/packages/overture-schema-buildings-theme/pyproject.toml +++ b/packages/overture-schema-buildings-theme/pyproject.toml @@ -27,3 +27,81 @@ packages = ["src/overture"] [project.entry-points."overture.models"] "overture:buildings:building" = "overture.schema.buildings:Building" "overture:buildings:building_part" = "overture.schema.buildings:BuildingPart" + +[[examples.Building]] +id = "148f35b1-7bc1-4180-9280-10d39b13883b" +geometry = "POLYGON ((-176.6435004 -43.9938042, -176.6435738 -43.9937107, -176.6437726 -43.9937913, -176.6436992 -43.9938849, -176.6435004 -43.9938042))" +version = 1 +level = "null" +subtype = "null" +class = "null" +height = "null" +names = "null" +has_parts = false +is_underground = false +num_floors = "null" +num_floors_underground = "null" +min_height = "null" +min_floor = "null" +facade_color = "null" +facade_material = "null" +roof_material = "null" +roof_shape = "null" +roof_direction = "null" +roof_orientation = "null" +roof_color = "null" +roof_height = "null" +theme = "buildings" +type = "building" + +[examples.Building.bbox] +xmin = -176.643798828125 +xmax = -176.64349365234375 +ymin = -43.9938850402832 +ymax = -43.993709564208984 + +[[examples.Building.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w519166507@1" +update_time = "2017-08-27T21:39:50.000Z" +confidence = "null" +between = "null" + +[[examples.BuildingPart]] +id = "19412d64-51ac-3d6a-ac2f-8a8c8b91bb60" +geometry = "POLYGON ((-73.2462509 -39.8108937, -73.2462755 -39.8109047, -73.246291 -39.8109182, -73.2463022 -39.8109382, -73.2463039 -39.810959, -73.2462962 -39.81098, -73.2462796 -39.8109977, -73.2462674 -39.8110052, -73.2462281 -39.8110153, -73.2461998 -39.811013, -73.2461743 -39.8110034, -73.2461566 -39.8109898, -73.246144 -39.8109702, -73.2461418 -39.8109427, -73.2461511 -39.8109221, -73.2461669 -39.8109066, -73.2461908 -39.8108947, -73.2462184 -39.8108898, -73.2462509 -39.8108937))" +version = 0 +level = 3 
+height = "null" +names = "null" +is_underground = false +num_floors = "null" +num_floors_underground = "null" +min_height = "null" +min_floor = "null" +facade_color = "null" +facade_material = "null" +roof_material = "null" +roof_shape = "null" +roof_direction = "null" +roof_orientation = "null" +roof_color = "null" +roof_height = "null" +building_id = "bd663bd4-1844-4d7d-a400-114de051cf49" +theme = "buildings" +type = "building_part" + +[examples.BuildingPart.bbox] +xmin = -73.24630737304688 +xmax = -73.24613952636719 +ymin = -39.81101608276367 +ymax = -39.81088638305664 + +[[examples.BuildingPart.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w223076787@2" +update_time = "2014-10-31T22:55:36.000Z" +confidence = "null" +between = "null" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py index 8e60a2d37..76f2934fc 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py @@ -188,7 +188,7 @@ def _analyze_constraints( for group_items in groups.values(): first_idx = group_items[0][0] group_constraints = [c for _, c in group_items] - all_fields = frozenset[str]().union( + all_fields: frozenset[str] = frozenset().union( *(_affected_field_names(c) for c in group_constraints) ) if len(group_constraints) == 1: diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py index 7f122e868..0b195859d 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py @@ -53,7 +53,7 @@ def _merge_field_metadata(type_info: TypeInfo, 
field_info: FieldInfo) -> TypeInf def _is_field_required(field_info: FieldInfo, type_info: TypeInfo) -> bool: - """A field is required when it has no default and is not Optional.""" + """Determine whether a field is required (no default and not Optional).""" has_default = ( field_info.default is not PydanticUndefined or field_info.default_factory is not None @@ -79,9 +79,7 @@ def _class_order(model_class: type[BaseModel]) -> list[type]: return [ cls for cls in reversed(model_class.__mro__) - if isinstance(cls, type) - and issubclass(cls, BaseModel) - and cls is not BaseModel + if issubclass(cls, BaseModel) and cls is not BaseModel ] primary = _class_order(bases[0]) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index cedae8506..afce2c505 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -1,4 +1,4 @@ -"""Type analysis for Pydantic models.""" +"""Iterative type unwrapping for Pydantic model annotations.""" from __future__ import annotations diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py index e573153ad..1dbc6549d 100644 --- a/packages/overture-schema-codegen/tests/codegen_test_support.py +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -14,6 +14,8 @@ from overture.schema.codegen.model_extraction import extract_model from overture.schema.codegen.specs import ( AnnotatedField, + EnumMemberSpec, + EnumSpec, FieldSpec, ModelSpec, UnionSpec, @@ -245,11 +247,26 @@ def make_union_spec( ) +def find_model_class(name: str, models: dict[object, object]) -> type[BaseModel]: + """Find a discovered model class by name.""" + matches = [v for v in models.values() if getattr(v, "__name__", None) == name] + assert matches, 
f"{name} model not found" + match = matches[0] + assert isinstance(match, type) + assert issubclass(match, BaseModel) + return match + + def find_field(spec: ModelSpec, name: str) -> FieldSpec: """Find a field by name in a ModelSpec, raising if missing.""" return next(f for f in spec.fields if f.name == name) +def find_member(spec: EnumSpec, name: str) -> EnumMemberSpec: + """Find a member by name in an EnumSpec, raising if missing.""" + return next(m for m in spec.members if m.name == name) + + def assert_literal_field( spec: ModelSpec, field_name: str, expected_value: object ) -> None: diff --git a/packages/overture-schema-codegen/tests/conftest.py b/packages/overture-schema-codegen/tests/conftest.py index f95e74d46..5731662ee 100644 --- a/packages/overture-schema-codegen/tests/conftest.py +++ b/packages/overture-schema-codegen/tests/conftest.py @@ -1,6 +1,7 @@ """Shared pytest fixtures for overture-schema-codegen tests.""" import pytest +from codegen_test_support import find_model_class from overture.schema.codegen.model_extraction import extract_model from overture.schema.codegen.specs import ModelSpec from overture.schema.core.discovery import discover_models @@ -21,16 +22,6 @@ def update_golden(request: pytest.FixtureRequest) -> bool: return bool(request.config.getoption("--update-golden")) -def _find_model_class(name: str, models: dict[object, object]) -> type[BaseModel]: - """Find a discovered model class by name.""" - matches = [v for v in models.values() if getattr(v, "__name__", None) == name] - assert matches, f"{name} model not found" - match = matches[0] - assert isinstance(match, type) - assert issubclass(match, BaseModel) - return match - - @pytest.fixture def all_discovered_models() -> dict: """Discover and return all registered Overture models.""" @@ -40,7 +31,7 @@ def all_discovered_models() -> dict: @pytest.fixture def building_class(all_discovered_models: dict) -> type[BaseModel]: """Get the Building model class.""" - return 
_find_model_class("Building", all_discovered_models) + return find_model_class("Building", all_discovered_models) @pytest.fixture @@ -52,10 +43,10 @@ def building_spec(building_class: type[BaseModel]) -> ModelSpec: @pytest.fixture def place_class(all_discovered_models: dict) -> type[BaseModel]: """Get the Place model class.""" - return _find_model_class("Place", all_discovered_models) + return find_model_class("Place", all_discovered_models) @pytest.fixture def division_class(all_discovered_models: dict) -> type[BaseModel]: """Get the Division model class.""" - return _find_model_class("Division", all_discovered_models) + return find_model_class("Division", all_discovered_models) diff --git a/packages/overture-schema-codegen/tests/test_enum_extraction.py b/packages/overture-schema-codegen/tests/test_enum_extraction.py index 7cdf7bcef..3c05a65a1 100644 --- a/packages/overture-schema-codegen/tests/test_enum_extraction.py +++ b/packages/overture-schema-codegen/tests/test_enum_extraction.py @@ -2,16 +2,12 @@ from enum import Enum +from codegen_test_support import find_member from overture.schema.codegen.enum_extraction import extract_enum from overture.schema.codegen.specs import EnumMemberSpec, EnumSpec from overture.schema.system.doc import DocumentedEnum -def find_member(spec: EnumSpec, name: str) -> EnumMemberSpec: - """Find a member by name in an EnumSpec, raising if missing.""" - return next(m for m in spec.members if m.name == name) - - class TestEnumMemberSpec: """Tests for EnumMemberSpec dataclass.""" diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py index aaa7c5fb3..17661daf1 100644 --- a/packages/overture-schema-codegen/tests/test_type_placement.py +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -1,6 +1,5 @@ """Tests for type placement module.""" -from collections.abc import Sequence from pathlib import PurePosixPath import 
overture.schema.system.primitive as _system_primitive @@ -170,7 +169,7 @@ class A(Base): entry_point="test.package:TestUnion", ) - feature_specs: Sequence[FeatureSpec] = [union_spec] + feature_specs: list[FeatureSpec] = [union_spec] all_specs = collect_all_supplementary_types(feature_specs) registry = build_placement_registry( feature_specs, all_specs, [], [], "test.package" diff --git a/packages/overture-schema-divisions-theme/pyproject.toml b/packages/overture-schema-divisions-theme/pyproject.toml index e56b1df6a..9ce3cedff 100644 --- a/packages/overture-schema-divisions-theme/pyproject.toml +++ b/packages/overture-schema-divisions-theme/pyproject.toml @@ -29,3 +29,138 @@ packages = ["src/overture"] "overture:divisions:division" = "overture.schema.divisions:Division" "overture:divisions:division_area" = "overture.schema.divisions:DivisionArea" "overture:divisions:division_boundary" = "overture.schema.divisions:DivisionBoundary" + +[[examples.Division]] +id = "350e85f6-68ba-4114-9906-c2844815988b" +geometry = "POINT (-175.2551522 -21.1353686)" +country = "TO" +version = 1 +subtype = "locality" +class = "village" +wikidata = "null" +region = "TO-04" +perspectives = "null" +hierarchies = [ + [ + {division_id = "fef8748b-0c91-46ad-9f2d-976d8d2de3e9", subtype = "country", name = "Tonga"}, + {division_id = "4d67561a-2292-41bd-8996-7853d276a42c", subtype = "region", name = "Tongatapu"}, + {division_id = "8730f0cc-d436-4f11-a7d3-49085813ef44", subtype = "county", name = "Vahe Kolomotu'a"}, + {division_id = "350e85f6-68ba-4114-9906-c2844815988b", subtype = "locality", name = "Sia'atoutai"}, + ], +] +parent_division_id = "8730f0cc-d436-4f11-a7d3-49085813ef44" +norms = "null" +population = 534 +capital_division_ids = "null" +capital_of_divisions = "null" +theme = "divisions" +type = "division" + +[examples.Division.bbox] +xmin = -175.25515747070312 +xmax = -175.255126953125 +ymin = -21.1353702545166 +ymax = -21.13536834716797 + +[[examples.Division.sources]] +property = "" 
+dataset = "OpenStreetMap" +record_id = "n3173231082@4" +update_time = "2014-12-18T09:17:03Z" +confidence = "null" +between = "null" + +[examples.Division.cartography] +prominence = 29 +min_zoom = "null" +max_zoom = "null" +sort_key = "null" + +[examples.Division.names] +primary = "Sia'atoutai" +common = "null" + +[[examples.Division.names.rules]] +variant = "alternate" +language = "null" +perspectives = "null" +value = "Nafualu" +between = "null" +side = "null" + +[examples.Division.local_type] +en = "village" + +[[examples.DivisionArea]] +id = "eb9b112f-ec3c-47f7-b519-6f9f2e6fc2bd" +geometry = "MULTIPOLYGON (((-174.9553949 -21.4730179, -174.9514163 -21.4719978, -174.9520108 -21.4681253, -174.9566122 -21.4687535, -174.9553949 -21.4730179)), ((-174.9634398 -21.3476807, -174.9753507 -21.3833656, -174.9702168 -21.4037277, -174.950488 -21.4269887, -174.9082983 -21.4577763, -174.9004303 -21.4398142, -174.9048159 -21.3698688, -174.9165467 -21.3035402, -174.9126977 -21.2903268, -174.9199765 -21.2834922, -174.9634398 -21.3476807)))" +country = "TO" +version = 2 +subtype = "region" +class = "land" +is_land = true +is_territorial = false +region = "TO-01" +division_id = "21597af0-b564-463c-a356-42c29e712b7d" +theme = "divisions" +type = "division_area" + +[examples.DivisionArea.bbox] +xmin = -174.97535705566406 +xmax = -174.90040588378906 +ymin = -21.473018646240234 +ymax = -21.283489227294922 + +[[examples.DivisionArea.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "r7247527@3" +update_time = "2020-12-30T18:41:56Z" +confidence = "null" +between = "null" + +[examples.DivisionArea.names] +primary = "ʻEua" +common = "null" +rules = "null" + +[[examples.DivisionBoundary]] +id = "2bdf68e4-860d-3d8c-a472-ccf439a5302a" +geometry = "LINESTRING (-147.064823 -15.4231537, -147.0519131 -15.2885069, -147.048482 -15.1511701)" +country = "PF" +version = 1 +subtype = "county" +class = "maritime" +is_land = false +is_territorial = true +division_ids = [ + 
"ae266459-63a4-4508-8295-0101e27d039b", + "d4a6873d-885a-4f2a-bc0f-37e9d9e874e4" +] +region = "null" +is_disputed = false +perspectives = "null" +theme = "divisions" +type = "division_boundary" + +[examples.DivisionBoundary.bbox] +xmin = -147.06483459472656 +xmax = -147.04847717285156 +ymin = -15.4231538772583 +ymax = -15.151169776916504 + +[[examples.DivisionBoundary.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "r6063055@9" +update_time = "2023-07-20T00:28:40Z" +confidence = "null" +between = "null" + +[[examples.DivisionBoundary.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "r6063063@12" +update_time = "2023-07-20T00:28:40Z" +confidence = "null" +between = "null" diff --git a/packages/overture-schema-places-theme/pyproject.toml b/packages/overture-schema-places-theme/pyproject.toml index 40877a435..c6b4c2e5f 100644 --- a/packages/overture-schema-places-theme/pyproject.toml +++ b/packages/overture-schema-places-theme/pyproject.toml @@ -28,3 +28,60 @@ packages = ["src/overture"] [project.entry-points."overture.models"] "overture:places:place" = "overture.schema.places:Place" + +[[examples.Place]] +id = "99003ee6-e75b-4dd6-8a8a-53a5a716c50d" +geometry = "POINT (-150.46875 -79.1713346)" +version = 1 +confidence = 0.7337175792507205 +websites = [ + "https://www.superhotel.co.jp/s_hotels/beppu/" +] +socials = [ + "https://www.facebook.com/107663894904826" +] +emails = "null" +phones = [ + "+81977009000" +] +operating_status = "open" +theme = "places" +type = "place" + +[examples.Place.bbox] +xmin = -150.46875 +xmax = -150.46875 +ymin = -79.17134094238281 +ymax = -79.17133331298828 + +[[examples.Place.sources]] +property = "" +dataset = "meta" +record_id = "107663894904826" +update_time = "2025-06-30T07:00:00.000Z" +confidence = 0.7337175792507205 +between = "null" + +[examples.Place.names] +primary = "スーパーホテル別府駅前" +common = "null" +rules = "null" + +[examples.Place.categories] +primary = "hotel" +alternate = "null" + 
+[examples.Place.brand] +wikidata = "null" + +[examples.Place.brand.names] +primary = "SUPER HOTEL" +common = "null" +rules = "null" + +[[examples.Place.addresses]] +freeform = "秋田県横手市駅前町13−8" +locality = "横手市" +postcode = "013-0036" +region = "null" +country = "JP" diff --git a/packages/overture-schema-transportation-theme/pyproject.toml b/packages/overture-schema-transportation-theme/pyproject.toml index 2be7ed8ea..31fe0b824 100644 --- a/packages/overture-schema-transportation-theme/pyproject.toml +++ b/packages/overture-schema-transportation-theme/pyproject.toml @@ -27,3 +27,86 @@ packages = ["src/overture"] [project.entry-points."overture.models"] "overture:transportation:connector" = "overture.schema.transportation:Connector" "overture:transportation:segment" = "overture.schema.transportation:Segment" + +[[examples.Connector]] +id = "39542bee-230f-4b91-b7e5-a9b58e0c59b1" +geometry = "POINT (-176.5472979 -43.9679472)" +version = 1 +theme = "transportation" +type = "connector" + +[examples.Connector.bbox] +xmin = -176.54730224609375 +xmax = -176.54727172851562 +ymin = -43.96794891357422 +ymax = -43.96794128417969 + +[[examples.Connector.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "null" +update_time = "null" +confidence = "null" +between = "null" + +[[examples.Segment]] +id = "1bc62f3b-08b5-42b8-89fe-36f685f60455" +geometry = "LINESTRING (-176.5636191 -43.954404, -176.5643637 -43.9538145, -176.5647264 -43.9535274, -176.5649947 -43.953251)" +version = 1 +subtype = "road" +class = "residential" +routes = "null" +subclass_rules = "null" +access_restrictions = "null" +level_rules = "null" +destinations = "null" +prohibited_transitions = "null" +road_flags = "null" +speed_limits = "null" +width_rules = "null" +subclass = "null" +rail_flags = "null" +theme = "transportation" +type = "segment" + +[examples.Segment.bbox] +xmin = -176.5650177001953 +xmax = -176.56361389160156 +ymin = -43.954410552978516 +ymax = -43.953250885009766 + 
+[[examples.Segment.sources]] +property = "" +dataset = "OpenStreetMap" +record_id = "w53435546@6" +update_time = "2021-05-03T06:37:03Z" +confidence = "null" +between = "null" + +[examples.Segment.names] +primary = "Meteorological Lane" +common = "null" + +[[examples.Segment.names.rules]] +variant = "common" +language = "null" +perspectives = "null" +value = "Meteorological Lane" +between = "null" +side = "null" + +[[examples.Segment.connectors]] +connector_id = "15b2c131-9137-4add-88c6-2acd3fa61355" +at = 0.0 + +[[examples.Segment.connectors]] +connector_id = "23ae2702-ef77-4d2e-b39d-77360b696d20" +at = 0.523536154 + +[[examples.Segment.connectors]] +connector_id = "8e944ce1-4b81-49eb-a823-7d98779c855c" +at = 1.0 + +[[examples.Segment.road_surface]] +value = "gravel" +between = "null" From 92c656cc617bb26d12cec63f91c431e9752b8df4 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 20:30:02 -0800 Subject: [PATCH 18/38] feat(codegen): add markdown renderers Jinja2 templates and rendering logic for documentation pages: - markdown_renderer: orchestrates page rendering for features, enums, NewTypes, primitives, and geometry. Recursively expands MODEL-kind fields inline with dot-notation. - markdown_type_format: type string formatting with link-aware rendering via LinkContext - example_loader: loads examples from theme pyproject.toml, validates against Pydantic models, flattens to dot-notation - reverse_references: computes "Used By" cross-references between types and the features that reference them Templates: feature, enum, newtype, primitives, geometry pages. Golden-file snapshot tests verify rendered output stability. Adds renderer-specific fixtures to conftest.py (cli_runner, primitives_markdown, geometry_markdown). 
--- .../overture/schema/codegen/example_loader.py | 315 ++++ .../schema/codegen/link_computation.py | 10 + .../schema/codegen/markdown_renderer.py | 597 ++++++++ .../schema/codegen/markdown_type_format.py | 188 +++ .../schema/codegen/reverse_references.py | 141 ++ .../templates/markdown/_used_by.md.jinja2 | 10 + .../codegen/templates/markdown/enum.md.jinja2 | 13 + .../templates/markdown/feature.md.jinja2 | 45 + .../templates/markdown/geometry.md.jinja2 | 11 + .../templates/markdown/newtype.md.jinja2 | 17 + .../templates/markdown/primitives.md.jinja2 | 27 + .../overture/schema/codegen/type_analyzer.py | 16 + .../overture-schema-codegen/tests/conftest.py | 32 + .../tests/golden/markdown/common_names.md | 7 + .../golden/markdown/feature_with_address.md | 15 + .../golden/markdown/feature_with_dict.md | 15 + .../golden/markdown/feature_with_sources.md | 13 + .../tests/golden/markdown/hex_color.md | 19 + .../tests/golden/markdown/id.md | 15 + .../tests/golden/markdown/instrument.md | 20 + .../golden/markdown/instrument_family.md | 13 + .../tests/golden/markdown/simple_kind.md | 6 + .../tests/golden/markdown/sources.md | 14 + .../tests/golden/markdown/venue.md | 22 + .../tests/golden/markdown/widget.md | 8 + .../tests/test_example_loader.py | 821 ++++++++++ .../tests/test_golden_markdown.py | 124 ++ .../tests/test_markdown_renderer.py | 1339 +++++++++++++++++ .../tests/test_markdown_type_format.py | 181 +++ .../tests/test_reverse_references.py | 165 ++ 30 files changed, 4219 insertions(+) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py create mode 100644 
packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/_used_by.md.jinja2 create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/enum.md.jinja2 create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/feature.md.jinja2 create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/geometry.md.jinja2 create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/newtype.md.jinja2 create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/primitives.md.jinja2 create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/common_names.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/hex_color.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/id.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/instrument.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/sources.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/venue.md create mode 100644 packages/overture-schema-codegen/tests/golden/markdown/widget.md create mode 100644 packages/overture-schema-codegen/tests/test_example_loader.py create mode 100644 packages/overture-schema-codegen/tests/test_golden_markdown.py create mode 100644 
# --- packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py ---
"""Load and process example data from theme pyproject.toml files."""

import logging
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import Any

from pydantic import BaseModel, TypeAdapter, ValidationError
from pydantic.fields import FieldInfo

from .model_extraction import resolve_field_alias
from .type_analyzer import single_literal_value

log = logging.getLogger(__name__)

__all__ = ["ExampleRecord", "load_examples", "validate_example"]

# tomllib is stdlib from 3.11+; tomli is the backport for 3.10.
try:
    import tomllib  # type: ignore[import-not-found]
except ModuleNotFoundError:
    import tomli as tomllib  # type: ignore[import-not-found]


@dataclass
class ExampleRecord:
    """A flattened example with field-value pairs in documentation order."""

    # Each row is a (flattened key, value) pair, e.g. ("names.primary", "x").
    rows: list[tuple[str, Any]]


def _inject_literal_fields(
    model_fields_dict: dict[str, FieldInfo], data: dict[str, Any]
) -> dict[str, Any]:
    """Inject single-value Literal field defaults missing from *data*.

    Inspects *model_fields_dict* for fields with single-value ``Literal``
    annotations. For each field missing from *data*, injects the literal
    value using the field's ``validation_alias`` (if set), falling back
    to ``alias``, then to the field name.

    Returns a new dict; the original is not mutated.
    """
    result = data.copy()

    for field_name, field_info in model_fields_dict.items():
        key = resolve_field_alias(field_name, field_info)
        # Never overwrite a value the example explicitly provides.
        if key in result:
            continue

        literal_value = single_literal_value(field_info.annotation)
        if literal_value is not None:
            result[key] = literal_value

    return result


def _denull_value(value: object) -> object:
    """Convert a single value, replacing ``"null"`` strings with ``None``."""
    if value == "null":
        return None
    if isinstance(value, dict):
        return _denull(value)
    if isinstance(value, list):
        return [_denull_value(item) for item in value]
    return value


def _denull(data: dict[str, Any]) -> dict[str, Any]:
    """Convert ``"null"`` sentinel strings to ``None``.

    TOML has no null literal, so example data uses the string ``"null"``
    as a stand-in. This recursively walks *data* (including nested dicts,
    lists of dicts, and plain lists) and replaces every ``"null"`` value
    with ``None``.

    Returns a new dict; the original is not mutated.
    """
    return {key: _denull_value(value) for key, value in data.items()}


def _known_field_keys(model_fields_dict: dict[str, FieldInfo]) -> frozenset[str]:
    """Alias-resolved field keys from a model_fields dict."""
    return frozenset(
        resolve_field_alias(name, info) for name, info in model_fields_dict.items()
    )


def _strip_null_unknown_fields(
    data: dict[str, Any], known_keys: frozenset[str]
) -> dict[str, Any]:
    """Drop null-valued fields not in *known_keys*.

    For discriminated unions, *known_keys* contains only common base
    fields. Variant-specific null fields from other arms (present in
    flat parquet schemas) are stripped so the selected arm's validator
    doesn't reject them as unknown extras.

    Non-null fields are always kept so the arm's own validator can
    accept or reject them normally.
    """
    return {k: v for k, v in data.items() if v is not None or k in known_keys}


def validate_example(
    validation_type: object,
    raw: dict[str, Any],
    *,
    model_fields: dict[str, FieldInfo] | None = None,
) -> dict[str, Any]:
    """Validate example data against a model or union type.

    Uses TypeAdapter for validation, supporting both concrete models
    and discriminated union aliases.

    Preprocesses *raw* data by:
    1. Converting "null" strings to None
    2. Injecting missing Literal fields for validation (if model_fields provided)
    3. Stripping null-valued fields not in *model_fields* (handles
       flat-schema examples from discriminated unions where fields from
       non-selected arms appear as nulls)

    Returns the denulled dict (not the preprocessed one with injected
    literals). Lets ValidationError propagate on validation failure.
    """
    denulled = _denull(raw)

    if model_fields is None:
        # Infer fields from a concrete BaseModel subclass; union aliases
        # and other objects get an empty mapping (no injection/stripping).
        if isinstance(validation_type, type) and issubclass(validation_type, BaseModel):
            model_fields = validation_type.model_fields
        else:
            model_fields = {}

    known_keys = _known_field_keys(model_fields)
    preprocessed = _inject_literal_fields(model_fields, denulled)
    preprocessed = _strip_null_unknown_fields(preprocessed, known_keys)
    TypeAdapter(validation_type).validate_python(preprocessed)
    return denulled


_DEFAULT_SKIP_KEYS: frozenset[str] = frozenset({"bbox"})


def _flatten_value(prefix: str, value: object) -> list[tuple[str, Any]]:
    """Recursively flatten a value into dot/bracket-notation rows."""
    if isinstance(value, dict):
        result: list[tuple[str, Any]] = []
        for k, v in value.items():
            result.extend(_flatten_value(f"{prefix}.{k}", v))
        return result
    # Only lists of containers are indexed per-element; the list's first
    # element decides (homogeneous lists are assumed here).
    if isinstance(value, list) and value and isinstance(value[0], (dict, list)):
        result = []
        for i, item in enumerate(value):
            result.extend(_flatten_value(f"{prefix}[{i}]", item))
        return result
    return [(prefix, value)]


def flatten_example(
    raw: dict[str, Any],
    *,
    skip_keys: frozenset[str] = _DEFAULT_SKIP_KEYS,
) -> list[tuple[str, Any]]:
    """Flatten nested example dict to dot-notation key-value pairs.

    Nested dicts become ``"parent.child"``; lists of dicts become
    ``"parent[0].child"``; lists of lists of dicts use double-index
    notation ``"parent[0][1].child"``. Keys in *skip_keys* are dropped
    at the top level only. Plain lists are kept as values.
    """
    result: list[tuple[str, Any]] = []
    for key, value in raw.items():
        if key in skip_keys:
            continue
        result.extend(_flatten_value(key, value))
    return result
def extract_base_field(key: str) -> str:
    """Extract the top-level field name from a flattened key.

    The base field is everything before the first ``.`` or ``[``,
    whichever comes first. (Checking only whether ``[`` appears anywhere
    would return ``"parent.child"`` for keys like ``"parent.child[0]"``,
    which breaks position lookup in order_example_rows.)

    >>> extract_base_field("sources[0].dataset")
    'sources'
    >>> extract_base_field("names.primary")
    'names'
    >>> extract_base_field("names.rules[0].value")
    'names'
    >>> extract_base_field("id")
    'id'
    """
    for i, ch in enumerate(key):
        if ch in ".[":
            return key[:i]
    return key


def order_example_rows(
    flat_rows: list[tuple[str, Any]],
    field_names: list[str],
) -> list[tuple[str, Any]]:
    """Order flattened rows by field position in documentation.

    Sorts by position of base field name in *field_names*.
    Fields with the same base maintain their original order (stable sort).
    Unknown fields sort to end.
    """
    position = {name: i for i, name in enumerate(field_names)}
    sentinel = len(field_names)

    def sort_key(row: tuple[str, Any]) -> int:
        return position.get(extract_base_field(row[0]), sentinel)

    return sorted(flat_rows, key=sort_key)


def load_examples_from_toml(
    pyproject_path: Path,
    model_name: str,
) -> list[dict[str, Any]]:
    """Load the ``[examples.<model_name>]`` array from a pyproject.toml file."""
    with pyproject_path.open("rb") as f:
        data = tomllib.load(f)

    examples: dict[str, list[dict[str, Any]]] = data.get("examples", {})
    return examples.get(model_name, [])


def resolve_pyproject_path(model_class: type) -> Path | None:
    """Find pyproject.toml by walking up from the model's module location.

    Returns None when the class has no importable module, the module has
    no file (e.g. built-ins), or no pyproject.toml exists on the path to
    the filesystem root.
    """
    module_name = getattr(model_class, "__module__", None)
    if not module_name:
        return None

    module = sys.modules.get(module_name)
    if not module:
        return None

    module_file = getattr(module, "__file__", None)
    if not module_file:
        return None

    # Walk up from module directory
    current = Path(module_file).parent
    while current != current.parent:  # Stop at filesystem root
        pyproject = current / "pyproject.toml"
        if pyproject.exists():
            return pyproject
        current = current.parent

    return None


def load_examples(
    validation_type: object,
    model_name: str,
    field_names: list[str],
    *,
    pyproject_source: type | None = None,
    model_fields: dict[str, FieldInfo] | None = None,
) -> list[ExampleRecord]:
    """Load examples for a model, flattened and ordered by *field_names*.

    Validates each example against the validation type. Invalid examples
    are skipped with a warning logged. Returns an empty list on any failure
    (missing file, missing section, parse error).

    Parameters
    ----------
    validation_type : type[BaseModel] | object
        Model class or union alias to validate against.
    model_name : str
        Name of the model to load examples for.
    field_names : list[str]
        List of field names for ordering output.
    pyproject_source : type or None
        Type to use for finding pyproject.toml. If None,
        uses validation_type if it's a class.
    model_fields : dict[str, FieldInfo] or None
        Field info dict for Literal injection. If None, infers
        from validation_type if it's a BaseModel class.
    """
    source_type = pyproject_source if pyproject_source is not None else validation_type
    if not isinstance(source_type, type):
        return []

    pyproject_path = resolve_pyproject_path(source_type)
    if not pyproject_path:
        return []

    try:
        raw_examples = load_examples_from_toml(pyproject_path, model_name)
    except (OSError, tomllib.TOMLDecodeError):
        log.debug("Failed to load examples for %s", model_name, exc_info=True)
        return []

    if not raw_examples:
        return []

    records = []
    for raw in raw_examples:
        try:
            denulled = validate_example(validation_type, raw, model_fields=model_fields)
        except ValidationError as e:
            log.warning(
                "Skipping invalid example for %s in %s: %s",
                model_name,
                pyproject_path,
                e,
            )
            continue
        flat_rows = flatten_example(denulled)
        ordered_rows = order_example_rows(flat_rows, field_names)
        records.append(ExampleRecord(rows=ordered_rows))

    return records
falling back to a slug filename. + + Always returns a usable link string. Use when the caller needs a + link regardless of whether the type has a registered page. + """ + return self.resolve_link(name) or slug_filename(name) + def _is_normalized(path: PurePosixPath) -> bool: """True when the path contains no '..' or '.' components (except root '.').""" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py new file mode 100644 index 000000000..c52a89d2c --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -0,0 +1,597 @@ +"""Markdown renderer for Pydantic model documentation.""" + +import functools +import re +from collections.abc import Callable +from dataclasses import dataclass +from pathlib import Path +from typing import TypedDict, cast + +from annotated_types import Interval +from jinja2 import Environment, FileSystemLoader +from typing_extensions import NotRequired + +from .example_loader import ExampleRecord +from .field_constraint_description import constraint_display_text +from .link_computation import LinkContext +from .markdown_type_format import format_type, format_underlying_type +from .model_constraint_description import analyze_model_constraints +from .reverse_references import UsedByEntry +from .specs import ( + AnnotatedField, + EnumSpec, + FeatureSpec, + FieldSpec, + ModelSpec, + NewTypeSpec, + PrimitiveSpec, + UnionSpec, +) +from .type_analyzer import ( + ConstraintSource, +) + +__all__ = [ + "render_enum", + "render_feature", + "render_geometry_from_values", + "render_newtype", + "render_primitives_from_specs", +] + + +_LinkFn = Callable[[str], str] + +_TEMPLATES_DIR = Path(__file__).parent / "templates" / "markdown" + +_BARE_URL_RE = re.compile( + r"(?)]+|www\.[^\s<>)]+)" +) +_TRAILING_PUNCT_RE = re.compile(r"[.,;:!?]+$") +# (.+?) 
deliberately does not match newlines -- CommonMark code spans are inline. +_CODE_SPAN_RE = re.compile(r"(`+)(.+?)\1") + + +def _linkify_bare_urls(text: str) -> str: + """Wrap bare URLs in Markdown link syntax. + + Turns ``www.example.com`` into ``[www.example.com](https://www.example.com)`` + and ``https://example.com`` into ``[https://example.com](https://example.com)``. + URLs already inside ``[text](url)`` or backtick code spans are left + untouched. Trailing sentence punctuation (``.``, ``,``, etc.) is excluded + from the link. + + Two-pass approach: extract code spans first, linkify the remaining + text, then restore code spans. + """ + # Extract code spans, replacing with placeholders + spans: list[str] = [] + + def _stash_span(m: re.Match[str]) -> str: + spans.append(m.group(0)) + return f"\x00CODESPAN{len(spans) - 1}\x00" + + text = _CODE_SPAN_RE.sub(_stash_span, text) + + # Linkify bare URLs in non-code text + def _to_link(m: re.Match[str]) -> str: + raw = m.group(0) + url = _TRAILING_PUNCT_RE.sub("", raw) + trailing = raw[len(url) :] + href = url if url.startswith("http") else f"https://{url}" + return f"[{url}]({href}){trailing}" + + text = _BARE_URL_RE.sub(_to_link, text) + + # Restore code spans + for i, span in enumerate(spans): + text = text.replace(f"\x00CODESPAN{i}\x00", span) + + return text + + +@functools.lru_cache(maxsize=1) +def _get_jinja_env() -> Environment: + """Return the Jinja2 environment, creating it on first use.""" + env = Environment( + loader=FileSystemLoader(_TEMPLATES_DIR), + trim_blocks=True, + lstrip_blocks=True, + ) + env.filters["linkify_urls"] = _linkify_bare_urls + return env + + +_EXAMPLE_TRUNCATION_LIMIT = 100 + + +class _FieldRow(TypedDict): + """Template context for a single field table row. + + ``pre_formatted`` indicates the ``name`` already contains backticks + and variant tags, so the template should render it verbatim. 
+ """ + + name: str + type_str: str + description: str | None + pre_formatted: NotRequired[bool] + + +_PARAGRAPH_BREAK_RE = re.compile(r"\n(?:[ \t]*\n)+") + + +def _unwrap_paragraphs(text: str) -> str: + """Unwrap hard-wrapped lines within paragraphs, preserving paragraph breaks. + + Splits on blank lines (paragraph boundaries), replaces single newlines + within each paragraph with spaces, then rejoins with ``\\n\\n``. + Matches markdown's treatment of newlines within paragraphs. + """ + paragraphs = _PARAGRAPH_BREAK_RE.split(text) + return "\n\n".join(p.replace("\n", " ") for p in paragraphs) + + +def _sanitize_for_table_cell(text: str) -> str: + """Sanitize text for embedding in a markdown table cell. + + Unwraps within-paragraph newlines to spaces, then converts paragraph + breaks to ``

``. Escapes pipe characters for table safety. + Uses ``
`` (not ``
``) for MDX/Docusaurus compatibility. + """ + text = text.strip() + text = _unwrap_paragraphs(text) + text = text.replace("\n\n", "

") + return text.replace("|", "\\|") + + +def _format_example_value(value: object) -> str: + """Format an example value for display in a markdown Column | Value table. + + All non-empty values render in backticks for consistent monospace + formatting. Long strings are truncated before wrapping. + """ + if value is None: + return "`null`" + + if isinstance(value, bool): + return "`true`" if value else "`false`" + + if isinstance(value, str): + if value == "": + return "" + if len(value) > _EXAMPLE_TRUNCATION_LIMIT: + value = value[:_EXAMPLE_TRUNCATION_LIMIT] + "..." + return f"`{value}`" + + if isinstance(value, list): + items = ", ".join(str(item) for item in value) + return f"`[{items}]`" + + if isinstance(value, dict): + pairs = ", ".join(f"{k}: {v}" for k, v in value.items()) + return f"`{{{pairs}}}`" + + return f"`{value}`" + + +def _field_template_context( + field: FieldSpec, + ctx: LinkContext | None = None, +) -> _FieldRow: + """Build template context dict for a field.""" + description = ( + _sanitize_for_table_cell(field.description) if field.description else None + ) + return _FieldRow( + name=field.name, + type_str=format_type(field, ctx), + description=description, + ) + + +def _annotate_constraint_notes( + row: _FieldRow, + notes: list[str], +) -> None: + """Append italic constraint descriptions to a field's description cell.""" + formatted = "
".join(f"*{note}*" for note in notes) + if row["description"]: + row["description"] = f"{row['description']}
{formatted}" + else: + row["description"] = formatted + + +def _link_fn_from_ctx(ctx: LinkContext | None) -> _LinkFn: + r"""Build a name-to-markdown-link resolver from a LinkContext. + + Returns a function that resolves a type name to ``[`Name`](href)`` + when the name has a page in the registry, or plain ``\`Name\``` otherwise. + """ + + def resolve(name: str) -> str: + if ctx: + href = ctx.resolve_link(name) + if href: + return f"[`{name}`]({href})" + return f"`{name}`" + + return resolve + + +def _annotate_field_constraints( + row: _FieldRow, field: FieldSpec, ctx: LinkContext | None +) -> None: + """Annotate a field row with constraints from the field's own annotation. + + Shows constraints where source is None — those applied directly to + the field, not inherited from NewType chains. NewType-inherited + constraints appear on the NewType's own page instead. + """ + link_fn = _link_fn_from_ctx(ctx) + notes = [ + constraint_display_text(cs, link_fn=link_fn) + for cs in field.type_info.constraints + if cs.source is None + ] + if notes: + _annotate_constraint_notes(row, notes) + + +def _expandable_list_suffix(field_spec: FieldSpec) -> str: + """Return ``"[]"`` when a field has a list-of-model that will be expanded inline.""" + if ( + field_spec.type_info.is_list + and field_spec.model + and not field_spec.starts_cycle + ): + return "[]" + return "" + + +def _expand_sub_model( + field_spec: FieldSpec, + name: str, + ctx: LinkContext | None, + result: list[_FieldRow], +) -> None: + """Expand sub-model fields inline, appending child rows to *result*.""" + sub = field_spec.model if not field_spec.starts_cycle else None + if sub is not None: + child_prefix = f"{name}{_expandable_list_suffix(field_spec)}." + result.extend(_expand_model_fields(sub.fields, ctx, prefix=child_prefix)) + + +def _annotate_top_level_constraints( + rows: list[_FieldRow], + constraint_notes: dict[str, list[str]] | None, +) -> None: + """Annotate top-level field rows with model-constraint notes. 
def _expand_model_fields(
    fields: list[FieldSpec],
    ctx: LinkContext | None,
    prefix: str = "",
) -> list[_FieldRow]:
    """Flatten nested model fields into dot-notation rows for display.

    Walks the pre-populated FieldSpec.model tree. Stops recursion at
    fields marked with starts_cycle.
    """
    result: list[_FieldRow] = []
    for field_spec in fields:
        row = _field_template_context(field_spec, ctx)
        name = f"{prefix}{field_spec.name}" if prefix else field_spec.name
        row["name"] = f"{name}{_expandable_list_suffix(field_spec)}"
        # Only top-level fields carry their own constraint notes; nested
        # fields would duplicate notes shown on the sub-model's page.
        if not prefix:
            _annotate_field_constraints(row, field_spec, ctx)
        result.append(row)

        _expand_sub_model(field_spec, name, ctx, result)
    return result


def _short_variant_name(class_name: str, union_name: str) -> str:
    """Strip common suffix to produce short variant name.

    Examples:
        RoadSegment, Segment -> Road
        WaterSegment, Segment -> Water
        Building, Building -> Building
    """
    if class_name.endswith(union_name):
        short = class_name[: -len(union_name)]
        # Guard: when class_name == union_name, stripping leaves "".
        if short:
            return short
    return class_name


def _variant_tag(annotated: AnnotatedField, union_name: str) -> str | None:
    """Return an italic variant tag like ``*(Road, Water)*``, or None for shared fields."""
    if annotated.variant_sources is None:
        return None
    short_names = [
        _short_variant_name(v, union_name) for v in annotated.variant_sources
    ]
    return f" *({', '.join(short_names)})*"


def _expand_union_fields(
    spec: UnionSpec,
    ctx: LinkContext | None,
    constraint_notes: dict[str, list[str]] | None = None,
) -> list[_FieldRow]:
    """Expand UnionSpec fields with inline variant tags.

    Shared fields (variant_sources=None) render normally. Variant-specific
    fields get *(ShortName)* tag after the field name.
    """
    result: list[_FieldRow] = []
    for annotated in spec.annotated_fields:
        field_spec = annotated.field_spec
        row = _field_template_context(field_spec, ctx)
        name = field_spec.name
        suffix = _expandable_list_suffix(field_spec)

        _annotate_field_constraints(row, field_spec, ctx)
        if constraint_notes and field_spec.name in constraint_notes:
            _annotate_constraint_notes(row, constraint_notes[field_spec.name])

        tag = _variant_tag(annotated, spec.name)
        if tag is not None:
            # Name already contains backticks; tell the template not to
            # wrap it again.
            row["name"] = f"`{name}{suffix}`{tag}"
            row["pre_formatted"] = True
        else:
            row["name"] = f"{name}{suffix}"

        result.append(row)
        _expand_sub_model(field_spec, name, ctx, result)
    return result


def render_feature(
    spec: FeatureSpec,
    link_ctx: LinkContext | None = None,
    examples: list[ExampleRecord] | None = None,
    used_by: list[UsedByEntry] | None = None,
) -> str:
    """Render a FeatureSpec (ModelSpec or UnionSpec) as Markdown documentation.

    For ModelSpec, requires expand_model_tree to have been called first.
    For UnionSpec, adds inline variant tags to variant-specific fields.
    """
    template = _get_jinja_env().get_template("feature.md.jinja2")

    constraint_descriptions, field_notes = analyze_model_constraints(spec.constraints)

    if isinstance(spec, UnionSpec):
        fields = _expand_union_fields(spec, link_ctx, constraint_notes=field_notes)
    elif isinstance(spec, ModelSpec):
        fields = _expand_model_fields(spec.fields, link_ctx)
        _annotate_top_level_constraints(fields, field_notes)
    else:
        raise TypeError(f"Unsupported spec type: {type(spec).__name__}")

    formatted_examples: list[list[dict[str, str]]] | None = None
    if examples:
        formatted_examples = [
            [
                {"column": key, "value": _format_example_value(val)}
                for key, val in record.rows
            ]
            for record in examples
        ]

    return template.render(
        model=spec,
        fields=fields,
        constraints=constraint_descriptions,
        examples=formatted_examples,
        used_by=_build_used_by_context(used_by, link_ctx),
    )


def render_enum(
    enum_spec: EnumSpec,
    link_ctx: LinkContext | None = None,
    used_by: list[UsedByEntry] | None = None,
) -> str:
    """Render an EnumSpec as Markdown documentation."""
    template = _get_jinja_env().get_template("enum.md.jinja2")
    return template.render(
        enum=enum_spec, used_by=_build_used_by_context(used_by, link_ctx)
    )


@dataclass
class _NewTypeConstraintRow:
    """Rendered constraint for template."""

    display: str
    source: str | None = None
    source_link: str | None = None


def _format_constraint(
    cs: ConstraintSource,
    newtype_name: str,
    ctx: LinkContext | None = None,
) -> _NewTypeConstraintRow:
    """Format a ConstraintSource for display in a NewType page."""
    display = constraint_display_text(cs)

    # Constraints defined on the NewType itself carry no source link.
    if not cs.source or cs.source == newtype_name:
        return _NewTypeConstraintRow(display=display)

    source = cs.source
    source_link = ctx.resolve_link(source) if ctx else None
    return _NewTypeConstraintRow(
        display=display, source=source, source_link=source_link
    )


class _UsedByContext(TypedDict):
    """Template context for a used-by entry."""

    name: str
    link: str | None
for a used-by entry.""" + + name: str + link: str | None + + +def _build_used_by_context( + used_by: list[UsedByEntry] | None, + link_ctx: LinkContext | None, +) -> list[_UsedByContext] | None: + """Build template context for used-by entries.""" + if not used_by: + return None + return [ + { + "name": entry.name, + "link": link_ctx.resolve_link(entry.name) if link_ctx else None, + } + for entry in used_by + ] + + +def render_newtype( + newtype_spec: NewTypeSpec, + link_ctx: LinkContext | None = None, + used_by: list[UsedByEntry] | None = None, +) -> str: + """Render a NewTypeSpec as Markdown documentation.""" + template = _get_jinja_env().get_template("newtype.md.jinja2") + ti = newtype_spec.type_info + underlying = format_underlying_type(ti, link_ctx) + constraints = [ + _format_constraint(cs, newtype_spec.name, link_ctx) for cs in ti.constraints + ] + + return template.render( + newtype=newtype_spec, + underlying_type=underlying, + constraints=constraints, + used_by=_build_used_by_context(used_by, link_ctx), + ) + + +# Matches the ge/le bounds of the int64 NewType in overture.schema.system.primitive. +_INT64_MIN = -(2**63) +_INT64_MAX = 2**63 - 1 + +_Bound = int | float | None + +# IEEE 754 precision by bit width — formatting knowledge, not schema data. +_FLOAT_PRECISION: dict[int, str] = {32: "~7 decimal digits", 64: "~15 decimal digits"} + + +def _format_bound(value: int | float) -> str: + """Format a numeric bound for display. + + Uses ``2^63`` notation for int64-scale values to avoid unreadable + numbers; otherwise formats with thousands separators for ints. + """ + if value == _INT64_MIN: + return "-2^63" + if value == _INT64_MAX: + return "2^63-1" + if isinstance(value, float): + return str(value) + return f"{value:,}" + + +def _format_interval(bounds: Interval) -> str: + """Format an Interval as a range string, or empty if unconstrained. + + Two inclusive bounds render as ``lower to upper``. 
All other + combinations use explicit comparison operators so the + inclusivity/exclusivity is unambiguous. + """ + # Interval fields are typed as Supports* protocols; narrow to numeric + # since we only encounter int/float constraints from the schema. + ge = cast(_Bound, bounds.ge) + gt = cast(_Bound, bounds.gt) + le = cast(_Bound, bounds.le) + lt = cast(_Bound, bounds.lt) + + # Both bounds inclusive: compact "lower to upper" form + if ge is not None and le is not None: + return f"{_format_bound(ge)} to {_format_bound(le)}" + + # Any other two-bound combination: use explicit operators + parts: list[str] = [] + if ge is not None: + parts.append(f">= {_format_bound(ge)}") + elif gt is not None: + parts.append(f"> {_format_bound(gt)}") + + if le is not None: + parts.append(f"<= {_format_bound(le)}") + elif lt is not None: + parts.append(f"< {_format_bound(lt)}") + + return ", ".join(parts) + + +def _bit_width_key(name: str) -> tuple[str, int]: + """Sort key: prefix then numeric bit width.""" + prefix = name.rstrip("0123456789") + digits = name[len(prefix) :] + return (prefix, int(digits) if digits else 0) + + +def render_primitives_from_specs(specs: list[PrimitiveSpec]) -> str: + """Render the primitives.md page from pre-extracted PrimitiveSpecs.""" + template = _get_jinja_env().get_template("primitives.md.jinja2") + + signed_ints: list[dict[str, str | None]] = [] + unsigned_ints: list[dict[str, str | None]] = [] + floats: list[dict[str, str | None]] = [] + + for spec in sorted(specs, key=lambda s: _bit_width_key(s.name)): + if spec.name.startswith(("int", "uint")): + target = signed_ints if spec.name.startswith("int") else unsigned_ints + target.append( + { + "name": spec.name, + "range": _format_interval(spec.bounds), + "description": _sanitize_for_table_cell(spec.description or ""), + } + ) + elif spec.name.startswith("float"): + precision = ( + _FLOAT_PRECISION.get(spec.float_bits, "") if spec.float_bits else "" + ) + floats.append( + { + "name": spec.name, + 
"precision": precision, + "description": _sanitize_for_table_cell(spec.description or ""), + } + ) + + return template.render( + signed_ints=signed_ints, + unsigned_ints=unsigned_ints, + floats=floats, + ) + + +def render_geometry_from_values(geometry_type_values: list[str]) -> str: + """Render the geometry.md page from pre-extracted geometry type values.""" + template = _get_jinja_env().get_template("geometry.md.jinja2") + geometry_types = ", ".join(f"`{v}`" for v in geometry_type_values) + return template.render(geometry_types=geometry_types) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py new file mode 100644 index 000000000..976505e7f --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -0,0 +1,188 @@ +"""Format TypeInfo as markdown type strings with cross-page links.""" + +from __future__ import annotations + +from pydantic import BaseModel + +from .link_computation import LinkContext +from .specs import FieldSpec +from .type_analyzer import TypeInfo, TypeKind +from .type_registry import is_semantic_newtype, resolve_type_name + +__all__ = ["format_dict_type", "format_type", "format_underlying_type"] + + +def _code_link(name: str, href: str) -> str: + """Format a markdown link with inline-code text: [``name``](href).""" + return f"[`{name}`]({href})" + + +def _resolve_type_link(type_name: str, ctx: LinkContext | None = None) -> str: + """Resolve a type name to a linked code span or plain code span. + + When *ctx* is provided, links only to types in the registry (types + without pages render as inline code). Without context, renders as + inline code -- producing a link requires a placement registry to + compute correct relative paths. 
+ """ + if ctx: + href = ctx.resolve_link(type_name) + if href: + return _code_link(type_name, href) + return f"`{type_name}`" + + +def _wrap_list(inner: str) -> str: + """Wrap an inner type string in list<...> markdown syntax.""" + return f"`list<`{inner}`>`" + + +def _linked_type_name(ti: TypeInfo) -> str | None: + """Return the name to use for a markdown link, or None for non-linked types.""" + if is_semantic_newtype(ti): + return ti.newtype_name + if ti.kind in (TypeKind.ENUM, TypeKind.MODEL): + return ti.base_type + return None + + +def _markdown_type_name(ti: TypeInfo) -> str: + """Return the markdown display name for a type. + + Uses the semantic NewType name when present (e.g. ``LanguageTag``), + otherwise falls back to the resolved markdown type (e.g. ``string``). + """ + name = ti.newtype_name if is_semantic_newtype(ti) else None + return name or resolve_type_name(ti, "markdown") + + +def format_dict_type(ti: TypeInfo) -> str: + """Format a dict TypeInfo as bare ``map`` using resolved markdown names.""" + assert ti.dict_key_type is not None + assert ti.dict_value_type is not None + key = _markdown_type_name(ti.dict_key_type) + value = _markdown_type_name(ti.dict_value_type) + return f"map<{key}, {value}>" + + +def _format_union_members( + members: tuple[type[BaseModel], ...], + ctx: LinkContext | None, + separator: str = r" \| ", +) -> str: + """Format union members as individually linked/backticked names. + + Each member is resolved independently so members with pages get linked + while others render as plain code spans. *separator* is inserted between + members (default is ``\\|`` for table-cell safety). 
+ """ + return separator.join( + _resolve_type_link(member.__name__, ctx) for member in members + ) + + +def format_type( + field: FieldSpec, + ctx: LinkContext | None = None, +) -> str: + """Format a field's type for markdown display, with links and qualifiers.""" + ti = field.type_info + qualifiers: list[str] = [] + + if ti.kind == TypeKind.LITERAL and ti.literal_value is not None: + return f'`"{ti.literal_value}"`' + + link_name = _linked_type_name(ti) + + if ti.kind == TypeKind.UNION and ti.union_members: + display = _format_union_members(ti.union_members, ctx) + if ti.is_list: + qualifiers.append("list") + elif ti.is_dict: + if link_name: + display = _resolve_type_link(link_name, ctx) + qualifiers.append("map") + else: + display = f"`{format_dict_type(ti)}`" + elif link_name: + display = _resolve_type_link(link_name, ctx) + if ti.is_list and link_name == ti.newtype_name: + qualifiers.append("list") + elif ti.is_list: + display = _wrap_list(display) + else: + base = resolve_type_name(ti, "markdown") + if ti.is_list: + display = f"`list<{base}>`" + else: + display = f"`{base}`" + + if not field.is_required: + qualifiers.append("optional") + + if qualifiers: + return f"{display} ({', '.join(qualifiers)})" + return display + + +def _linked_or_backticked(ti: TypeInfo, ctx: LinkContext | None) -> tuple[str, bool]: + """Return (formatted_string, has_link) for a TypeInfo component. + + Used by format_underlying_type to decide whether container types + need broken-backtick formatting (interleaving backtick runs with + linked text). + + When ``has_link`` is True, ``formatted_string`` is a markdown link + ready for broken-backtick container syntax. When False, it is a raw + name that the caller embeds inside backticks. 
+ """ + link_name = _linked_type_name(ti) + if link_name and ctx: + href = ctx.resolve_link(link_name) + if href: + return _code_link(link_name, href), True + return _markdown_type_name(ti), False + + +def format_underlying_type(ti: TypeInfo, ctx: LinkContext | None = None) -> str: + """Format a NewType's underlying type for the page header, with links. + + Links enums and models that have their own pages. Does not link the + outermost NewType (which would self-reference). Dict key/value types + use full link resolution since they reference other types. + """ + if ti.kind == TypeKind.UNION and ti.union_members: + return _format_union_members(ti.union_members, ctx, separator=" | ") + + if ti.is_dict and ti.dict_key_type and ti.dict_value_type: + key_str, key_linked = _linked_or_backticked(ti.dict_key_type, ctx) + val_str, val_linked = _linked_or_backticked(ti.dict_value_type, ctx) + if key_linked or val_linked: + if not key_linked: + key_str = f"`{key_str}`" + if not val_linked: + val_str = f"`{val_str}`" + return f"`map<`{key_str}`,`{val_str}`>`" + return f"`map<{key_str}, {val_str}>`" + + # Only link enums and models — skip is_semantic_newtype to avoid + # self-linking (this TypeInfo belongs to the NewType being rendered). + # Use source_type.__name__ rather than base_type: base_type may be + # the outermost NewType name when only one NewType wraps a class. 
+ link_name = ( + ti.source_type.__name__ + if ti.kind in (TypeKind.ENUM, TypeKind.MODEL) and ti.source_type + else None + ) + if link_name and ctx: + href = ctx.resolve_link(link_name) + if href: + linked = _code_link(link_name, href) + if ti.is_list: + return _wrap_list(linked) + return linked + + base = link_name or resolve_type_name(ti, "markdown") + if ti.is_list: + return f"`list<{base}>`" + return f"`{base}`" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py new file mode 100644 index 000000000..66cfa5d05 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py @@ -0,0 +1,141 @@ +"""Compute reverse references from types to their referrers.""" + +from __future__ import annotations + +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from enum import Enum + +from .specs import ( + FeatureSpec, + FieldSpec, + ModelSpec, + NewTypeSpec, + SupplementarySpec, + UnionSpec, +) +from .type_analyzer import TypeInfo, TypeKind, walk_type_info + +__all__ = [ + "UsedByEntry", + "UsedByKind", + "compute_reverse_references", +] + + +class UsedByKind(Enum): + """Kind of referrer in a 'used by' entry.""" + + MODEL = 0 + NEWTYPE = 1 + + +@dataclass(frozen=True, slots=True) +class UsedByEntry: + """A single 'used by' entry pointing to a referrer.""" + + name: str + kind: UsedByKind + + +def compute_reverse_references( + feature_specs: Sequence[FeatureSpec], + all_specs: Mapping[str, SupplementarySpec], +) -> dict[str, list[UsedByEntry]]: + """Compute reverse references from types to their referrers. + + Returns a dict mapping type names to lists of UsedByEntry, sorted with + models before NewTypes, alphabetical within each group. + + Parameters + ---------- + feature_specs : Sequence[FeatureSpec] + Feature-level specs (ModelSpec or UnionSpec). 
+ all_specs : Mapping[str, SupplementarySpec] + Supplementary types (enums, newtypes, sub-models). + + Returns + ------- + dict[str, list[UsedByEntry]] + Dict mapping type names to sorted lists of UsedByEntry. + """ + # Track references with sets to deduplicate + references: dict[str, set[UsedByEntry]] = {} + + def add_reference(target: str, referrer_name: str, kind: UsedByKind) -> None: + """Add a reference from referrer to target, with deduplication.""" + if target == referrer_name or target not in all_specs: + return + references.setdefault(target, set()).add(UsedByEntry(referrer_name, kind)) + + def collect_from_type_info( + ti: TypeInfo, referrer_name: str, referrer_kind: UsedByKind + ) -> None: + """Collect references from a TypeInfo.""" + + def _visit(node: TypeInfo) -> None: + if node.newtype_name is not None: + add_reference(node.newtype_name, referrer_name, referrer_kind) + + if ( + node.kind in (TypeKind.ENUM, TypeKind.MODEL) + and node.source_type is not None + ): + add_reference(node.source_type.__name__, referrer_name, referrer_kind) + + if node.union_members is not None: + for member_cls in node.union_members: + add_reference(member_cls.__name__, referrer_name, referrer_kind) + + walk_type_info(ti, _visit) + + def collect_from_fields( + fields: list[FieldSpec], referrer_name: str, referrer_kind: UsedByKind + ) -> None: + """Collect references from model fields.""" + for field_spec in fields: + collect_from_type_info(field_spec.type_info, referrer_name, referrer_kind) + + def collect_from_model_spec(spec: ModelSpec) -> None: + """Collect references from a ModelSpec.""" + collect_from_fields(spec.fields, spec.name, UsedByKind.MODEL) + + def collect_from_union_spec(spec: UnionSpec) -> None: + """Collect references from a UnionSpec.""" + # Union features reference their members + for member_cls in spec.members: + add_reference(member_cls.__name__, spec.name, UsedByKind.MODEL) + # Also walk fields for other supplementary types + 
collect_from_fields(spec.fields, spec.name, UsedByKind.MODEL) + + def collect_from_newtype_spec(spec: NewTypeSpec, referrer_name: str) -> None: + """Collect references from a NewTypeSpec.""" + collect_from_type_info(spec.type_info, referrer_name, UsedByKind.NEWTYPE) + + # Collect inherited NewTypes from constraint sources + for cs in spec.type_info.constraints: + if cs.source is not None: + add_reference(cs.source, referrer_name, UsedByKind.NEWTYPE) + + # Collect from features + for spec in feature_specs: + if isinstance(spec, ModelSpec): + collect_from_model_spec(spec) + elif isinstance(spec, UnionSpec): + collect_from_union_spec(spec) + + # Collect from supplementary specs (NewTypes and sub-models reference + # other types; enums do not, so they need no processing here) + for name, supp_spec in all_specs.items(): + if isinstance(supp_spec, NewTypeSpec): + collect_from_newtype_spec(supp_spec, name) + elif isinstance(supp_spec, ModelSpec): + collect_from_model_spec(supp_spec) + + # Sort sets into lists + result: dict[str, list[UsedByEntry]] = {} + for target, ref_set in references.items(): + entries = sorted(ref_set, key=lambda e: (e.kind.value, e.name)) + result[target] = entries + + return result diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/_used_by.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/_used_by.md.jinja2 new file mode 100644 index 000000000..fcbd9e82b --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/_used_by.md.jinja2 @@ -0,0 +1,10 @@ +{% if used_by %} + +## Used By + +{% for entry in used_by -%} +{% if entry.link %}- [`{{ entry.name }}`]({{ entry.link }}) +{% else %}- `{{ entry.name }}` +{% endif %} +{% endfor %} +{% endif %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/enum.md.jinja2 
b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/enum.md.jinja2 new file mode 100644 index 000000000..b5b71c254 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/enum.md.jinja2 @@ -0,0 +1,13 @@ +# {{ enum.name }} +{% if enum.description %} + +{{ enum.description | linkify_urls }} +{% endif %} + +## Values + +{% for member in enum.members -%} +- `{{ member.value }}`{% if member.description %} - {{ member.description }}{% endif %} + +{% endfor %} +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/feature.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/feature.md.jinja2 new file mode 100644 index 000000000..78a183c5e --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/feature.md.jinja2 @@ -0,0 +1,45 @@ +# {{ model.name }} +{% if model.description %} + +{{ model.description | linkify_urls }} +{% endif %} + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +{% for field in fields -%} +| {% if field.pre_formatted %}{{ field.name }}{% else %}`{{ field.name }}`{% endif %} | {{ field.type_str }} | {% if field.description %}{{ field.description }} {% endif %}| +{% endfor %} +{% if constraints %} + +## Constraints + +{% for c in constraints %} +- {{ c }} +{% endfor %} +{% endif %} +{% if examples %} + +## Examples +{% if examples|length == 1 %} + +| Column | Value | +| -------: | ------- | +{% for row in examples[0] -%} +| `{{ row.column }}` | {{ row.value }} | +{% endfor %} +{% else %} +{% for example in examples %} + +### Example {{ loop.index }} + +| Column | Value | +| -------: | ------- | +{% for row in example -%} +| `{{ row.column }}` | {{ row.value }} | +{% endfor %} +{% endfor %} +{% endif %} +{% endif %} +{% include '_used_by.md.jinja2' %} diff --git 
a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/geometry.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/geometry.md.jinja2 new file mode 100644 index 000000000..cd6b200de --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/geometry.md.jinja2 @@ -0,0 +1,11 @@ +# Geometry Types + +Spatial types for representing geographic features. + +## Types + +| Type | Description | +| -----: | ------------- | +| `Geometry` | GeoJSON geometry value (Point, LineString, Polygon, etc.) | +| `BBox` | Bounding box as 4 or 6 coordinate values: [west, south, east, north] or [west, south, min-altitude, east, north, max-altitude] | +| `GeometryType` | Enumeration of geometry types: {{ geometry_types }} | diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/newtype.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/newtype.md.jinja2 new file mode 100644 index 000000000..3d2c58f3a --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/newtype.md.jinja2 @@ -0,0 +1,17 @@ +# {{ newtype.name }} +{% if newtype.description %} + +{{ newtype.description | linkify_urls }} +{% endif %} + +Underlying type: {{ underlying_type }} +{% if constraints %} + +## Constraints + +{% for c in constraints -%} +- {{ c.display }}{% if c.source_link %} (from [`{{ c.source }}`]({{ c.source_link }})){% endif %} + +{% endfor %} +{% endif %} +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/primitives.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/primitives.md.jinja2 new file mode 100644 index 000000000..fd87a1ec0 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/primitives.md.jinja2 @@ -0,0 +1,27 @@ 
+# Primitive Types + +Numeric types used for schema field definitions. + +## Integer Types + +| Type | Range | Description | +| -----: | :-----: | ------------- | +{% for t in signed_ints -%} +| `{{ t.name }}` | {{ t.range }} | {{ t.description }} | +{% endfor %} + +## Unsigned Integer Types + +| Type | Range | Description | +| -----: | :-----: | ------------- | +{% for t in unsigned_ints -%} +| `{{ t.name }}` | {{ t.range }} | {{ t.description }} | +{% endfor %} + +## Floating Point Types + +| Type | Precision | Description | +| -----: | :---------: | ------------- | +{% for t in floats -%} +| `{{ t.name }}` | {{ t.precision }} | {{ t.description }} | +{% endfor %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index afce2c505..495fc7faa 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -3,6 +3,7 @@ from __future__ import annotations import types +from collections.abc import Callable from dataclasses import dataclass, field from enum import Enum, auto from typing import Annotated, Any, Literal, Union, get_args, get_origin @@ -21,6 +22,7 @@ "analyze_type", "is_newtype", "single_literal_value", + "walk_type_info", ] @@ -66,6 +68,20 @@ class TypeInfo: description: str | None = None +def walk_type_info(ti: TypeInfo, visitor: Callable[[TypeInfo], None]) -> None: + """Call *visitor* on *ti*, then recurse into dict key/value types. + + Captures the shared recursive descent pattern used by type collection + and reverse reference computation. Union members are ``type`` objects + (not ``TypeInfo``), so callers handle them directly. 
+ """ + visitor(ti) + if ti.dict_key_type is not None: + walk_type_info(ti.dict_key_type, visitor) + if ti.dict_value_type is not None: + walk_type_info(ti.dict_value_type, visitor) + + def is_newtype(annotation: object) -> bool: """Check if annotation is a typing.NewType. diff --git a/packages/overture-schema-codegen/tests/conftest.py b/packages/overture-schema-codegen/tests/conftest.py index 5731662ee..6b19c7454 100644 --- a/packages/overture-schema-codegen/tests/conftest.py +++ b/packages/overture-schema-codegen/tests/conftest.py @@ -1,10 +1,21 @@ """Shared pytest fixtures for overture-schema-codegen tests.""" +import overture.schema.system.primitive as _system_primitive import pytest +from click.testing import CliRunner from codegen_test_support import find_model_class +from overture.schema.codegen.markdown_renderer import ( + render_geometry_from_values, + render_primitives_from_specs, +) from overture.schema.codegen.model_extraction import extract_model +from overture.schema.codegen.primitive_extraction import ( + extract_primitives, + partition_primitive_and_geometry_names, +) from overture.schema.codegen.specs import ModelSpec from overture.schema.core.discovery import discover_models +from overture.schema.system.primitive import GeometryType from pydantic import BaseModel @@ -22,6 +33,12 @@ def update_golden(request: pytest.FixtureRequest) -> bool: return bool(request.config.getoption("--update-golden")) +@pytest.fixture +def cli_runner() -> CliRunner: + """Provide a Click CLI test runner.""" + return CliRunner() + + @pytest.fixture def all_discovered_models() -> dict: """Discover and return all registered Overture models.""" @@ -50,3 +67,18 @@ def place_class(all_discovered_models: dict) -> type[BaseModel]: def division_class(all_discovered_models: dict) -> type[BaseModel]: """Get the Division model class.""" return find_model_class("Division", all_discovered_models) + + +@pytest.fixture(scope="module") +def primitives_markdown() -> str: + """Render the 
primitives.md page from the system primitive module.""" + primitive_names, _ = partition_primitive_and_geometry_names(_system_primitive) + return render_primitives_from_specs( + extract_primitives(primitive_names, _system_primitive) + ) + + +@pytest.fixture(scope="module") +def geometry_markdown() -> str: + """Render the geometry.md page from system GeometryType values.""" + return render_geometry_from_values([m.value for m in GeometryType]) diff --git a/packages/overture-schema-codegen/tests/golden/markdown/common_names.md b/packages/overture-schema-codegen/tests/golden/markdown/common_names.md new file mode 100644 index 000000000..c73d708c9 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/common_names.md @@ -0,0 +1,7 @@ +# CommonNames + +Underlying type: `map` + +## Used By + +- `FeatureWithDict` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md new file mode 100644 index 000000000..fdbfdc7a8 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_address.md @@ -0,0 +1,15 @@ +# FeatureWithAddress + +A feature with an address field. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `theme` | `"test"` | | +| `type` | `"addressed"` | | +| `title` | `string` | Feature title | +| `address` | `Address` | | +| `address.street` | `string` | Street name | +| `address.city` | `string` | City name | +| `address.zip_code` | `string` (optional) | Postal code | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md new file mode 100644 index 000000000..499787d06 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_dict.md @@ -0,0 +1,15 @@ +# FeatureWithDict + +A feature with dict fields. 
+ +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `theme` | `"test"` | | +| `type` | `"dictfeat"` | | +| `name` | `string` | Feature name | +| `names` | `CommonNames` (map, optional) | Localized names | +| `alt_names` | `map` (optional) | Alternate localized names | +| `tags` | `map` (optional) | Arbitrary tags | +| `metadata` | `map` | Numeric metadata | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md new file mode 100644 index 000000000..c3e4bc39b --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/feature_with_sources.md @@ -0,0 +1,13 @@ +# FeatureWithSources + +A feature with a Sources field. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `theme` | `"test"` | | +| `type` | `"sourced"` | | +| `name` | `string` | Feature name | +| `sources[]` | `Sources` (list, optional) | Source data references | +| `sources[].dataset` | `string` | Source dataset name | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/hex_color.md b/packages/overture-schema-codegen/tests/golden/markdown/hex_color.md new file mode 100644 index 000000000..847a1b9a5 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/hex_color.md @@ -0,0 +1,19 @@ +# HexColor + +A color represented as an #RRGGBB or #RGB hexadecimal string. + +For example: + +- `"#ff0000"` or `#f00` for pure red 🟥 +- `"#ffa500"` for bright orange 🟧 +- `"#000000"` or `"#000"` for black ⬛ + +Underlying type: `string` + +## Constraints + +- Allows only hexadecimal color codes (e.g., #FF0000 or #FFF). 
(`HexColorConstraint`, pattern: `^#[0-9A-Fa-f]{3}([0-9A-Fa-f]{3})?$`) + +## Used By + +- `Instrument` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/id.md b/packages/overture-schema-codegen/tests/golden/markdown/id.md new file mode 100644 index 000000000..993e7661e --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/id.md @@ -0,0 +1,15 @@ +# Id + +A unique identifier. + +Underlying type: `string` + +## Constraints + +- `minimum length: 1` +- Allows only strings that contain no whitespace characters. (`NoWhitespaceConstraint`, pattern: `^\S+$`) + +## Used By + +- `Instrument` +- `Venue` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/instrument.md b/packages/overture-schema-codegen/tests/golden/markdown/instrument.md new file mode 100644 index 000000000..5145aba43 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/instrument.md @@ -0,0 +1,20 @@ +# Instrument + +A musical instrument. + +Instruments produce sound through vibration. They are classified +by how sound is produced. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `id` | `Id` | Unique identifier | +| `category` | `"music"` | | +| `kind` | `"instrument"` | | +| `name` | `string` | Common name | +| `tuning` | `float64` (optional) | Concert pitch in Hz.

Standard tuning is 440 Hz. | +| `num_strings` | `int32` (optional) | | +| `family` | `InstrumentFamily` (optional) | | +| `color` | `HexColor` (optional) | Body color | +| `tags` | `list` (optional) | *Ensures all items in a collection are unique. (`UniqueItemsConstraint`)* | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md b/packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md new file mode 100644 index 000000000..d8489cc4f --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/instrument_family.md @@ -0,0 +1,13 @@ +# InstrumentFamily + +Classification by sound production method. + +## Values + +- `string` - Sound from vibrating strings +- `wind` - Sound from vibrating air column +- `percussion` + +## Used By + +- `Instrument` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md b/packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md new file mode 100644 index 000000000..f0aca0300 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/simple_kind.md @@ -0,0 +1,6 @@ +# SimpleKind + +## Values + +- `small` +- `large` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/sources.md b/packages/overture-schema-codegen/tests/golden/markdown/sources.md new file mode 100644 index 000000000..0089f3597 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/sources.md @@ -0,0 +1,14 @@ +# Sources + +Source data references + +Underlying type: `list` + +## Constraints + +- `minimum length: 1` +- Ensures all items in a collection are unique. 
(`UniqueItemsConstraint`) + +## Used By + +- `FeatureWithSources` diff --git a/packages/overture-schema-codegen/tests/golden/markdown/venue.md b/packages/overture-schema-codegen/tests/golden/markdown/venue.md new file mode 100644 index 000000000..f712b8201 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/venue.md @@ -0,0 +1,22 @@ +# Venue + +A concert venue. + +A location where musical performances take place. + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `id` | `Id` | Unique identifier | +| `category` | `"music"` | | +| `kind` | `"venue"` | | +| `name` | `string` (optional) | Venue name
*At least one of `name`, `description` must be set* | +| `description` | `string` (optional) | *At least one of `name`, `description` must be set* | +| `geometry` | `geometry` | *Allowed geometry types: Point, Polygon* | +| `capacity` | `int64` (optional) | *`≥ 1`* | +| `resident_ensemble` | `Id` (optional) | A unique identifier
*References `Instrument` (belongs to)* | + +## Constraints + +- At least one of `name`, `description` must be set diff --git a/packages/overture-schema-codegen/tests/golden/markdown/widget.md b/packages/overture-schema-codegen/tests/golden/markdown/widget.md new file mode 100644 index 000000000..c056d27a3 --- /dev/null +++ b/packages/overture-schema-codegen/tests/golden/markdown/widget.md @@ -0,0 +1,8 @@ +# Widget + +## Fields + +| Name | Type | Description | +| -----: | :----: | ------------- | +| `active` | `boolean` | | +| `label` | `string` | Display label | diff --git a/packages/overture-schema-codegen/tests/test_example_loader.py b/packages/overture-schema-codegen/tests/test_example_loader.py new file mode 100644 index 000000000..fc418e67c --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_example_loader.py @@ -0,0 +1,821 @@ +"""Tests for example_loader module.""" + +import logging +import sys +import types +from collections.abc import Iterator +from pathlib import Path +from textwrap import dedent +from typing import Annotated, Literal + +import pytest +from overture.schema.codegen.example_loader import ( + ExampleRecord, + _denull, + _inject_literal_fields, + flatten_example, + load_examples, + load_examples_from_toml, + order_example_rows, + resolve_pyproject_path, + validate_example, +) +from pydantic import BaseModel, ConfigDict, Field, Tag, ValidationError + + +class TestFlattenExample: + """Tests for flatten_example function.""" + + def test_simple_fields(self) -> None: + """Flatten simple key-value pairs.""" + raw = {"id": "123", "version": 1, "name": "test"} + result = flatten_example(raw) + assert result == [("id", "123"), ("version", 1), ("name", "test")] + + def test_nested_dict(self) -> None: + """Flatten nested dict to dot notation.""" + raw = {"names": {"primary": "foo", "common": {"en": "bar"}}} + result = flatten_example(raw) + assert result == [ + ("names.primary", "foo"), + ("names.common.en", "bar"), + ] + + def 
test_list_of_dicts(self) -> None: + """Flatten list of dicts with array notation.""" + raw = {"sources": [{"dataset": "OSM", "record_id": "w123"}]} + result = flatten_example(raw) + assert result == [ + ("sources[0].dataset", "OSM"), + ("sources[0].record_id", "w123"), + ] + + def test_skip_bbox_at_top_level(self) -> None: + """Skip bbox field at top level.""" + raw = { + "id": "123", + "bbox": {"xmin": -176.6, "xmax": -176.64}, + "version": 1, + } + result = flatten_example(raw) + assert result == [("id", "123"), ("version", 1)] + + def test_nested_bbox_not_skipped(self) -> None: + """Nested bbox fields are not skipped.""" + raw = {"outer": {"bbox": {"xmin": 1.0}}} + result = flatten_example(raw) + assert result == [("outer.bbox.xmin", 1.0)] + + def test_plain_list_kept_as_value(self) -> None: + """Plain lists (non-dict items) are kept as values.""" + raw = {"phones": ["+1234", "+5678"]} + result = flatten_example(raw) + assert result == [("phones", ["+1234", "+5678"])] + + def test_empty_dict(self) -> None: + """Empty dict produces empty list.""" + raw: dict[str, object] = {} + result = flatten_example(raw) + assert result == [] + + def test_empty_list(self) -> None: + """Empty list is kept as value.""" + raw: dict[str, object] = {"tags": []} + result = flatten_example(raw) + assert result == [("tags", [])] + + def test_list_of_list_of_dicts(self) -> None: + """Flatten list[list[dict]] with double-index notation.""" + raw = { + "hierarchies": [ + [ + {"division_id": "aaa", "name": "Country"}, + {"division_id": "bbb", "name": "Region"}, + ], + ] + } + result = flatten_example(raw) + assert result == [ + ("hierarchies[0][0].division_id", "aaa"), + ("hierarchies[0][0].name", "Country"), + ("hierarchies[0][1].division_id", "bbb"), + ("hierarchies[0][1].name", "Region"), + ] + + def test_multiple_list_items(self) -> None: + """Handle multiple items in list of dicts.""" + raw = { + "sources": [ + {"dataset": "OSM", "confidence": 0.9}, + {"dataset": "MSFT", 
"confidence": 0.8}, + ] + } + result = flatten_example(raw) + assert result == [ + ("sources[0].dataset", "OSM"), + ("sources[0].confidence", 0.9), + ("sources[1].dataset", "MSFT"), + ("sources[1].confidence", 0.8), + ] + + +class TestOrderExampleRows: + """Tests for order_example_rows function.""" + + def test_order_by_field_names(self) -> None: + """Order rows by position in field_names.""" + flat_rows = [("version", 1), ("id", "123"), ("name", "test")] + field_names = ["id", "name", "version"] + result = order_example_rows(flat_rows, field_names) + assert result == [("id", "123"), ("name", "test"), ("version", 1)] + + def test_extract_base_field_from_dot_notation(self) -> None: + """Extract base field from dotted keys.""" + flat_rows = [ + ("names.primary", "foo"), + ("id", "123"), + ("names.common.en", "bar"), + ] + field_names = ["id", "names"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("names.primary", "foo"), + ("names.common.en", "bar"), + ] + + def test_extract_base_field_from_array_notation(self) -> None: + """Extract base field from array notation.""" + flat_rows = [ + ("sources[0].dataset", "OSM"), + ("id", "123"), + ("sources[0].record_id", "w123"), + ("sources[1].dataset", "MSFT"), + ] + field_names = ["id", "sources"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("sources[0].dataset", "OSM"), + ("sources[0].record_id", "w123"), + ("sources[1].dataset", "MSFT"), + ] + + def test_order_with_mixed_notation(self) -> None: + """Order rows with mixed simple, dotted, and array notation.""" + flat_rows = [ + ("version", 1), + ("sources[0].dataset", "OSM"), + ("id", "123"), + ("names.primary", "foo"), + ] + field_names = ["id", "names", "sources", "version"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("names.primary", "foo"), + ("sources[0].dataset", "OSM"), + ("version", 1), + ] + + def 
test_unknown_fields_sort_to_end(self) -> None: + """Unknown fields sort to end, maintaining relative order.""" + flat_rows = [ + ("unknown2", "b"), + ("id", "123"), + ("unknown1", "a"), + ("version", 1), + ] + field_names = ["id", "version"] + result = order_example_rows(flat_rows, field_names) + assert result == [ + ("id", "123"), + ("version", 1), + ("unknown2", "b"), + ("unknown1", "a"), + ] + + +class TestLoadExamplesFromToml: + """Tests for load_examples_from_toml function.""" + + def test_load_example_list(self, tmp_path: Path) -> None: + """Load examples for a model from TOML.""" + toml_path = tmp_path / "pyproject.toml" + toml_path.write_text( + dedent(""" + [project] + name = "test-package" + + [[examples.Building]] + id = "123" + version = 1 + + [[examples.Building]] + id = "456" + version = 2 + """) + ) + + result = load_examples_from_toml(toml_path, "Building") + assert len(result) == 2 + assert result[0] == {"id": "123", "version": 1} + assert result[1] == {"id": "456", "version": 2} + + def test_model_not_found_returns_empty(self, tmp_path: Path) -> None: + """Return empty list when model has no examples.""" + toml_path = tmp_path / "pyproject.toml" + toml_path.write_text( + dedent(""" + [project] + name = "test-package" + + [[examples.Building]] + id = "123" + """) + ) + + result = load_examples_from_toml(toml_path, "Road") + assert result == [] + + def test_no_examples_section_returns_empty(self, tmp_path: Path) -> None: + """Return empty list when no examples section exists.""" + toml_path = tmp_path / "pyproject.toml" + toml_path.write_text( + dedent(""" + [project] + name = "test-package" + """) + ) + + result = load_examples_from_toml(toml_path, "Building") + assert result == [] + + +class MockProject: + """A temporary project directory with registered mock modules.""" + + def __init__(self, root: Path, pyproject: Path, mod_name: str) -> None: + self.root = root + self.pyproject = pyproject + self.mod_name = mod_name + self._registered_modules: 
list[str] = [mod_name] + + def write_pyproject(self, content: str) -> None: + self.pyproject.write_text(content) + + def add_submodule(self, *subdirs: str) -> str: + """Register a deeper module under this project's src directory. + + Returns the module name for use in __module__ attributes. + """ + pkg_dir = self.root / "src" / Path(*subdirs) + pkg_dir.mkdir(parents=True, exist_ok=True) + module_file = pkg_dir / "module.py" + module_file.write_text("# module") + + sub_mod_name = f"{self.mod_name}_{'_'.join(subdirs)}" + mod = types.ModuleType(sub_mod_name) + mod.__file__ = str(module_file) + sys.modules[sub_mod_name] = mod + self._registered_modules.append(sub_mod_name) + return sub_mod_name + + def cleanup(self) -> None: + for name in self._registered_modules: + sys.modules.pop(name, None) + + +@pytest.fixture +def mock_project(tmp_path: Path) -> Iterator[MockProject]: + """Create a project directory with a mock module registered in sys.modules. + + Yields a MockProject with root, pyproject path, and mod_name. + Writes a minimal pyproject.toml by default; tests can overwrite via + ``project.write_pyproject()``. 
+ """ + root = tmp_path / "project" + root.mkdir() + pyproject = root / "pyproject.toml" + pyproject.write_text("[project]\nname = 'test'") + + src_dir = root / "src" + src_dir.mkdir() + module_file = src_dir / "module.py" + module_file.write_text("# module") + + mod_name = f"_test_mock_{id(tmp_path)}" + mod = types.ModuleType(mod_name) + mod.__file__ = str(module_file) + sys.modules[mod_name] = mod + + project = MockProject(root=root, pyproject=pyproject, mod_name=mod_name) + yield project + project.cleanup() + + +class TestResolvePyprojectPath: + """Tests for resolve_pyproject_path function.""" + + def test_finds_pyproject_in_parent_dirs(self, mock_project: MockProject) -> None: + """Walk up from module location to find pyproject.toml.""" + deeper_mod = mock_project.add_submodule("pkg") + + class MockModel: + __module__ = deeper_mod + + result = resolve_pyproject_path(MockModel) + assert result == mock_project.pyproject + + def test_returns_none_when_not_found(self, tmp_path: Path) -> None: + """Return None when pyproject.toml doesn't exist.""" + module_dir = tmp_path / "src" + module_dir.mkdir() + module_file = module_dir / "module.py" + module_file.write_text("# module") + + mod_name = f"_test_resolve_nf_{id(tmp_path)}" + mod = types.ModuleType(mod_name) + mod.__file__ = str(module_file) + sys.modules[mod_name] = mod + try: + + class MockModel: + __module__ = mod_name + + result = resolve_pyproject_path(MockModel) + assert result is None + finally: + sys.modules.pop(mod_name, None) + + def test_returns_none_when_no_module(self) -> None: + """Return None when model's module is not in sys.modules.""" + + class MockModel: + __module__ = "_nonexistent_module_for_test" + + result = resolve_pyproject_path(MockModel) + assert result is None + + +class TestLoadExamples: + """Tests for load_examples entry point.""" + + def test_end_to_end(self, mock_project: MockProject) -> None: + """Load, flatten, and order examples end-to-end.""" + mock_project.write_pyproject( + 
dedent(""" + [project] + name = "test" + + [[examples.Building]] + version = 1 + names = { primary = "Tower" } + id = "123" + + [examples.Building.bbox] + xmin = 1.0 + xmax = 2.0 + + [[examples.Building.sources]] + dataset = "OSM" + record_id = "w456" + """) + ) + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + id: str + version: int + names: dict[str, object] + sources: list[dict[str, object]] + + field_names = ["id", "names", "sources", "version"] + result = load_examples(MockModel, "Building", field_names) + + assert len(result) == 1 + record = result[0] + assert isinstance(record, ExampleRecord) + + assert record.rows == [ + ("id", "123"), + ("names.primary", "Tower"), + ("sources[0].dataset", "OSM"), + ("sources[0].record_id", "w456"), + ("version", 1), + ] + + def test_returns_empty_on_missing_pyproject(self) -> None: + """Return empty list when model's module not in sys.modules.""" + + class MockModel(BaseModel): + __module__ = "_nonexistent_module_for_load_test" + + result = load_examples(MockModel, "Building", ["id"]) + assert result == [] + + def test_returns_empty_on_missing_model(self, mock_project: MockProject) -> None: + """Return empty list when model has no examples.""" + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + + result = load_examples(MockModel, "Building", ["id"]) + assert result == [] + + def test_invalid_examples_skipped_with_warning( + self, mock_project: MockProject, caplog: pytest.LogCaptureFixture + ) -> None: + """Invalid examples are skipped and warning logged.""" + mock_project.write_pyproject( + dedent(""" + [project] + name = "test" + + [[examples.MockModel]] + name = "valid" + count = 1 + + [[examples.MockModel]] + name = "invalid" + count = "not_an_int" + + [[examples.MockModel]] + name = "also_valid" + count = 2 + """) + ) + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + name: str + count: int + + caplog.set_level(logging.WARNING) + + result = 
load_examples(MockModel, "MockModel", ["name", "count"]) + + assert len(result) == 2 + assert result[0].rows == [("name", "valid"), ("count", 1)] + assert result[1].rows == [("name", "also_valid"), ("count", 2)] + + assert any( + "MockModel" in record.message + and "validation" in record.message.lower() + and str(mock_project.pyproject) in record.message + for record in caplog.records + ) + + def test_denulled_values_in_output(self, mock_project: MockProject) -> None: + """Flattened output contains None not "null" strings.""" + mock_project.write_pyproject( + dedent(""" + [project] + name = "test" + + [[examples.MockModel]] + name = "test" + value = "null" + """) + ) + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + name: str + value: int | None + + result = load_examples(MockModel, "MockModel", ["name", "value"]) + + assert len(result) == 1 + assert result[0].rows == [("name", "test"), ("value", None)] + + +class TestDenull: + """Tests for _denull function.""" + + def test_converts_null_string_to_none(self) -> None: + """Top-level "null" strings become None.""" + assert _denull({"a": "null"}) == {"a": None} + + def test_nested_dict(self) -> None: + """Recurse into nested dicts.""" + data = {"a": {"b": "null"}} + assert _denull(data) == {"a": {"b": None}} + + def test_list_of_dicts(self) -> None: + """Recurse into dicts inside lists.""" + data = {"items": [{"x": "null"}]} + assert _denull(data) == {"items": [{"x": None}]} + + def test_mixed_types_unchanged(self) -> None: + """Non-"null" strings, ints, bools, and plain lists pass through.""" + data = { + "name": "hello", + "count": 42, + "flag": True, + "tags": ["a", "b"], + "score": 3.14, + } + assert _denull(data) == data + + def test_no_mutation(self) -> None: + """Original dict is not modified.""" + original = {"a": "null", "b": {"c": "null"}} + _denull(original) + assert original == {"a": "null", "b": {"c": "null"}} + + def test_empty_dict(self) -> None: + """Empty dict returns empty 
dict.""" + assert _denull({}) == {} + + def test_deeply_nested(self) -> None: + """Handle multiple levels of nesting.""" + data = {"a": {"b": {"c": "null"}}} + assert _denull(data) == {"a": {"b": {"c": None}}} + + def test_null_strings_in_plain_list(self) -> None: + """Convert "null" strings inside plain lists.""" + data = {"tags": ["a", "null", "b"]} + assert _denull(data) == {"tags": ["a", None, "b"]} + + +class TestInjectLiteralFields: + """Tests for _inject_literal_fields function.""" + + def test_injects_single_value_literal(self) -> None: + """Inject field with single-value Literal annotation.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "theme": "buildings"} + + def test_skips_non_literal_field(self) -> None: + """Do not inject fields without Literal annotations.""" + + class MockModel(BaseModel): + name: str + count: int + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower"} + + def test_skips_already_present_field(self) -> None: + """Do not overwrite fields already in data.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + data = {"theme": "custom", "name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"theme": "custom", "name": "Tower"} + + def test_respects_validation_alias(self) -> None: + """Use validation_alias when injecting.""" + + class MockModel(BaseModel): + class_: Literal["building"] = Field(validation_alias="class") + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "class": "building"} + + def test_no_mutation(self) -> None: + """Original data dict is not modified.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + 
data = {"name": "Tower"} + original_data = data.copy() + _inject_literal_fields(MockModel.model_fields, data) + assert data == original_data + + def test_multiple_literal_fields(self) -> None: + """Inject multiple Literal fields.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + type: Literal["building"] + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "theme": "buildings", "type": "building"} + + def test_skips_multi_value_literal(self) -> None: + """Do not inject Literal with multiple values.""" + + class MockModel(BaseModel): + status: Literal["active", "inactive"] + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower"} + + def test_respects_alias_fallback(self) -> None: + """Fall back to alias if validation_alias not set.""" + + class MockModel(BaseModel): + class_: Literal["building"] = Field(alias="class") + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "class": "building"} + + def test_unwraps_optional_literal(self) -> None: + """Inject Optional[Literal["x"]] fields (union-wrapped by Pydantic).""" + + class MockModel(BaseModel): + theme: Literal["buildings"] | None = None + name: str + + data = {"name": "Tower"} + result = _inject_literal_fields(MockModel.model_fields, data) + assert result == {"name": "Tower", "theme": "buildings"} + + +class TestValidateExample: + """Tests for validate_example function.""" + + def test_valid_data_passes(self) -> None: + """Valid data is validated and denulled dict returned.""" + + class MockModel(BaseModel): + name: str + count: int + + raw = {"name": "test", "count": 42} + result = validate_example(MockModel, raw) + assert result == {"name": "test", "count": 42} + + def test_invalid_data_raises_validation_error(self) -> None: + 
"""Invalid data raises ValidationError.""" + + class MockModel(BaseModel): + count: int + + raw = {"count": "not_an_int"} + with pytest.raises(ValidationError): + validate_example(MockModel, raw) + + def test_denulled_dict_returned(self) -> None: + """Denulled dict is returned, not raw or preprocessed.""" + + class MockModel(BaseModel): + name: str + value: int | None + + raw = {"name": "test", "value": "null"} + result = validate_example(MockModel, raw) + assert result == {"name": "test", "value": None} + + def test_literals_injected_before_validation(self) -> None: + """Missing Literal fields are injected before validation.""" + + class MockModel(BaseModel): + theme: Literal["buildings"] + name: str + + raw = {"name": "Tower"} + result = validate_example(MockModel, raw) + # Returned dict is denulled, NOT preprocessed (no injected literals) + assert result == {"name": "Tower"} + + +class TestValidateExampleWithUnion: + """Tests for validate_example with discriminated unions via TypeAdapter.""" + + def test_validates_union_via_type_adapter(self) -> None: + """TypeAdapter validates against a discriminated union.""" + + class Dog(BaseModel): + kind: Literal["dog"] + bark: str + + class Cat(BaseModel): + kind: Literal["cat"] + purr: bool + + PetUnion = Annotated[ + Annotated[Dog, Tag("dog")] | Annotated[Cat, Tag("cat")], + Field(discriminator="kind"), + ] + + raw = {"kind": "dog", "bark": "woof"} + result = validate_example(PetUnion, raw, model_fields=Dog.model_fields) + assert result == {"kind": "dog", "bark": "woof"} + + def test_invalid_union_example_raises(self) -> None: + """Invalid data against union raises ValidationError.""" + + class Dog(BaseModel): + kind: Literal["dog"] + bark: str + + class Cat(BaseModel): + kind: Literal["cat"] + purr: bool + + PetUnion = Annotated[ + Annotated[Dog, Tag("dog")] | Annotated[Cat, Tag("cat")], + Field(discriminator="kind"), + ] + + raw = {"kind": "dog", "bark": 42} # bark should be str + with pytest.raises(ValidationError): 
+ validate_example(PetUnion, raw, model_fields=Dog.model_fields) + + def test_null_cross_arm_fields_accepted(self) -> None: + """Null fields from other union arms are accepted in flat-schema examples. + + Parquet files have columns for all union arms. A road segment row + includes ``rail_flags=null`` because the column exists in the table. + Validation should accept these cross-arm nulls. + """ + + class _Base(BaseModel): + model_config = ConfigDict(extra="forbid") + kind: str + name: str + + class Dog(_Base): + kind: Literal["dog"] + bark: str | None = None + + class Cat(_Base): + kind: Literal["cat"] + purr: bool | None = None + + PetUnion = Annotated[ + Annotated[Dog, Tag("dog")] | Annotated[Cat, Tag("cat")], + Field(discriminator="kind"), + ] + + # Flat schema: Dog example includes Cat's "purr" field as null + raw = {"kind": "dog", "name": "Rex", "bark": "woof", "purr": "null"} + result = validate_example(PetUnion, raw, model_fields=_Base.model_fields) + # Returned dict preserves the original denulled data + assert result == { + "kind": "dog", + "name": "Rex", + "bark": "woof", + "purr": None, + } + + +class TestIntegration: + """Integration tests with real schema models.""" + + def test_real_building_examples_validate(self) -> None: + """Validate real Building examples from the schema package.""" + pytest.importorskip("overture.schema.buildings.building") + + from overture.schema.buildings.building import Building # noqa: PLC0415 + + # Find the pyproject.toml for the Building model + pyproject_path = resolve_pyproject_path(Building) + assert pyproject_path is not None, "Could not find pyproject.toml for Building" + + # Load raw examples from TOML + raw_examples = load_examples_from_toml(pyproject_path, "Building") + assert len(raw_examples) > 0, "No Building examples found in pyproject.toml" + + # Validate each example + for idx, raw_example in enumerate(raw_examples): + # Should not raise ValidationError + validated = validate_example(Building, raw_example) + 
assert isinstance(validated, dict), f"Example {idx}: Expected dict result" + + def test_real_segment_examples_validate(self) -> None: + """Validate real Segment examples (discriminated union with cross-arm fields).""" + pytest.importorskip("overture.schema.transportation") + + from overture.schema.transportation import Segment # noqa: PLC0415 + from overture.schema.transportation.segment.models import ( # noqa: PLC0415 + RoadSegment, + TransportationSegment, + ) + + pyproject_path = resolve_pyproject_path(RoadSegment) + assert pyproject_path is not None + + raw_examples = load_examples_from_toml(pyproject_path, "Segment") + assert len(raw_examples) > 0, "No Segment examples found" + + for idx, raw_example in enumerate(raw_examples): + validated = validate_example( + Segment, + raw_example, + model_fields=TransportationSegment.model_fields, + ) + assert isinstance(validated, dict), f"Example {idx}: Expected dict result" diff --git a/packages/overture-schema-codegen/tests/test_golden_markdown.py b/packages/overture-schema-codegen/tests/test_golden_markdown.py new file mode 100644 index 000000000..2ecb6939c --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_golden_markdown.py @@ -0,0 +1,124 @@ +"""Golden-file snapshot tests for Markdown renderer output.""" + +from enum import Enum +from pathlib import Path + +import pytest +from codegen_test_support import ( + CommonNames, + FeatureWithAddress, + FeatureWithDict, + FeatureWithSources, + HexColor, + Id, + Instrument, + InstrumentFamily, + SimpleKind, + Sources, + Venue, + Widget, + assert_golden, +) +from overture.schema.codegen.enum_extraction import extract_enum +from overture.schema.codegen.markdown_renderer import ( + render_enum, + render_feature, + render_newtype, +) +from overture.schema.codegen.model_extraction import expand_model_tree, extract_model +from overture.schema.codegen.newtype_extraction import extract_newtype +from overture.schema.codegen.reverse_references import ( + UsedByEntry, + 
compute_reverse_references, +) +from overture.schema.codegen.type_collection import collect_all_supplementary_types +from pydantic import BaseModel + +GOLDEN_DIR = Path(__file__).parent / "golden" / "markdown" + +FEATURE_CASES = [ + (Instrument, "instrument.md"), + (Venue, "venue.md"), + (Widget, "widget.md"), + (FeatureWithSources, "feature_with_sources.md"), + (FeatureWithAddress, "feature_with_address.md"), + (FeatureWithDict, "feature_with_dict.md"), +] + +ENUM_CASES = [ + (InstrumentFamily, "instrument_family.md"), + (SimpleKind, "simple_kind.md"), +] + +NEWTYPE_CASES = [ + (HexColor, "hex_color.md"), + (Id, "id.md"), + (Sources, "sources.md"), + (CommonNames, "common_names.md"), +] + + +@pytest.fixture(scope="module") +def reverse_refs() -> dict[str, list[UsedByEntry]]: + """Compute reverse references for all test models.""" + feature_specs = [] + for model_class, _ in FEATURE_CASES: + assert isinstance(model_class, type) and issubclass(model_class, BaseModel) + spec = extract_model(model_class) + expand_model_tree(spec) + feature_specs.append(spec) + + all_specs = collect_all_supplementary_types(feature_specs) + return compute_reverse_references(feature_specs, all_specs) + + +@pytest.mark.parametrize( + ("model_class", "golden_filename"), + FEATURE_CASES, + ids=[name for _, name in FEATURE_CASES], +) +def test_feature_golden( + model_class: type[BaseModel], + golden_filename: str, + update_golden: bool, + reverse_refs: dict[str, list[UsedByEntry]], +) -> None: + spec = extract_model(model_class) + expand_model_tree(spec) + used_by = reverse_refs.get(spec.name) + actual = render_feature(spec, used_by=used_by) + assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) + + +@pytest.mark.parametrize( + ("enum_class", "golden_filename"), + ENUM_CASES, + ids=[name for _, name in ENUM_CASES], +) +def test_enum_golden( + enum_class: type[Enum], + golden_filename: str, + update_golden: bool, + reverse_refs: dict[str, list[UsedByEntry]], +) -> None: + 
spec = extract_enum(enum_class) + used_by = reverse_refs.get(spec.name) + actual = render_enum(spec, used_by=used_by) + assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) + + +@pytest.mark.parametrize( + ("newtype_callable", "golden_filename"), + NEWTYPE_CASES, + ids=[name for _, name in NEWTYPE_CASES], +) +def test_newtype_golden( + newtype_callable: object, + golden_filename: str, + update_golden: bool, + reverse_refs: dict[str, list[UsedByEntry]], +) -> None: + spec = extract_newtype(newtype_callable) + used_by = reverse_refs.get(spec.name) + actual = render_newtype(spec, used_by=used_by) + assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py new file mode 100644 index 000000000..d01e63b2b --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -0,0 +1,1339 @@ +"""Tests for Markdown renderer.""" + +from collections.abc import Callable +from enum import Enum +from pathlib import PurePosixPath +from typing import Annotated, Literal, NewType + +import pytest +from annotated_types import Ge, Interval +from codegen_test_support import ( + STR_TYPE, + CommonNames, + FeatureBase, + FeatureWithAddress, + FeatureWithSources, + SimpleModel, + Sources, + TreeNode, + Venue, + make_union_spec, +) +from overture.schema.codegen.example_loader import ExampleRecord +from overture.schema.codegen.link_computation import LinkContext +from overture.schema.codegen.markdown_renderer import ( + _format_constraint, + _format_example_value, + _linkify_bare_urls, + _sanitize_for_table_cell, + render_enum, + render_feature, + render_newtype, + render_primitives_from_specs, +) +from overture.schema.codegen.model_extraction import expand_model_tree, extract_model +from overture.schema.codegen.newtype_extraction import extract_newtype +from 
overture.schema.codegen.reverse_references import UsedByEntry, UsedByKind +from overture.schema.codegen.specs import ( + AnnotatedField, + EnumMemberSpec, + EnumSpec, + FieldSpec, + PrimitiveSpec, +) +from overture.schema.codegen.type_analyzer import ConstraintSource +from overture.schema.system.field_constraint import ( + CountryCodeAlpha2Constraint, + JsonPointerConstraint, + UniqueItemsConstraint, +) +from overture.schema.system.model_constraint import no_extra_fields +from overture.schema.system.primitive import int32 +from overture.schema.system.ref import Id +from overture.schema.system.string import HexColor +from pydantic import BaseModel, Field + +_FLAT_MEMBER = EnumMemberSpec(name="FLAT", value="flat", description=None) + +_ROOF_SHAPE_SPEC = EnumSpec( + name="RoofShape", + description="The shape of the roof.", + members=[_FLAT_MEMBER], +) + + +class TestSanitizeForTableCell: + """Tests for _sanitize_for_table_cell.""" + + def test_single_line_unchanged(self) -> None: + """Single-line text passes through unchanged.""" + assert ( + _sanitize_for_table_cell("A simple description.") == "A simple description." + ) + + def test_single_newline_becomes_space(self) -> None: + """Single newline within a paragraph becomes a space.""" + assert _sanitize_for_table_cell("Line one.\nLine two.") == "Line one. Line two." + + def test_blank_line_becomes_double_br(self) -> None: + """Blank line (paragraph break) becomes
<br><br>
.""" + assert ( + _sanitize_for_table_cell("Para one.\n\nPara two.") + == "Para one.
<br><br>
Para two." + ) + + def test_blank_line_with_whitespace(self) -> None: + """Blank line containing only whitespace is treated as blank.""" + assert ( + _sanitize_for_table_cell("Para one.\n \nPara two.") + == "Para one.
<br><br>
Para two." + ) + + def test_multiple_blank_lines_collapsed(self) -> None: + """Multiple consecutive blank lines collapse to one
<br><br>
.""" + assert _sanitize_for_table_cell("A.\n\n\nB.") == "A.
<br><br>
B." + + def test_pipe_escaped(self) -> None: + """Pipe characters escaped to avoid breaking table columns.""" + assert _sanitize_for_table_cell("foo | bar") == "foo \\| bar" + + def test_pipe_and_newline_both_handled(self) -> None: + """Pipes and newlines handled together.""" + assert _sanitize_for_table_cell("a | b\nc | d") == "a \\| b c \\| d" + + def test_strips_leading_trailing_whitespace(self) -> None: + """Leading/trailing whitespace stripped.""" + assert _sanitize_for_table_cell(" hello ") == "hello" + + +class TestLinkifyBareUrls: + """Tests for _linkify_bare_urls.""" + + def test_www_url_gets_linked(self) -> None: + """www. URLs become Markdown links with https:// href.""" + assert ( + _linkify_bare_urls("see www.example.com for details") + == "see [www.example.com](https://www.example.com) for details" + ) + + def test_https_url_gets_linked(self) -> None: + """https:// URLs become self-referencing Markdown links.""" + assert ( + _linkify_bare_urls("see https://example.com/path") + == "see [https://example.com/path](https://example.com/path)" + ) + + def test_http_url_gets_linked(self) -> None: + """http:// URLs become self-referencing Markdown links.""" + assert ( + _linkify_bare_urls("see http://example.com") + == "see [http://example.com](http://example.com)" + ) + + def test_existing_markdown_link_unchanged(self) -> None: + """URLs already inside [text](url) are left alone.""" + text = "[example](https://example.com)" + assert _linkify_bare_urls(text) == text + + def test_text_without_urls_unchanged(self) -> None: + """Plain text passes through unchanged.""" + assert _linkify_bare_urls("no urls here") == "no urls here" + + def test_url_in_parentheses(self) -> None: + """URL inside sentence parentheses gets linked.""" + result = _linkify_bare_urls("from the OA (www.openaddresses.io) project") + assert "[www.openaddresses.io](https://www.openaddresses.io)" in result + + def test_trailing_period_excluded(self) -> None: + """Trailing sentence punctuation 
is not part of the URL.""" + assert ( + _linkify_bare_urls("found on https://www.wikidata.org/.") + == "found on [https://www.wikidata.org/](https://www.wikidata.org/)." + ) + + def test_trailing_comma_excluded(self) -> None: + """Trailing comma is not part of the URL.""" + assert ( + _linkify_bare_urls("see https://example.com, and more") + == "see [https://example.com](https://example.com), and more" + ) + + def test_url_in_backtick_code_span_unchanged(self) -> None: + """URLs inside backtick code spans are not linkified.""" + text = "use `https://example.com` as the base" + assert _linkify_bare_urls(text) == text + + def test_url_in_double_backtick_code_span_unchanged(self) -> None: + """URLs inside double-backtick code spans are not linkified.""" + text = "use ``https://example.com/path`` as the base" + assert _linkify_bare_urls(text) == text + + def test_mixed_code_span_and_bare_url(self) -> None: + """Code-span URLs preserved while bare URLs are linkified.""" + text = "see `https://a.com` and https://b.com" + result = _linkify_bare_urls(text) + assert "`https://a.com`" in result + assert "[https://b.com](https://b.com)" in result + + +class TestRenderFeatureBasic: + """Tests for render_feature with basic models.""" + + def test_renders_title_from_model_name(self) -> None: + """Should render model name as H1 title.""" + spec = extract_model(SimpleModel) + result = render_feature(spec) + + assert "# SimpleModel" in result + + def test_renders_description_from_docstring(self) -> None: + """Should render model docstring as description.""" + + class DescribedModel(BaseModel): + """This is the model description.""" + + value: int + + spec = extract_model(DescribedModel) + result = render_feature(spec) + + assert "This is the model description." 
in result + + def test_renders_fields_section(self) -> None: + """Should include Fields section header.""" + + class ModelWithField(BaseModel): + """Model with a field.""" + + name: str + + spec = extract_model(ModelWithField) + result = render_feature(spec) + + assert "## Fields" in result + + def test_renders_field_table_header(self) -> None: + """Should render field table with proper headers.""" + + class ModelWithField(BaseModel): + """Model with a field.""" + + name: str + + spec = extract_model(ModelWithField) + result = render_feature(spec) + + assert "| Name | Type | Description |" in result + assert "| -----: | :----: | ------------- |" in result + + +class TestRenderFeatureFieldTable: + """Tests for field table rendering.""" + + def test_renders_required_field(self) -> None: + """Should render required field without (optional) suffix.""" + + class ModelWithRequired(BaseModel): + """Model with required field.""" + + name: str = Field(description="The name") + + spec = extract_model(ModelWithRequired) + result = render_feature(spec) + + # Should have backtick-quoted field name + assert "| `name` |" in result + # Type should be string without optional + assert "| `string` |" in result or "string" in result + # Description should be present + assert "The name" in result + + def test_renders_optional_field(self) -> None: + """Should render optional field with (optional) suffix.""" + + class ModelWithOptional(BaseModel): + """Model with optional field.""" + + nickname: str | None = Field(None, description="Optional nickname") + + spec = extract_model(ModelWithOptional) + result = render_feature(spec) + + assert "| `nickname` |" in result + assert "(optional)" in result + assert "Optional nickname" in result + + def test_renders_typed_fields(self) -> None: + """Should render field types correctly.""" + + class ModelWithTypes(BaseModel): + """Model with various types.""" + + count: int + price: float + active: bool + + spec = extract_model(ModelWithTypes) + 
result = render_feature(spec) + + # Check that fields are present (exact type format may vary) + assert "`count`" in result + assert "`price`" in result + assert "`active`" in result + + def test_multiline_description_sanitized_in_table(self) -> None: + """Multiline field description rendered with
<br> in table cell.""" + + class ModelWithMultilineDesc(BaseModel): + """Model.""" + + name: str = Field(description="First line.\n\nSecond paragraph.") + + spec = extract_model(ModelWithMultilineDesc) + result = render_feature(spec) + + assert "First line.
<br><br>
Second paragraph." in result + # The table should not be broken by a blank line + lines = result.splitlines() + table_start = next(i for i, line in enumerate(lines) if "| Name |" in line) + for i in range(table_start, len(lines)): + if lines[i].strip() == "": + break + assert lines[i].startswith("|"), f"Table broken at line {i}: {lines[i]}" + + +class TestRenderFeatureWithThemeType: + """Tests for rendering Feature-like models with theme/type.""" + + def test_renders_theme_and_type_fields(self) -> None: + """Should render theme and type as Literal fields.""" + + class Place(FeatureBase[Literal["places"], Literal["place"]]): + """A place feature.""" + + name: str + + spec = extract_model(Place) + result = render_feature(spec) + + # Theme and type should appear somewhere in output + assert "places" in result + assert "place" in result + + +class TestRenderFeatureLiteralField: + """Tests for rendering Literal-typed fields.""" + + def test_literal_field_renders_as_quoted_value(self) -> None: + """Literal field should render as quoted string in backticks.""" + + class TestFeature(FeatureBase[Literal["test_theme"], Literal["test_type"]]): + """Test feature.""" + + name: str + + spec = extract_model(TestFeature) + result = render_feature(spec) + + assert '| `"test_theme"` |' in result + assert '| `"test_type"` |' in result + + +class TestRenderFeatureNewTypeDisplay: + """Tests for NewType rendering in Markdown.""" + + def test_newtype_wrapping_list_renders_name_with_list_qualifier( + self, + ) -> None: + """NewType wrapping a list renders as name with (list, optional).""" + + class Item(BaseModel): + value: str + + TestSources = NewType( + "TestSources", Annotated[list[Item], UniqueItemsConstraint()] + ) + + class ModelWithSources(BaseModel): + """Model with sources.""" + + sources: TestSources | None = None + + spec = extract_model(ModelWithSources) + expand_model_tree(spec) + result = render_feature(spec) + + assert "`TestSources`" in result + assert "(list, optional)" 
in result + + def test_hex_color_renders_as_newtype_name(self) -> None: + """HexColor (unregistered NewType) renders as code-formatted name.""" + + class ModelWithColor(BaseModel): + """Model with color.""" + + color: HexColor | None = None + + spec = extract_model(ModelWithColor) + result = render_feature(spec) + + assert "`HexColor`" in result + assert "(optional)" in result + + def test_registered_primitive_renders_through_registry(self) -> None: + """Registered primitive (int32) renders via registry, not as NewType link.""" + + class ModelWithCount(BaseModel): + """Model with count.""" + + count: int32 + + spec = extract_model(ModelWithCount) + result = render_feature(spec) + + assert "| `int32` |" in result + # Should NOT be linked + assert "](int32.md)" not in result + + def test_plain_str_renders_as_string(self) -> None: + """Plain str field renders as 'string'.""" + + class ModelWithName(BaseModel): + """Model with name.""" + + name: str + + spec = extract_model(ModelWithName) + result = render_feature(spec) + + assert "| `string` |" in result + + def test_enum_renders_as_code_without_context(self) -> None: + """Enum fields render as inline code without LinkContext.""" + + class Status(str, Enum): + ACTIVE = "active" + + class ModelWithEnum(BaseModel): + """Model with enum.""" + + status: Status + + spec = extract_model(ModelWithEnum) + result = render_feature(spec) + + assert "| `Status` |" in result + + def test_model_field_renders_as_code_without_context(self) -> None: + """BaseModel field renders as inline code without LinkContext.""" + + class Inner(BaseModel): + value: str + + class Outer(BaseModel): + """Model with nested model.""" + + inner: Inner + + spec = extract_model(Outer) + expand_model_tree(spec) + result = render_feature(spec) + + assert "| `Inner` |" in result + + +class TestRenderFeatureInlineExpansion: + """Tests for inline expansion of nested model fields.""" + + def test_direct_model_fields_expanded_with_dot_prefix(self) -> None: + 
"""Direct model field expands sub-fields with dot notation.""" + spec = extract_model(FeatureWithAddress) + expand_model_tree(spec) + result = render_feature(spec) + + assert "| `address.street` |" in result + assert "| `address.city` |" in result + assert "| `address.zip_code` |" in result + + def test_list_of_model_fields_expanded_with_bracket_dot_prefix(self) -> None: + """List-of-model field expands sub-fields with []. notation.""" + spec = extract_model(FeatureWithSources) + expand_model_tree(spec) + result = render_feature(spec) + + assert "| `sources[]` |" in result + assert "| `sources[].dataset` |" in result + + def test_cycle_detection_prevents_infinite_recursion(self) -> None: + """Recursive model emits parent row but does not recurse.""" + spec = extract_model(TreeNode) + expand_model_tree(spec) + result = render_feature(spec) + + # The parent field row appears + assert "| `parent` |" in result + # But no recursion into parent.label + assert "parent.label" not in result + + def test_primitive_field_unchanged(self) -> None: + """Primitive fields produce a single row without expansion.""" + spec = extract_model(SimpleModel) + result = render_feature(spec) + + lines = [line for line in result.splitlines() if "| `name` |" in line] + assert len(lines) == 1 + + def test_parent_row_preserved_before_expansion(self) -> None: + """The parent field row still appears before expanded sub-fields.""" + spec = extract_model(FeatureWithAddress) + expand_model_tree(spec) + result = render_feature(spec) + + # Parent row for 'address' itself appears + assert "| `address` |" in result + # And it appears before the expanded fields + lines = result.splitlines() + address_line = next( + i for i, line in enumerate(lines) if "| `address` |" in line + ) + street_line = next( + i for i, line in enumerate(lines) if "| `address.street` |" in line + ) + assert address_line < street_line + + +class TestRenderFeatureConstraints: + """Tests for model-level constraint rendering in 
feature pages.""" + + def test_venue_has_constraints_section(self) -> None: + """Venue's @require_any_of renders as a Constraints section.""" + spec = extract_model(Venue) + result = render_feature(spec) + + assert "## Constraints" in result + assert "At least one of `name`, `description` must be set" in result + + def test_constraints_section_between_fields_and_examples(self) -> None: + """Constraints section appears after Fields, before Examples.""" + spec = extract_model(Venue) + examples = [ExampleRecord(rows=[("name", "test")])] + result = render_feature(spec, examples=examples) + + lines = result.splitlines() + fields_line = next(i for i, line in enumerate(lines) if "## Fields" in line) + constraints_line = next( + i for i, line in enumerate(lines) if "## Constraints" in line + ) + examples_line = next(i for i, line in enumerate(lines) if "## Examples" in line) + + assert fields_line < constraints_line < examples_line + + def test_no_constraints_section_without_constraints(self) -> None: + """Models without model-level constraints omit Constraints section.""" + + class Plain(BaseModel): + """Plain model.""" + + name: str + + spec = extract_model(Plain) + result = render_feature(spec) + + assert "## Constraints" not in result + + def test_no_constraints_section_with_only_no_extra_fields(self) -> None: + """Model with only @no_extra_fields omits Constraints section.""" + + @no_extra_fields + class Strict(BaseModel): + """Strict model.""" + + name: str + + spec = extract_model(Strict) + result = render_feature(spec) + + assert "## Constraints" not in result + + +class TestRenderFeatureConstraintNotes: + """Tests for inline constraint notes in field description cells.""" + + def test_venue_name_field_includes_constraint_note(self) -> None: + """Venue's name field description cell includes constraint note in italics.""" + spec = extract_model(Venue) + result = render_feature(spec) + + # Find the row for 'name' field + lines = result.splitlines() + name_line = 
next(line for line in lines if "| `name` |" in line)
+        assert "Venue name" in name_line
+        assert "*At least one of `name`, `description` must be set*" in name_line
+        assert "<br>" in name_line  # FIXME(review): string literal reconstructed from corrupted source bytes — confirm the expected description/note separator
+
+    def test_field_with_no_description_gets_constraint_note(self) -> None:
+        """Field with no existing description still gets the constraint note."""
+        spec = extract_model(Venue)
+        result = render_feature(spec)
+
+        # description field on Venue has no Field(description=...)
+        lines = result.splitlines()
+        desc_line = next(line for line in lines if "| `description` |" in line)
+        assert "*At least one of `name`, `description` must be set*" in desc_line
TestRenderEnumBasic: + """Tests for render_enum with simple enums.""" + + def test_renders_title_from_enum_name(self) -> None: + """Should render enum name as H1 title.""" + result = render_enum(_ROOF_SHAPE_SPEC) + + assert "# RoofShape" in result + + def test_renders_description_from_docstring(self) -> None: + """Should render enum docstring as description.""" + result = render_enum(_ROOF_SHAPE_SPEC) + + assert "The shape of the roof." in result + + def test_renders_values_section(self) -> None: + """Should include Values section header.""" + result = render_enum(_ROOF_SHAPE_SPEC) + + assert "## Values" in result + + def test_renders_values_as_bullet_list(self) -> None: + """Should render each value as a bullet point.""" + spec = EnumSpec( + name="RoofShape", + description="The shape of the roof.", + members=[ + EnumMemberSpec(name="FLAT", value="flat", description=None), + EnumMemberSpec(name="GABLED", value="gabled", description=None), + EnumMemberSpec(name="DOME", value="dome", description=None), + ], + ) + + result = render_enum(spec) + + assert "- `flat`" in result + assert "- `gabled`" in result + assert "- `dome`" in result + + +class TestRenderEnumDocumented: + """Tests for render_enum with DocumentedEnum (per-value descriptions).""" + + def test_renders_member_descriptions(self) -> None: + """Should render per-value descriptions after the value.""" + spec = EnumSpec( + name="Side", + description="The side on which something appears.", + members=[ + EnumMemberSpec( + name="LEFT", value="left", description="On the left side" + ), + EnumMemberSpec( + name="RIGHT", value="right", description="On the right side" + ), + ], + ) + + result = render_enum(spec) + + assert "- `left` - On the left side" in result + assert "- `right` - On the right side" in result + + def test_renders_mixed_documented_undocumented(self) -> None: + """Should handle mix of documented and undocumented members.""" + spec = EnumSpec( + name="ConnectionState", + description="Connection 
states.", + members=[ + EnumMemberSpec(name="CONNECTED", value="connected", description=None), + EnumMemberSpec( + name="QUIESCING", + value="quiescing", + description="Gracefully shutting down", + ), + ], + ) + + result = render_enum(spec) + + # Undocumented: just the value + assert "- `connected`" in result + # Documented: value + description + assert "- `quiescing` - Gracefully shutting down" in result + + +class TestRenderEnumNoDescription: + """Tests for enums without class docstrings.""" + + def test_enum_without_description(self) -> None: + """Should render enum without description section when None.""" + spec = EnumSpec( + name="SimpleEnum", + description=None, + members=[ + EnumMemberSpec(name="A", value="a", description=None), + EnumMemberSpec(name="B", value="b", description=None), + ], + ) + + result = render_enum(spec) + + # Should still have title and values + assert "# SimpleEnum" in result + assert "## Values" in result + assert "- `a`" in result + assert "- `b`" in result + # Should not have empty lines where description would be + lines = result.strip().split("\n") + # Title should be followed by blank line then Values header + assert lines[0] == "# SimpleEnum" + + +class TestRenderNewType: + """Tests for render_newtype.""" + + def test_renders_title(self) -> None: + """Should render NewType name as H1 title.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "# HexColor" in result + + def test_renders_underlying_type(self) -> None: + """Should show the resolved underlying type below the description.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "# HexColor\n" in result + assert "Underlying type: `string`" in result + + def test_renders_constraints(self) -> None: + """Should render constraints section with description and pattern.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "## Constraints" in result + assert "Allows only hexadecimal color codes" 
in result + assert "`HexColorConstraint`" in result + assert "pattern:" in result + + def test_renders_id_with_provenance_without_link(self) -> None: + """Id page shows constraints without provenance links when no context.""" + spec = extract_newtype(Id) + result = render_newtype(spec) + + assert "# Id" in result + assert "NoWhitespaceConstraint" in result + # No link without LinkContext + assert "no_whitespace_string.md" not in result + + def test_builtin_underlying_type_not_linked(self) -> None: + """Built-in underlying type (string) stays in plain backticks.""" + spec = extract_newtype(HexColor) + result = render_newtype(spec) + + assert "Underlying type: `string`" in result + + def test_list_model_underlying_type_without_context(self) -> None: + """List-of-model underlying type renders without link when no context.""" + spec = extract_newtype(Sources) + result = render_newtype(spec) + + assert "Underlying type: `list`" in result + + def test_dict_underlying_types_without_context(self) -> None: + """Dict key/value NewTypes render without links when no context.""" + spec = extract_newtype(CommonNames) + result = render_newtype(spec) + + assert "map" in result + + +class TestPlacementAwareLinks: + """Tests for rendering with LinkContext for cross-directory links.""" + + def test_feature_links_to_shared_type_via_registry(self) -> None: + """Feature in theme subdir links to shared type in types/ dir.""" + + class ModelWithColor(BaseModel): + """Model with color.""" + + color: HexColor | None = None + + spec = extract_model(ModelWithColor) + page_path = PurePosixPath("buildings/building/building.md") + registry = { + "HexColor": PurePosixPath("types/strings/hex_color.md"), + } + ctx = LinkContext(page_path, registry) + + result = render_feature(spec, link_ctx=ctx) + + assert "[`HexColor`](../../types/strings/hex_color.md)" in result + + def test_feature_links_to_theme_level_type(self) -> None: + """Feature in subdir links to type at theme level.""" + + class 
RoofShape(str, Enum): + FLAT = "flat" + + class ModelWithRoof(BaseModel): + """Model with roof.""" + + roof: RoofShape + + spec = extract_model(ModelWithRoof) + page_path = PurePosixPath("buildings/building/building.md") + registry = { + "RoofShape": PurePosixPath("buildings/roof_shape.md"), + } + ctx = LinkContext(page_path, registry) + + result = render_feature(spec, link_ctx=ctx) + + assert "[`RoofShape`](../roof_shape.md)" in result + + def test_feature_links_to_sibling_in_same_subdir(self) -> None: + """Feature links to type in its own subdirectory.""" + + class BuildingClass(str, Enum): + RESIDENTIAL = "residential" + + class ModelWithClass(BaseModel): + """Model.""" + + building_class: BuildingClass + + spec = extract_model(ModelWithClass) + page_path = PurePosixPath("buildings/building/building.md") + registry = { + "BuildingClass": PurePosixPath("buildings/building/building_class.md"), + } + ctx = LinkContext(page_path, registry) + + result = render_feature(spec, link_ctx=ctx) + + assert "[`BuildingClass`](building_class.md)" in result + + def test_without_context_renders_as_code(self) -> None: + """Without LinkContext, types render as inline code (no link).""" + + class ModelWithColor(BaseModel): + """Model with color.""" + + color: HexColor | None = None + + spec = extract_model(ModelWithColor) + result = render_feature(spec) + + assert "`HexColor`" in result + assert "hex_color.md" not in result + + def test_newtype_underlying_type_linked_via_registry(self) -> None: + """NewType header links underlying model type through placement registry.""" + spec = extract_newtype(Sources) + page_path = PurePosixPath("types/references/sources.md") + registry = { + "SourceItem": PurePosixPath("types/references/source_item.md"), + } + ctx = LinkContext(page_path, registry) + + result = render_newtype(spec, link_ctx=ctx) + + assert "[`SourceItem`](source_item.md)" in result + + def test_newtype_underlying_type_not_linked_when_absent(self) -> None: + """Underlying type 
stays backtick-only when missing from registry.""" + spec = extract_newtype(Sources) + page_path = PurePosixPath("types/references/sources.md") + registry: dict[str, PurePosixPath] = {} + ctx = LinkContext(page_path, registry) + + result = render_newtype(spec, link_ctx=ctx) + + assert "`list`" in result + assert "[`SourceItem`]" not in result + + def test_newtype_provenance_link_uses_registry(self) -> None: + """NewType provenance links resolve through placement registry.""" + spec = extract_newtype(Id) + page_path = PurePosixPath("types/references/id.md") + registry = { + "NoWhitespaceString": PurePosixPath( + "types/strings/no_whitespace_string.md" + ), + } + ctx = LinkContext(page_path, registry) + + result = render_newtype(spec, link_ctx=ctx) + + assert "../strings/no_whitespace_string.md" in result + + +class TestFormatExampleValue: + """Tests for _format_example_value.""" + + def test_none_renders_as_null(self) -> None: + """None renders as backtick-quoted null.""" + + assert _format_example_value(None) == "`null`" + + def test_string_null_renders_with_backticks(self) -> None: + """String 'null' renders as a backtick-wrapped string.""" + + assert _format_example_value("null") == "`null`" + + def test_bool_true_renders_lowercase(self) -> None: + """Boolean True renders as backtick-quoted lowercase true.""" + + assert _format_example_value(True) == "`true`" + + def test_bool_false_renders_lowercase(self) -> None: + """Boolean False renders as backtick-quoted lowercase false.""" + + assert _format_example_value(False) == "`false`" + + def test_empty_string_renders_empty(self) -> None: + """Empty string renders as empty string.""" + + assert _format_example_value("") == "" + + def test_short_string_has_backticks(self) -> None: + """Non-empty strings render with backticks.""" + + assert _format_example_value("OpenStreetMap") == "`OpenStreetMap`" + + def test_long_string_truncated(self) -> None: + """Strings longer than 100 chars are truncated with ellipsis.""" + + 
long = "x" * 150 + result = _format_example_value(long) + assert result == f"`{'x' * 100}...`" + + def test_integer_has_backticks(self) -> None: + """Integers render with backticks.""" + + assert _format_example_value(42) == "`42`" + assert _format_example_value(0) == "`0`" + assert _format_example_value(-17) == "`-17`" + + def test_float_has_backticks(self) -> None: + """Floats render with backticks.""" + + assert _format_example_value(3.14) == "`3.14`" + assert _format_example_value(-2.5) == "`-2.5`" + + def test_list_renders_comma_separated(self) -> None: + """Lists render as backtick-wrapped comma-separated values.""" + + assert _format_example_value([1, 2, 3]) == "`[1, 2, 3]`" + assert _format_example_value(["a", "b"]) == "`[a, b]`" + assert _format_example_value([]) == "`[]`" + + def test_pipe_character_not_escaped_in_backticks(self) -> None: + """Pipe characters need no escaping inside backticks.""" + + assert _format_example_value("foo|bar") == "`foo|bar`" + assert _format_example_value("a|b|c") == "`a|b|c`" + + +class TestRenderFeatureWithExamples: + """Tests for render_feature with examples support.""" + + def test_accepts_examples_parameter(self) -> None: + """render_feature accepts examples parameter.""" + spec = extract_model(SimpleModel) + examples = [ExampleRecord(rows=[("name", "test")])] + + # Should not raise + result = render_feature(spec, examples=examples) + assert "# SimpleModel" in result + + def test_renders_single_example_without_heading(self) -> None: + """Single example renders without 'Example 1' heading.""" + + class ModelWithCount(BaseModel): + """A simple model.""" + + name: str + count: int + + spec = extract_model(ModelWithCount) + examples = [ExampleRecord(rows=[("name", "test"), ("count", 42)])] + + result = render_feature(spec, examples=examples) + assert "## Examples" in result + assert "| Column | Value |" in result + assert "| `name` | `test` |" in result + assert "| `count` | `42` |" in result + # Should NOT have "Example 1" 
heading + assert "### Example 1" not in result + + def test_renders_multiple_examples_with_headings(self) -> None: + """Multiple examples render with 'Example N' headings.""" + spec = extract_model(SimpleModel) + examples = [ + ExampleRecord(rows=[("name", "first")]), + ExampleRecord(rows=[("name", "second")]), + ] + + result = render_feature(spec, examples=examples) + assert "## Examples" in result + assert "### Example 1" in result + assert "### Example 2" in result + assert "| `name` | `first` |" in result + assert "| `name` | `second` |" in result + + def test_formats_example_values(self) -> None: + """Example values are formatted using _format_example_value.""" + + class TestModel(BaseModel): + """Test model.""" + + text: str + count: int + active: bool + optional: str | None + + spec = extract_model(TestModel) + examples = [ + ExampleRecord( + rows=[ + ("text", "hello"), + ("count", 42), + ("active", True), + ("optional", None), + ] + ) + ] + + result = render_feature(spec, examples=examples) + # String with backticks + assert "| `text` | `hello` |" in result + # Number with backticks + assert "| `count` | `42` |" in result + # Boolean with backticks, lowercase + assert "| `active` | `true` |" in result + # None as null + assert "| `optional` | `null` |" in result + + def test_no_examples_omits_section(self) -> None: + """When examples is None, Examples section is not rendered.""" + spec = extract_model(SimpleModel) + result = render_feature(spec, examples=None) + + assert "## Examples" not in result + + def test_empty_examples_list_omits_section(self) -> None: + """When examples is empty list, Examples section is not rendered.""" + spec = extract_model(SimpleModel) + result = render_feature(spec, examples=[]) + + assert "## Examples" not in result + + +class TestRenderPrimitivesPage: + """Tests for the aggregate primitives page.""" + + def test_contains_title(self, primitives_markdown: str) -> None: + assert "# Primitive Types" in primitives_markdown + + def 
test_contains_signed_integers(self, primitives_markdown: str) -> None: + assert "| `int8` |" in primitives_markdown + assert "| `int16` |" in primitives_markdown + assert "| `int32` |" in primitives_markdown + assert "| `int64` |" in primitives_markdown + + def test_contains_unsigned_integers(self, primitives_markdown: str) -> None: + assert "| `uint8` |" in primitives_markdown + assert "| `uint16` |" in primitives_markdown + assert "| `uint32` |" in primitives_markdown + + def test_contains_floats(self, primitives_markdown: str) -> None: + assert "| `float32` |" in primitives_markdown + assert "| `float64` |" in primitives_markdown + + def test_ranges_match_schema_constraints(self, primitives_markdown: str) -> None: + """Range strings derive from ge/le constraints in the schema.""" + assert "-128 to 127" in primitives_markdown + assert "-32,768 to 32,767" in primitives_markdown + assert "-2,147,483,648 to 2,147,483,647" in primitives_markdown + assert "-2^63 to 2^63-1" in primitives_markdown + assert "0 to 255" in primitives_markdown + assert "0 to 65,535" in primitives_markdown + assert "0 to 4,294,967,295" in primitives_markdown + + def test_descriptions_from_docstrings(self, primitives_markdown: str) -> None: + """Descriptions derive from first line of NewType docstrings.""" + assert "Portable 8-bit signed integer." in primitives_markdown + assert "Portable 16-bit unsigned integer." in primitives_markdown + assert "Portable IEEE 32-bit floating point number." 
in primitives_markdown + + def test_float_precision(self, primitives_markdown: str) -> None: + """Float entries show IEEE 754 precision.""" + assert "~7 decimal digits" in primitives_markdown + assert "~15 decimal digits" in primitives_markdown + + def test_pipe_in_description_escaped(self) -> None: + """Pipe characters in primitive descriptions are escaped.""" + specs = [ + PrimitiveSpec( + name="int8", + description="Range: -128 | 127", + bounds=Interval(ge=-128, le=127), + ), + ] + result = render_primitives_from_specs(specs) + assert "Range: -128 \\| 127" in result + + +class TestRenderGeometryPage: + """Tests for the aggregate geometry page.""" + + def test_contains_title(self, geometry_markdown: str) -> None: + assert "# Geometry Types" in geometry_markdown + + def test_contains_geometry_types(self, geometry_markdown: str) -> None: + assert "Geometry" in geometry_markdown + assert "BBox" in geometry_markdown + assert "GeometryType" in geometry_markdown + + def test_lists_geometry_type_values(self, geometry_markdown: str) -> None: + assert "`point`" in geometry_markdown or "`POINT`" in geometry_markdown + + +class TestRenderUnionTemplate: + """Tests for UnionSpec template rendering with synthetic specs.""" + + def test_shared_fields_have_no_variant_tag(self) -> None: + """Shared fields render without variant annotation.""" + spec = make_union_spec( + description="A test union.", + annotated_fields=[ + AnnotatedField( + field_spec=FieldSpec( + name="id", + type_info=STR_TYPE, + description="ID", + is_required=True, + ), + variant_sources=None, + ), + ], + ) + result = render_feature(spec) + assert "| `id` |" in result + assert "*(" not in result # no variant tag + + def test_variant_fields_have_inline_tag(self) -> None: + """Variant-specific fields get *(Variant)* tag.""" + spec = make_union_spec( + name="Segment", + annotated_fields=[ + AnnotatedField( + field_spec=FieldSpec( + name="speed_limit", + type_info=STR_TYPE, + description=None, + is_required=False, 
+ ), + variant_sources=("RoadSegment",), + ), + ], + ) + result = render_feature(spec) + assert "| `speed_limit` *(Road)* |" in result + + +class TestFormatConstraintDisplay: + """Tests for FieldConstraint display with on-demand description/pattern extraction.""" + + def test_description_and_pattern(self) -> None: + """Constraint with docstring and pattern renders both.""" + cs = ConstraintSource(source=None, constraint=CountryCodeAlpha2Constraint()) + result = _format_constraint(cs, "CountryCodeAlpha2") + assert "Allows only ISO 3166-1 alpha-2 country codes." in result.display + assert "`CountryCodeAlpha2Constraint`" in result.display + assert "pattern: `^[A-Z]{2}$`" in result.display + + def test_description_without_pattern(self) -> None: + """Constraint with docstring but no pattern renders description only.""" + cs = ConstraintSource(source=None, constraint=JsonPointerConstraint()) + result = _format_constraint(cs, "JsonPointer") + assert "Allows only valid JSON Pointer values (RFC 6901)." 
in result.display + assert "`JsonPointerConstraint`" in result.display + assert "pattern" not in result.display + + def test_no_description_falls_through(self) -> None: + """Plain string metadata has no docstring and falls through.""" + cs = ConstraintSource(source=None, constraint="plain string metadata") + result = _format_constraint(cs, "SomeType") + assert result.display == "`plain string metadata`" + + def test_annotated_types_uses_operator_notation_not_docstring(self) -> None: + """annotated-types constraints use operator notation, not their __doc__.""" + cs = ConstraintSource(source=None, constraint=Ge(ge=0)) + result = _format_constraint(cs, "SomeType") + assert result.display == "`≥ 0`" + assert "Ge(ge=x)" not in result.display + + def test_constraint_class_not_linked(self) -> None: + """Constraint class name stays in backticks (no pages generated for constraints).""" + cs = ConstraintSource(source=None, constraint=CountryCodeAlpha2Constraint()) + result = _format_constraint(cs, "CountryCodeAlpha2") + assert "`CountryCodeAlpha2Constraint`" in result.display + assert "[`CountryCodeAlpha2Constraint`](" not in result.display + + +def _feature_spec() -> object: + return extract_model(SimpleModel) + + +def _enum_spec() -> object: + return _ROOF_SHAPE_SPEC + + +def _newtype_spec() -> object: + return extract_newtype(HexColor) + + +_USED_BY_CASES = [ + pytest.param(_feature_spec, render_feature, id="feature"), + pytest.param(_enum_spec, render_enum, id="enum"), + pytest.param(_newtype_spec, render_newtype, id="newtype"), +] + + +class TestUsedByRendering: + """Tests for rendering 'Used By' section across all render functions.""" + + @pytest.mark.parametrize(("spec_factory", "render_fn"), _USED_BY_CASES) + def test_entries_render_without_links_when_no_context( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + ) -> None: + """Without LinkContext, 'Used By' entries render as inline code.""" + used_by = [ + UsedByEntry(name="Building", 
kind=UsedByKind.MODEL), + UsedByEntry(name="BuildingId", kind=UsedByKind.NEWTYPE), + ] + + result = render_fn(spec_factory(), used_by=used_by) + + assert "## Used By" in result + assert "- `Building`" in result + assert "- `BuildingId`" in result + + @pytest.mark.parametrize( + ("spec_factory", "render_fn", "page_path", "expected_link"), + [ + pytest.param( + _feature_spec, + render_feature, + PurePosixPath("types/strings/hex_color.md"), + "../../buildings/building/building.md", + id="feature", + ), + pytest.param( + _enum_spec, + render_enum, + PurePosixPath("buildings/roof_shape.md"), + "building/building.md", + id="enum", + ), + pytest.param( + _newtype_spec, + render_newtype, + PurePosixPath("types/strings/hex_color.md"), + "../../buildings/building/building.md", + id="newtype", + ), + ], + ) + def test_link_context_uses_registry( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + page_path: PurePosixPath, + expected_link: str, + ) -> None: + """Used-by entries resolve links through placement registry.""" + registry = { + "Building": PurePosixPath("buildings/building/building.md"), + } + ctx = LinkContext(page_path, registry) + used_by = [UsedByEntry(name="Building", kind=UsedByKind.MODEL)] + + result = render_fn(spec_factory(), link_ctx=ctx, used_by=used_by) + + assert "## Used By" in result + assert f"[`Building`]({expected_link})" in result + + @pytest.mark.parametrize(("spec_factory", "render_fn"), _USED_BY_CASES) + def test_no_used_by_omits_section( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + ) -> None: + """When used_by is None, 'Used By' section is not rendered.""" + result = render_fn(spec_factory(), used_by=None) + + assert "## Used By" not in result + + @pytest.mark.parametrize(("spec_factory", "render_fn"), _USED_BY_CASES) + def test_empty_used_by_omits_section( + self, + spec_factory: Callable[[], object], + render_fn: Callable[..., str], + ) -> None: + """When used_by is empty list, 
'Used By' section is not rendered.""" + result = render_fn(spec_factory(), used_by=[]) + + assert "## Used By" not in result diff --git a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py new file mode 100644 index 000000000..7692d06b1 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -0,0 +1,181 @@ +"""Tests for markdown type formatting.""" + +from enum import Enum +from pathlib import PurePosixPath +from typing import Literal, NewType + +from overture.schema.codegen.link_computation import LinkContext +from overture.schema.codegen.markdown_type_format import ( + format_dict_type, + format_type, + format_underlying_type, +) +from overture.schema.codegen.specs import FieldSpec +from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind, analyze_type +from overture.schema.system.primitive import int32 +from pydantic import BaseModel + + +class _ModelA(BaseModel): + x: int + + +class _ModelB(BaseModel): + y: str + + +class TestFormatType: + """Tests for format_type.""" + + def test_plain_str_renders_as_string(self) -> None: + ti = analyze_type(str) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + assert format_type(field) == "`string`" + + def test_optional_adds_qualifier(self) -> None: + ti = analyze_type(str | None) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=False) + assert format_type(field) == "`string` (optional)" + + def test_literal_renders_as_quoted_value(self) -> None: + ti = analyze_type(Literal["places"]) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + assert format_type(field) == '`"places"`' + + def test_enum_without_context_renders_as_code(self) -> None: + class Color(str, Enum): + RED = "red" + + ti = analyze_type(Color) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + assert 
format_type(field) == "`Color`" + + def test_enum_with_link_context(self) -> None: + class Color(str, Enum): + RED = "red" + + ti = analyze_type(Color) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + ctx = LinkContext( + page_path=PurePosixPath("buildings/building/building.md"), + registry={"Color": PurePosixPath("types/enums/color.md")}, + ) + assert format_type(field, ctx) == "[`Color`](../../types/enums/color.md)" + + def test_list_of_primitives(self) -> None: + ti = analyze_type(list[str]) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + assert format_type(field) == "`list`" + + def test_registered_primitive_not_linked(self) -> None: + ti = analyze_type(int32) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + result = format_type(field) + assert result == "`int32`" + assert "](int32.md)" not in result + + +class TestFormatDictType: + """Tests for format_dict_type.""" + + def test_simple_dict_renders_as_map(self) -> None: + ti = analyze_type(dict[str, int]) + result = format_dict_type(ti) + assert result == "map" + + def test_dict_with_newtype_shows_semantic_name(self) -> None: + MyKey = NewType("MyKey", str) + ti = analyze_type(dict[MyKey, int]) + result = format_dict_type(ti) + assert result == "map" + + +def _make_union_field(ti: TypeInfo, *, is_required: bool = True) -> FieldSpec: + """Build a FieldSpec wrapping a union TypeInfo for test convenience.""" + return FieldSpec(name="x", type_info=ti, description=None, is_required=is_required) + + +class TestFormatUnionType: + """Tests for UNION-kind TypeInfo in format_type.""" + + def test_union_renders_all_members(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + result = format_type(_make_union_field(ti)) + assert "`_ModelA`" in result + assert "`_ModelB`" in result + # Pipe separator escaped for table cells + assert r"\|" in result + + def test_union_with_link_context_links_each_member(self) -> None: + ti = 
analyze_type(_ModelA | _ModelB) + ctx = LinkContext( + page_path=PurePosixPath("theme/feature/feature.md"), + registry={ + "_ModelA": PurePosixPath("theme/feature/types/model_a.md"), + "_ModelB": PurePosixPath("theme/feature/types/model_b.md"), + }, + ) + result = format_type(_make_union_field(ti), ctx) + assert "[`_ModelA`](types/model_a.md)" in result + assert "[`_ModelB`](types/model_b.md)" in result + + def test_optional_union_adds_qualifier(self) -> None: + ti = analyze_type(_ModelA | _ModelB | None) + result = format_type(_make_union_field(ti, is_required=False)) + assert "(optional)" in result + assert "`_ModelA`" in result + assert "`_ModelB`" in result + + def test_list_of_union_adds_qualifier(self) -> None: + ti = TypeInfo( + base_type="_ModelA", + kind=TypeKind.UNION, + is_list=True, + union_members=(_ModelA, _ModelB), + ) + result = format_type(_make_union_field(ti)) + assert "(list)" in result + assert "`_ModelA`" in result + assert "`_ModelB`" in result + + def test_union_members_unlinked_without_context(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + result = format_type(_make_union_field(ti)) + # No markdown links without context + assert "]()" not in result + assert "[`" not in result + + def test_union_partial_links(self) -> None: + """Members with pages get linked; members without don't.""" + ti = analyze_type(_ModelA | _ModelB) + ctx = LinkContext( + page_path=PurePosixPath("theme/feature/feature.md"), + registry={"_ModelA": PurePosixPath("theme/feature/types/model_a.md")}, + ) + result = format_type(_make_union_field(ti), ctx) + assert "[`_ModelA`](types/model_a.md)" in result + assert "`_ModelB`" in result + # _ModelB should NOT be linked + assert "[`_ModelB`]" not in result + + +class TestFormatUnderlyingUnionType: + """Tests for UNION-kind TypeInfo in format_underlying_type.""" + + def test_union_renders_all_members(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + result = format_underlying_type(ti) + assert result == 
"`_ModelA` | `_ModelB`" + + def test_union_with_link_context(self) -> None: + ti = analyze_type(_ModelA | _ModelB) + ctx = LinkContext( + page_path=PurePosixPath("types/my_union.md"), + registry={ + "_ModelA": PurePosixPath("theme/feature/types/model_a.md"), + "_ModelB": PurePosixPath("theme/feature/types/model_b.md"), + }, + ) + result = format_underlying_type(ti, ctx) + assert "[`_ModelA`](../theme/feature/types/model_a.md)" in result + assert "[`_ModelB`](../theme/feature/types/model_b.md)" in result diff --git a/packages/overture-schema-codegen/tests/test_reverse_references.py b/packages/overture-schema-codegen/tests/test_reverse_references.py new file mode 100644 index 000000000..fdc33eb26 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_reverse_references.py @@ -0,0 +1,165 @@ +"""Tests for reverse reference computation.""" + +from typing import NewType + +import pytest +from codegen_test_support import ( + FeatureWithAddress, + Instrument, + RoadSegment, + TreeNode, + Venue, + make_union_spec, +) +from overture.schema.codegen.model_extraction import expand_model_tree, extract_model +from overture.schema.codegen.newtype_extraction import extract_newtype +from overture.schema.codegen.reverse_references import ( + UsedByKind, + compute_reverse_references, +) +from overture.schema.codegen.type_collection import collect_all_supplementary_types +from overture.schema.system.ref import Id +from overture.schema.system.string import NoWhitespaceString + + +@pytest.mark.parametrize( + ("model_class", "model_name", "target_name"), + [ + (Instrument, "Instrument", "InstrumentFamily"), + (Instrument, "Instrument", "HexColor"), + (FeatureWithAddress, "FeatureWithAddress", "Address"), + ], + ids=["enum", "newtype", "sub-model"], +) +def test_model_referencing_type_produces_used_by_entry( + model_class: type, + model_name: str, + target_name: str, +) -> None: + """Model referencing a type produces a 'used by' entry on that type.""" + model_spec = 
extract_model(model_class, entry_point=model_name) + expand_model_tree(model_spec) + all_specs = collect_all_supplementary_types([model_spec]) + + assert target_name in all_specs + + result = compute_reverse_references([model_spec], all_specs) + + assert target_name in result + entries = result[target_name] + assert len(entries) == 1 + assert entries[0].name == model_name + assert entries[0].kind == UsedByKind.MODEL + + +def test_newtype_inheriting_from_newtype_produces_used_by_entry() -> None: + """NewType inheriting constraints from another NewType produces a 'used by' entry.""" + # Id wraps NoWhitespaceString, which is also a NewType + # When we extract Id, its constraints include ConstraintSource(source="NoWhitespaceString", ...) + id_spec = extract_newtype(Id) + nws_spec = extract_newtype(NoWhitespaceString) + + all_specs = {"Id": id_spec, "NoWhitespaceString": nws_spec} + + result = compute_reverse_references([], all_specs) + + # NoWhitespaceString should have a used_by entry from Id + assert "NoWhitespaceString" in result + entries = result["NoWhitespaceString"] + assert len(entries) == 1 + assert entries[0].name == "Id" + assert entries[0].kind == UsedByKind.NEWTYPE + + +def test_union_members_have_used_by_entries() -> None: + """Union members have 'used by' entries pointing to the union feature.""" + # Create a union spec with RoadSegment as a member + union_spec = make_union_spec( + name="TestSegment", + description="Test segment union", + members=[RoadSegment], + entry_point="TestSegment", + ) + + # Extract the member + road_spec = extract_model(RoadSegment) + expand_model_tree(road_spec) + all_specs = {"RoadSegment": road_spec} + + result = compute_reverse_references([union_spec], all_specs) + + assert "RoadSegment" in result + entries = result["RoadSegment"] + assert len(entries) == 1 + assert entries[0].name == "TestSegment" + assert entries[0].kind == UsedByKind.MODEL + + +def test_self_references_filtered_out() -> None: + """Self-references are 
filtered out (handles recursive types).""" + tree_spec = extract_model(TreeNode, entry_point="TreeNode") + expand_model_tree(tree_spec) + + # Manually add TreeNode to all_specs to test self-reference filtering + all_specs = {"TreeNode": tree_spec} + + result = compute_reverse_references([tree_spec], all_specs) + + # TreeNode should not appear in result since it only references itself + assert "TreeNode" not in result + + +def test_deduplication_same_type_multiple_fields() -> None: + """Deduplication works when same type is referenced via multiple fields.""" + instrument_spec = extract_model(Instrument, entry_point="Instrument") + venue_spec = extract_model(Venue, entry_point="Venue") + expand_model_tree(instrument_spec) + expand_model_tree(venue_spec) + all_specs = collect_all_supplementary_types([instrument_spec, venue_spec]) + + assert "Id" in all_specs + + result = compute_reverse_references([instrument_spec, venue_spec], all_specs) + + assert "Id" in result + entries = result["Id"] + # Both Instrument and Venue reference Id + assert len(entries) == 2 + names = {e.name for e in entries} + assert names == {"Instrument", "Venue"} + # All should be MODELs + assert all(e.kind == UsedByKind.MODEL for e in entries) + + +def test_sorting_models_before_newtypes() -> None: + """Sorting produces models before NewTypes, alphabetical within groups.""" + # Create a test where the same type (Id) is referenced by: + # - Two models (Instrument and Venue) - both MODEL referrers + # - A NewType wrapper around Id + # Create a synthetic NewType that wraps Id + CustomId = NewType("CustomId", Id) + + instrument_spec = extract_model(Instrument, entry_point="Instrument") + venue_spec = extract_model(Venue, entry_point="Venue") + expand_model_tree(instrument_spec) + expand_model_tree(venue_spec) + all_specs = collect_all_supplementary_types([instrument_spec, venue_spec]) + + # Add the CustomId NewType which references Id + custom_id_spec = extract_newtype(CustomId) + 
all_specs["CustomId"] = custom_id_spec + + result = compute_reverse_references([instrument_spec, venue_spec], all_specs) + + # Id should have entries from both Instrument and Venue (MODELs) and CustomId (NEWTYPE) + entries = result["Id"] + assert len(entries) == 3 + + # Check sorting: MODELs first, then NEWTYPE + # Within MODELs: alphabetical (Instrument, Venue) + assert entries[0].kind == UsedByKind.MODEL + assert entries[0].name == "Instrument" + assert entries[1].kind == UsedByKind.MODEL + assert entries[1].name == "Venue" + assert entries[2].kind == UsedByKind.NEWTYPE + assert entries[2].name == "CustomId" From 38233507f11cf10b76c2d9b6812ab90caac86bef Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:04:21 -0800 Subject: [PATCH 19/38] feat(codegen): add CLI and integration tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Click-based CLI entry point (overture-codegen generate) that wires discovery → extraction → output layout → rendering: - Discovers models via discover_models() entry points - Filters themes, extracts specs, builds placement registry - Renders markdown pages with field tables, examples, cross- references, and sidebar metadata - Supports --theme filtering and --output-dir targeting Integration tests verify extraction against real Overture models (Building, Division, Segment, etc.) to catch schema drift. CLI tests verify end-to-end generation, output structure, and link integrity. 
--- .../src/overture/schema/codegen/cli.py | 195 ++++++++ .../schema/codegen/link_computation.py | 10 +- .../schema/codegen/markdown_pipeline.py | 160 +++++++ .../schema/codegen/markdown_renderer.py | 43 +- .../schema/codegen/markdown_type_format.py | 5 +- .../overture/schema/codegen/module_layout.py | 8 +- .../schema/codegen/type_collection.py | 69 +-- .../overture-schema-codegen/tests/test_cli.py | 434 ++++++++++++++++++ .../tests/test_integration_real_models.py | 229 +++++++++ .../tests/test_markdown_renderer.py | 24 +- 10 files changed, 1113 insertions(+), 64 deletions(-) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/cli.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py create mode 100644 packages/overture-schema-codegen/tests/test_cli.py create mode 100644 packages/overture-schema-codegen/tests/test_integration_real_models.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py b/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py new file mode 100644 index 000000000..ed0a15486 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py @@ -0,0 +1,195 @@ +"""CLI entrypoint for schema code generation.""" + +import json +import logging +from pathlib import Path, PurePosixPath + +import click + +from overture.schema.core.discovery import discover_models + +from .markdown_pipeline import generate_markdown_pages +from .model_extraction import extract_model +from .module_layout import ( + OUTPUT_ROOT, + compute_schema_root, + entry_point_class, + entry_point_module, +) +from .specs import ( + FeatureSpec, + is_model_class, + is_union_alias, +) +from .union_extraction import extract_union + +log = logging.getLogger(__name__) + +__all__ = ["cli"] + +_OUTPUT_FORMATS = ("markdown",) + +_FEATURE_FRONTMATTER = "---\nsidebar_position: 1\n---\n\n" + + +def _write_output( + content: str, + output_dir: Path | None, + 
output_path: PurePosixPath, +) -> None: + """Write content to a file under output_dir, or stdout.""" + if output_dir: + file_path = output_dir / output_path + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_text(content) + else: + click.echo(content) + click.echo() # separate entries with a blank line in stdout mode + + +@click.group() +def cli() -> None: + """Overture Schema code generator. + + Generate documentation and code from Pydantic schema models. + """ + + +@cli.command("list") +def list_models() -> None: + """List all discovered models.""" + models = discover_models() + names = sorted( + model.__name__ if isinstance(model, type) else str(model) + for model in models.values() + ) + for name in names: + click.echo(name) + + +@cli.command() +@click.option( + "--format", + "output_format", + required=True, + type=click.Choice(_OUTPUT_FORMATS), + help="Output format", +) +@click.option( + "--theme", + multiple=True, + help="Filter to specific theme(s); repeatable (e.g., --theme buildings --theme places)", +) +@click.option( + "--output-dir", + type=click.Path(path_type=Path), + default=None, + help="Write output to directory (default: stdout)", +) +def generate( + output_format: str, + theme: tuple[str, ...], + output_dir: Path | None, +) -> None: + """Generate code/docs from discovered models.""" + all_models = discover_models() + + # Schema root from ALL entry points (before theme filter). 
+ module_paths = [entry_point_module(k.entry_point) for k in all_models] + schema_root = compute_schema_root(module_paths) + + models = ( + {k: v for k, v in all_models.items() if k.theme in theme} + if theme + else all_models + ) + + if output_dir: + output_dir.mkdir(parents=True, exist_ok=True) + + feature_specs: list[FeatureSpec] = [] + for key, entry in models.items(): + if is_model_class(entry): + feature_specs.append(extract_model(entry, entry_point=key.entry_point)) + elif is_union_alias(entry): + feature_specs.append( + extract_union( + entry_point_class(key.entry_point), + entry, + entry_point=key.entry_point, + ) + ) + + _generate_markdown(feature_specs, schema_root, output_dir) + + +def _generate_markdown( + feature_specs: list[FeatureSpec], + schema_root: str, + output_dir: Path | None, +) -> None: + """Generate markdown with directory layout and placement-aware links.""" + pages = generate_markdown_pages(feature_specs, schema_root) + + for page in pages: + content = ( + f"{_FEATURE_FRONTMATTER}{page.content}" if page.is_feature else page.content + ) + _write_output(content, output_dir, page.path) + + if output_dir: + feature_paths = {page.path for page in pages if page.is_feature} + all_paths = {page.path for page in pages} + _write_category_files(output_dir, all_paths, feature_paths) + + +def _ancestor_dirs(paths: set[PurePosixPath]) -> set[PurePosixPath]: + """Collect all ancestor directories for a set of file paths.""" + dirs: set[PurePosixPath] = set() + for path in paths: + parent = path.parent + while parent != OUTPUT_ROOT: + dirs.add(parent) + parent = parent.parent + return dirs + + +def _top_level_positions( + dirs: set[PurePosixPath], + feature_paths: set[PurePosixPath], +) -> dict[PurePosixPath, int]: + """Assign sidebar positions: feature dirs first, then non-feature, both alphabetical.""" + feature_dir_names = {p.parts[0] for p in feature_paths} + top_level = sorted(d for d in dirs if d.parent == OUTPUT_ROOT) + feature_dirs = [d for d in 
top_level if d.name in feature_dir_names] + non_feature_dirs = [d for d in top_level if d.name not in feature_dir_names] + return {d: i for i, d in enumerate(feature_dirs + non_feature_dirs, start=1)} + + +def _write_category_files( + output_dir: Path, + all_paths: set[PurePosixPath], + feature_paths: set[PurePosixPath], +) -> None: + """Write _category_.json files for Docusaurus sidebar navigation.""" + dirs = _ancestor_dirs(all_paths) + positions = _top_level_positions(dirs, feature_paths) + + for dir_path in sorted(dirs): + label = dir_path.name.replace("_", " ").title() + category: dict[str, object] = {"label": label} + if dir_path in positions: + category["position"] = positions[dir_path] + + file_path = output_dir / dir_path / "_category_.json" + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.write_text(json.dumps(category, indent=2) + "\n") + + +def main() -> None: + """Run the CLI entry point.""" + cli() + + +if __name__ == "__main__": + main() diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py b/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py index aa508edc1..cdf1b9a17 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py @@ -31,7 +31,7 @@ def resolve_link_or_slug(self, name: str) -> str: def _is_normalized(path: PurePosixPath) -> bool: - """True when the path contains no '..' or '.' components (except root '.').""" + """Check whether the path contains no '..' or '.' components (except root '.').""" return ".." not in path.parts and path.parts.count(".") <= 1 @@ -41,8 +41,12 @@ def relative_link(source: PurePosixPath, target: PurePosixPath) -> str: Both paths must be normalized (no ``..`` components) and relative to the same output root. 
""" - assert _is_normalized(source), f"Source path not normalized: {source}" - assert _is_normalized(target), f"Target path not normalized: {target}" + if not _is_normalized(source): + msg = f"Source path not normalized: {source}" + raise ValueError(msg) + if not _is_normalized(target): + msg = f"Target path not normalized: {target}" + raise ValueError(msg) source_dir = source.parent # Count how many levels up from source_dir to common ancestor, # then descend to target. PurePosixPath doesn't have os.path.relpath, diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py new file mode 100644 index 000000000..0734a2ab9 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py @@ -0,0 +1,160 @@ +"""Markdown generation pipeline: render pages without I/O. + +Orchestrates tree expansion, type collection, placement, reverse +references, and rendering into a list of RenderedPage objects. The +caller decides what to do with them (write to disk, add frontmatter, +stream to stdout, etc.). 
+""" + +from collections.abc import Sequence +from dataclasses import dataclass +from pathlib import PurePosixPath + +import overture.schema.system.primitive as _system_primitive +from overture.schema.system.primitive import GeometryType + +from .example_loader import ExampleRecord, load_examples +from .link_computation import LinkContext +from .markdown_renderer import ( + render_enum, + render_feature, + render_geometry_from_values, + render_newtype, + render_primitives_from_specs, +) +from .model_extraction import expand_model_tree +from .path_assignment import ( + GEOMETRY_PAGE, + PRIMITIVES_PAGE, + build_placement_registry, + resolve_output_path, +) +from .primitive_extraction import ( + extract_primitives, + partition_primitive_and_geometry_names, +) +from .reverse_references import UsedByEntry, compute_reverse_references +from .specs import ( + EnumSpec, + FeatureSpec, + ModelSpec, + NewTypeSpec, + SupplementarySpec, + UnionSpec, +) +from .type_collection import collect_all_supplementary_types + +__all__ = ["RenderedPage", "generate_markdown_pages"] + + +@dataclass(frozen=True, slots=True) +class RenderedPage: + """A rendered page with its content and output path.""" + + content: str + path: PurePosixPath + is_feature: bool = False + + +def _load_model_examples( + spec: FeatureSpec, +) -> list[ExampleRecord] | None: + """Load examples for a feature spec, returning None when absent.""" + if isinstance(spec, UnionSpec): + pyproject_source = spec.members[0] if spec.members else None + validation_type = spec.source_annotation + model_fields = spec.common_base.model_fields + else: + pyproject_source = spec.source_type + validation_type = spec.source_type + model_fields = spec.source_type.model_fields if spec.source_type else {} + if not pyproject_source: + return None + field_names = [f.name for f in spec.fields] + examples = load_examples( + validation_type, + spec.name, + field_names, + pyproject_source=pyproject_source, + model_fields=model_fields, + ) + 
return examples or None + + +def _render_supplement( + name: str, + spec: SupplementarySpec, + registry: dict[str, PurePosixPath], + reverse_refs: dict[str, list[UsedByEntry]], +) -> RenderedPage: + """Render a single supplementary page (enum, NewType, or sub-model).""" + output_path = resolve_output_path(name, registry) + ctx = LinkContext(output_path, registry) + used_by = reverse_refs.get(name) + + if isinstance(spec, EnumSpec): + content = render_enum(spec, link_ctx=ctx, used_by=used_by) + elif isinstance(spec, NewTypeSpec): + content = render_newtype(spec, ctx, used_by=used_by) + elif isinstance(spec, ModelSpec): + content = render_feature(spec, ctx, used_by=used_by) + else: + raise TypeError(f"Unhandled SupplementarySpec variant: {type(spec).__name__}") + + return RenderedPage(content=content, path=output_path) + + +def generate_markdown_pages( + feature_specs: Sequence[FeatureSpec], + schema_root: str, +) -> list[RenderedPage]: + """Generate all markdown pages from feature specs. + + Returns rendered pages without writing to disk. The caller handles + I/O, frontmatter injection, and any output-format-specific concerns + (like Docusaurus category files). 
+ """ + cache: dict[type, ModelSpec] = {} + for spec in feature_specs: + expand_model_tree(spec, cache) + + primitive_names, geometry_names = partition_primitive_and_geometry_names( + _system_primitive + ) + all_specs = collect_all_supplementary_types(feature_specs) + registry = build_placement_registry( + feature_specs, all_specs, primitive_names, geometry_names, schema_root + ) + + reverse_refs = compute_reverse_references(feature_specs, all_specs) + + pages: list[RenderedPage] = [] + + for spec in feature_specs: + output_path = registry[spec.name] + ctx = LinkContext(output_path, registry) + examples = _load_model_examples(spec) + used_by = reverse_refs.get(spec.name) + content = render_feature(spec, link_ctx=ctx, examples=examples, used_by=used_by) + pages.append(RenderedPage(content=content, path=output_path, is_feature=True)) + + for name, supp_spec in all_specs.items(): + pages.append(_render_supplement(name, supp_spec, registry, reverse_refs)) + + pages.append( + RenderedPage( + content=render_primitives_from_specs( + extract_primitives(primitive_names, _system_primitive) + ), + path=PRIMITIVES_PAGE, + ) + ) + + pages.append( + RenderedPage( + content=render_geometry_from_values([m.value for m in GeometryType]), + path=GEOMETRY_PAGE, + ) + ) + + return pages diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index c52a89d2c..25a49da48 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -123,10 +123,10 @@ class _FieldRow(TypedDict): def _unwrap_paragraphs(text: str) -> str: - """Unwrap hard-wrapped lines within paragraphs, preserving paragraph breaks. + r"""Unwrap hard-wrapped lines within paragraphs, preserving paragraph breaks. 
Splits on blank lines (paragraph boundaries), replaces single newlines - within each paragraph with spaces, then rejoins with ``\\n\\n``. + within each paragraph with spaces, then rejoins with ``\n\n``. Matches markdown's treatment of newlines within paragraphs. """ paragraphs = _PARAGRAPH_BREAK_RE.split(text) @@ -146,11 +146,18 @@ def _sanitize_for_table_cell(text: str) -> str: return text.replace("|", "\\|") +def _truncate(text: str) -> str: + """Truncate text to ``_EXAMPLE_TRUNCATION_LIMIT`` chars, adding ellipsis.""" + if len(text) > _EXAMPLE_TRUNCATION_LIMIT: + return text[:_EXAMPLE_TRUNCATION_LIMIT] + "..." + return text + + def _format_example_value(value: object) -> str: """Format an example value for display in a markdown Column | Value table. All non-empty values render in backticks for consistent monospace - formatting. Long strings are truncated before wrapping. + formatting. Long representations are truncated before wrapping. """ if value is None: return "`null`" @@ -161,17 +168,15 @@ def _format_example_value(value: object) -> str: if isinstance(value, str): if value == "": return "" - if len(value) > _EXAMPLE_TRUNCATION_LIMIT: - value = value[:_EXAMPLE_TRUNCATION_LIMIT] + "..." - return f"`{value}`" + return f"`{_truncate(value)}`" if isinstance(value, list): items = ", ".join(str(item) for item in value) - return f"`[{items}]`" + return f"`{_truncate(f'[{items}]')}`" if isinstance(value, dict): pairs = ", ".join(f"{k}: {v}" for k, v in value.items()) - return f"`{{{pairs}}}`" + return f"`{_truncate(f'{{{pairs}}}')}`" return f"`{value}`" @@ -308,10 +313,14 @@ def _expand_model_fields( def _short_variant_name(class_name: str, union_name: str) -> str: """Strip common suffix to produce short variant name. 
- Examples: - RoadSegment, Segment -> Road - WaterSegment, Segment -> Water - Building, Building -> Building + Examples + -------- + >>> _short_variant_name("RoadSegment", "Segment") + 'Road' + >>> _short_variant_name("WaterSegment", "Segment") + 'Water' + >>> _short_variant_name("Building", "Building") + 'Building' """ if class_name.endswith(union_name): short = class_name[: -len(union_name)] @@ -492,7 +501,7 @@ def render_newtype( _INT64_MIN = -(2**63) _INT64_MAX = 2**63 - 1 -_Bound = int | float | None +_NumericBound = int | float | None # IEEE 754 precision by bit width — formatting knowledge, not schema data. _FLOAT_PRECISION: dict[int, str] = {32: "~7 decimal digits", 64: "~15 decimal digits"} @@ -522,10 +531,10 @@ def _format_interval(bounds: Interval) -> str: """ # Interval fields are typed as Supports* protocols; narrow to numeric # since we only encounter int/float constraints from the schema. - ge = cast(_Bound, bounds.ge) - gt = cast(_Bound, bounds.gt) - le = cast(_Bound, bounds.le) - lt = cast(_Bound, bounds.lt) + ge = cast(_NumericBound, bounds.ge) + gt = cast(_NumericBound, bounds.gt) + le = cast(_NumericBound, bounds.le) + lt = cast(_NumericBound, bounds.lt) # Both bounds inclusive: compact "lower to upper" form if ge is not None and le is not None: diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py index 976505e7f..0879e78fb 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -58,8 +58,9 @@ def _markdown_type_name(ti: TypeInfo) -> str: def format_dict_type(ti: TypeInfo) -> str: """Format a dict TypeInfo as bare ``map`` using resolved markdown names.""" - assert ti.dict_key_type is not None - assert ti.dict_value_type is not None + if ti.dict_key_type is None or ti.dict_value_type 
is None: + msg = f"format_dict_type requires dict key/value types, got {ti}" + raise ValueError(msg) key = _markdown_type_name(ti.dict_key_type) value = _markdown_type_name(ti.dict_value_type) return f"map<{key}, {value}>" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py b/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py index 27eb02057..f7b8f0a44 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py @@ -11,7 +11,7 @@ from pathlib import PurePosixPath __all__ = [ - "ROOT_DIR", + "OUTPUT_ROOT", "compute_output_dir", "compute_schema_root", "entry_point_class", @@ -21,7 +21,7 @@ "output_dir_for_entry_point", ] -ROOT_DIR = PurePosixPath(".") +OUTPUT_ROOT = PurePosixPath(".") def _split_entry_point(entry_point_path: str) -> tuple[str, str]: @@ -139,12 +139,12 @@ def compute_output_dir( """ relpath = module_relpath(module, schema_root) if not relpath: - return ROOT_DIR + return OUTPUT_ROOT parts = relpath.split(".") if not is_package_module(module, module_registry): parts = parts[:-1] if not parts: - return ROOT_DIR + return OUTPUT_ROOT return PurePosixPath(*parts) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py index 1e2a137b4..7373c0fb7 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py @@ -11,7 +11,7 @@ from .model_extraction import extract_model from .newtype_extraction import extract_newtype from .specs import FeatureSpec, FieldSpec, ModelSpec, SupplementarySpec -from .type_analyzer import TypeInfo, TypeKind, analyze_type, is_newtype +from .type_analyzer import TypeInfo, TypeKind, analyze_type, is_newtype, walk_type_info from 
.type_registry import is_semantic_newtype __all__ = ["collect_all_supplementary_types"] @@ -59,39 +59,40 @@ def _collect_inner_newtypes(newtype_ref: object) -> None: break def _collect_from_type_info(ti: TypeInfo) -> None: - """Collect supplementary types from a single TypeInfo.""" - if ti.kind == TypeKind.UNION: - if not ti.union_members: - return - # Walk each member's fields for supplementary types. - # Members that are also top-level feature specs are skipped - # by the feature_names guard in _collect_from_model. - for member_cls in ti.union_members: - member_spec = extract_model(member_cls) - _collect_from_model(member_spec) - return - if ti.kind == TypeKind.ENUM and ti.source_type is not None: - name = ti.source_type.__name__ - if name not in all_specs: - all_specs[name] = extract_enum(ti.source_type) - - # Semantic NewTypes always get extracted, including intermediate - # NewTypes in the wrapping chain (e.g., Id wraps NoWhitespaceString - # wraps str — both Id and NoWhitespaceString get pages). - if ( - ti.newtype_ref is not None - and ti.newtype_name is not None - and is_semantic_newtype(ti) - and ti.newtype_name not in all_specs - ): - all_specs[ti.newtype_name] = extract_newtype(ti.newtype_ref) - _collect_inner_newtypes(ti.newtype_ref) - - # Dict key/value types can also reference supplementary types - if ti.dict_key_type is not None: - _collect_from_type_info(ti.dict_key_type) - if ti.dict_value_type is not None: - _collect_from_type_info(ti.dict_value_type) + """Collect supplementary types from a single TypeInfo. + + Uses walk_type_info for dict key/value recursion. Handles all + TypeKind variants without early returns so newtype extraction + and dict recursion apply regardless of kind. + """ + + def _visit(node: TypeInfo) -> None: + if node.kind == TypeKind.UNION and node.union_members: + # Walk each member's fields for supplementary types. 
+ # Members that are also top-level feature specs are skipped + # by the feature_names guard in _collect_from_model. + for member_cls in node.union_members: + member_spec = extract_model(member_cls) + _collect_from_model(member_spec) + + if node.kind == TypeKind.ENUM and node.source_type is not None: + name = node.source_type.__name__ + if name not in all_specs: + all_specs[name] = extract_enum(node.source_type) + + # Semantic NewTypes always get extracted, including intermediate + # NewTypes in the wrapping chain (e.g., Id wraps NoWhitespaceString + # wraps str — both Id and NoWhitespaceString get pages). + if ( + node.newtype_ref is not None + and node.newtype_name is not None + and is_semantic_newtype(node) + and node.newtype_name not in all_specs + ): + all_specs[node.newtype_name] = extract_newtype(node.newtype_ref) + _collect_inner_newtypes(node.newtype_ref) + + walk_type_info(ti, _visit) def _collect_from_fields(fields: list[FieldSpec]) -> None: # A single field can match multiple conditions (e.g., Sources is both diff --git a/packages/overture-schema-codegen/tests/test_cli.py b/packages/overture-schema-codegen/tests/test_cli.py new file mode 100644 index 000000000..a85ceb52a --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_cli.py @@ -0,0 +1,434 @@ +"""Tests for CLI entrypoint.""" + +import json +import re +from pathlib import Path + +import pytest +from click.testing import CliRunner +from overture.schema.codegen.cli import cli +from overture.schema.codegen.specs import ModelSpec + + +class TestCliList: + """Tests for the list command.""" + + def test_list_command_exists(self, cli_runner: CliRunner) -> None: + """list command should be available.""" + result = cli_runner.invoke(cli, ["list"]) + assert result.exit_code == 0 + + def test_list_shows_discovered_models(self, cli_runner: CliRunner) -> None: + """list command should show discovered models.""" + result = cli_runner.invoke(cli, ["list"]) + + assert "Building" in result.output + assert 
"Place" in result.output + + +class TestCliGenerate: + """Tests for the generate command.""" + + def test_generate_command_exists(self, cli_runner: CliRunner) -> None: + """generate command should be available.""" + result = cli_runner.invoke(cli, ["generate", "--help"]) + + assert result.exit_code == 0 + assert "Generate" in result.output or "generate" in result.output + + def test_generate_requires_format(self, cli_runner: CliRunner) -> None: + """generate command should require --format.""" + result = cli_runner.invoke(cli, ["generate"]) + assert result.exit_code != 0 + + def test_generate_markdown_to_stdout(self, cli_runner: CliRunner) -> None: + """generate --format markdown should output markdown to stdout.""" + result = cli_runner.invoke(cli, ["generate", "--format", "markdown"]) + + assert result.exit_code == 0 + assert "# Building" in result.output or "# " in result.output + + def test_generate_with_theme_filter(self, cli_runner: CliRunner) -> None: + """generate --theme should filter to specific theme.""" + result = cli_runner.invoke( + cli, ["generate", "--format", "markdown", "--theme", "buildings"] + ) + + assert result.exit_code == 0 + assert "Building" in result.output + assert "Place" not in result.output + + def test_generate_markdown_feature_at_theme_level( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Markdown features go directly in theme directory.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + # Feature models at theme level + assert (tmp_path / "buildings" / "building.md").exists() + assert (tmp_path / "buildings" / "building_part.md").exists() + + # NOT in subdirectories + assert not (tmp_path / "buildings" / "building" / "building.md").exists() + + def test_feature_pages_have_sidebar_position( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Feature pages include 
sidebar_position frontmatter.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + content = (tmp_path / "buildings" / "building.md").read_text() + assert content.startswith("---\nsidebar_position: 1\n---\n") + + def test_generate_markdown_shared_types_mirror_modules( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Core/system types land in module-mirrored directories.""" + result = cli_runner.invoke( + cli, + ["generate", "--format", "markdown", "--output-dir", str(tmp_path)], + ) + assert result.exit_code == 0 + + core_dir = tmp_path / "core" + assert core_dir.exists(), "core/ directory should exist" + subdirs = [d.name for d in core_dir.iterdir() if d.is_dir()] + assert len(subdirs) > 0, "core/ should have subdirectories" + + def test_generate_multiple_themes_to_output_dir( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """generate all themes should create subdirectories for each theme.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + subdirs = [d.name for d in tmp_path.iterdir() if d.is_dir()] + assert "buildings" in subdirs + assert "places" in subdirs + + def test_generate_no_duplicate_files( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """No type should produce duplicate output files.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + all_files = list(tmp_path.rglob("*.md")) + all_paths = [str(f.relative_to(tmp_path)) for f in all_files] + assert len(all_paths) == len(set(all_paths)), ( + f"Duplicate files: {[p for p in all_paths if all_paths.count(p) > 1]}" + ) + + +class TestCliGenerateLinkIntegrity: + """Verify all markdown links resolve to 
existing files.""" + + def test_all_links_resolve(self, cli_runner: CliRunner, tmp_path: Path) -> None: + """Every markdown link target should exist as a file.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + link_re = re.compile(r"\[.*?\]\(([^)]+\.md(?:#[^)]*)?)\)") + broken: list[str] = [] + + for md_file in tmp_path.rglob("*.md"): + content = md_file.read_text() + for match in link_re.finditer(content): + href = match.group(1).split("#")[0] + # Resolve relative path from the file's directory + target = (md_file.parent / href).resolve() + if not target.exists(): + rel = md_file.relative_to(tmp_path) + broken.append(f"{rel}: {href}") + + assert not broken, "Broken links:\n" + "\n".join(broken) + + +class TestCliGenerateCategoryFiles: + """Tests for _category_.json generation.""" + + def test_generates_category_files( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Should generate _category_.json files in output directories.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + # Theme directory should have a category file + cat_file = tmp_path / "buildings" / "_category_.json" + assert cat_file.exists() + data = json.loads(cat_file.read_text()) + assert data["label"] == "Buildings" + + def test_core_directory_has_category_file( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """core/ directory should have _category_.json.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + cat_file = tmp_path / "core" / "_category_.json" + assert cat_file.exists() + data = json.loads(cat_file.read_text()) + assert data["label"] == "Core" + + def 
test_feature_dirs_positioned_before_non_feature_dirs( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Feature directories should have lower position than non-feature directories.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + def pos(dir_name: str) -> int: + data = json.loads((tmp_path / dir_name / "_category_.json").read_text()) + result: int = data["position"] + return result + + # Feature directories (contain feature pages) should sort before + # non-feature directories (core, system -- shared types only) + feature_positions = [pos("buildings"), pos("places"), pos("transportation")] + non_feature_positions = [pos("core"), pos("system")] + + assert max(feature_positions) < min(non_feature_positions) + + def test_subdirectories_have_no_position( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Only top-level directories get position values.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + data = json.loads( + (tmp_path / "core" / "scoping" / "_category_.json").read_text() + ) + assert "position" not in data + + +class TestCliGenerateEnums: + """Tests for enum generation in the generate command.""" + + def test_generate_markdown_includes_enum_files( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """generate --format markdown should create enum documentation files.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + + # Enum files exist somewhere under the buildings directory + all_md = list((tmp_path / "buildings").rglob("*.md")) + all_names = [f.stem for f in all_md] + + assert "building" in all_names + + # Should have enum files beyond the feature 
models + non_feature = [n for n in all_names if n not in ("building", "building_part")] + assert len(non_feature) > 0, "Should generate enum documentation files" + + +class TestCliEntryPoint: + """generate populates entry_point from discovery keys.""" + + def test_generate_sets_entry_point_on_specs( + self, cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch + ) -> None: + captured: list[ModelSpec] = [] + + def spy(feature_specs: list, schema_root: str, output_dir: object) -> None: + captured.extend(feature_specs) + + monkeypatch.setattr("overture.schema.codegen.cli._generate_markdown", spy) + result = cli_runner.invoke( + cli, ["generate", "--format", "markdown", "--theme", "buildings"] + ) + + assert result.exit_code == 0 + assert len(captured) > 0 + for spec in captured: + assert spec.entry_point is not None, f"{spec.name} missing entry_point" + assert ":" in spec.entry_point, ( + f"entry_point should be entry-point style: {spec.entry_point!r}" + ) + + +class TestCliHelp: + """Tests for CLI help.""" + + def test_main_help(self, cli_runner: CliRunner) -> None: + """--help should show usage information.""" + result = cli_runner.invoke(cli, ["--help"]) + + assert result.exit_code == 0 + assert "generate" in result.output + assert "list" in result.output + + +class TestGenerateWithSegment: + """Integration test: Segment union produces markdown output.""" + + def test_segment_appears_in_markdown_output( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Generate markdown and verify Segment page exists.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "transportation", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + # Segment page should exist + segment_files = list(tmp_path.rglob("segment.md")) + assert len(segment_files) >= 1, f"No segment.md found in {tmp_path}" + + content = segment_files[0].read_text() + assert "# Segment" in content + assert "subtype" in content + 
+ +class TestReverseReferences: + """Integration test: Reverse references appear in generated markdown.""" + + def test_used_by_sections_appear_in_markdown( + self, cli_runner: CliRunner, tmp_path: Path + ) -> None: + """Generate markdown and verify Used By sections appear.""" + result = cli_runner.invoke( + cli, + [ + "generate", + "--format", + "markdown", + "--theme", + "buildings", + "--output-dir", + str(tmp_path), + ], + ) + assert result.exit_code == 0 + + # Find a supplementary type that should have Used By section + # For example, if Building references some enum or NewType + all_md = list(tmp_path.rglob("*.md")) + + # At least one supplementary type should have a Used By section + has_used_by = False + for md_file in all_md: + content = md_file.read_text() + if "## Used By" in content: + has_used_by = True + break + + assert has_used_by, "No 'Used By' sections found in any generated markdown" diff --git a/packages/overture-schema-codegen/tests/test_integration_real_models.py b/packages/overture-schema-codegen/tests/test_integration_real_models.py new file mode 100644 index 000000000..447d64ff2 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_integration_real_models.py @@ -0,0 +1,229 @@ +"""Integration tests against real Overture models. + +These tests validate the extraction layer against actual models from +the installed Overture schema packages. 
+""" + +import pytest +from codegen_test_support import assert_literal_field +from overture.schema.codegen.markdown_renderer import render_feature +from overture.schema.codegen.model_extraction import extract_model +from overture.schema.codegen.specs import ( + ModelSpec, + UnionSpec, + filter_model_classes, +) +from overture.schema.codegen.type_analyzer import TypeKind +from overture.schema.codegen.union_extraction import extract_union +from overture.schema.core.discovery import discover_models +from overture.schema.transportation import Segment +from overture.schema.transportation.segment.models import RoadSegment +from pydantic import BaseModel + + +class TestDiscoverModels: + """Tests for model discovery.""" + + def test_discover_models_returns_dict(self) -> None: + """discover_models() should return a dictionary.""" + models = discover_models() + assert isinstance(models, dict) + + def test_discover_models_finds_building( + self, building_class: type[BaseModel] + ) -> None: + """Should discover the Building model.""" + assert issubclass(building_class, BaseModel) + + def test_discover_models_finds_place(self, place_class: type[BaseModel]) -> None: + """Should discover the Place model.""" + assert issubclass(place_class, BaseModel) + + def test_discover_models_returns_multiple_themes(self) -> None: + """Should discover models from multiple themes.""" + models = discover_models() + assert len(models) >= 3, f"Expected at least 3 models, got {len(models)}" + + +class TestExtractBuildingModel: + """Tests for extracting the Building model.""" + + def test_extract_building_has_name(self, building_spec: ModelSpec) -> None: + """Building model spec should have correct name.""" + assert building_spec.name == "Building" + + def test_extract_building_has_theme_type(self, building_spec: ModelSpec) -> None: + """Building should have theme='buildings', type='building' as Literal fields.""" + assert_literal_field(building_spec, "theme", "buildings") + 
assert_literal_field(building_spec, "type", "building") + + def test_extract_building_has_fields(self, building_spec: ModelSpec) -> None: + """Building should have multiple fields.""" + assert len(building_spec.fields) > 0, "Building should have at least one field" + field_names = {f.name for f in building_spec.fields} + assert "id" in field_names + + def test_building_field_types_are_valid(self, building_spec: ModelSpec) -> None: + """All Building fields should have valid TypeInfo.""" + for field in building_spec.fields: + assert field.type_info is not None + assert field.type_info.kind in TypeKind + + +class TestExtractPlaceModel: + """Tests for extracting the Place model.""" + + def test_extract_place_has_theme_type(self, place_class: type[BaseModel]) -> None: + """Place should have theme='places', type='place' as Literal fields.""" + spec = extract_model(place_class) + assert_literal_field(spec, "theme", "places") + assert_literal_field(spec, "type", "place") + + def test_place_has_fields(self, place_class: type[BaseModel]) -> None: + """Place model should have fields.""" + spec = extract_model(place_class) + assert len(spec.fields) > 0 + + +class TestExtractDivisionModel: + """Tests for extracting Division model.""" + + def test_extract_division_theme_type(self, division_class: type[BaseModel]) -> None: + """Division should have theme='divisions', type='division' as Literal fields.""" + spec = extract_model(division_class) + assert_literal_field(spec, "theme", "divisions") + assert_literal_field(spec, "type", "division") + + +class TestFieldTypeAnalysis: + """Tests that analyze_type handles real model field types correctly.""" + + def test_no_analyze_type_crashes(self, all_discovered_models: dict) -> None: + """extract_model should not crash on any discovered model.""" + for model_class in filter_model_classes(all_discovered_models): + spec = extract_model(model_class) + assert spec.name == model_class.__name__ + + def test_all_field_types_resolved(self, 
all_discovered_models: dict) -> None: + """All fields should have resolved TypeInfo.""" + for model_class in filter_model_classes(all_discovered_models): + spec = extract_model(model_class) + for field in spec.fields: + assert field.type_info.base_type, ( + f"No base_type for {spec.name}.{field.name}" + ) + assert field.type_info.kind in TypeKind, ( + f"Invalid kind for {spec.name}.{field.name}" + ) + + +class TestMarkdownRenderingRealModels: + """Tests for markdown rendering with real models.""" + + def test_render_building_content(self, building_class: type[BaseModel]) -> None: + """Building renders with title, field table, and expected fields.""" + markdown = render_feature(extract_model(building_class)) + + assert "# Building" in markdown + assert "| Name |" in markdown + assert "| Type |" in markdown + assert "id" in markdown + assert "geometry" in markdown + + def test_render_all_models_without_crash(self, all_discovered_models: dict) -> None: + """render_feature should not crash on any discovered model.""" + for model_class in filter_model_classes(all_discovered_models): + markdown = render_feature(extract_model(model_class)) + assert isinstance(markdown, str) + assert len(markdown) > 0 + + +class TestDiscriminatedUnions: + """Tests for discriminated union types like Segment. + + Segment is registered as a discriminated union (type alias), not a class. + The extraction layer handles the individual union members (RoadSegment, + RailSegment, WaterSegment) but not the union itself. 
+ """ + + def test_segment_is_not_a_class(self) -> None: + """Segment discovery returns a type alias, not a class.""" + models = discover_models() + segment_entries = [ + (k, v) for k, v in models.items() if "segment" in str(k).lower() + ] + + assert len(segment_entries) == 1 + _key, segment = segment_entries[0] + + assert not isinstance(segment, type) + + def test_individual_segment_types_extractable(self) -> None: + """Individual segment member types have expected theme/type literals.""" + spec = extract_union("Segment", Segment) + for member_cls in spec.members: + member_spec = extract_model(member_cls) + assert_literal_field(member_spec, "theme", "transportation") + assert_literal_field(member_spec, "type", "segment") + + def test_road_segment_has_road_specific_fields(self) -> None: + """RoadSegment should have road-specific fields.""" + spec = extract_model(RoadSegment) + field_names = {f.name for f in spec.fields} + + assert "subtype" in field_names + + +class TestSegmentUnionExtraction: + """Tests for extracting the real Segment discriminated union.""" + + @pytest.fixture + def segment_spec(self) -> UnionSpec: + """Extract Segment union spec.""" + return extract_union("Segment", Segment) + + def test_segment_extract_union_succeeds(self, segment_spec: UnionSpec) -> None: + """extract_union works on the real Segment type alias.""" + assert segment_spec.name == "Segment" + assert len(segment_spec.members) == 3 + + def test_segment_has_shared_fields(self, segment_spec: UnionSpec) -> None: + """Segment UnionSpec has shared fields from TransportationSegment.""" + shared = [ + af for af in segment_spec.annotated_fields if af.variant_sources is None + ] + shared_names = {af.field_spec.name for af in shared} + # All segments share these base fields + assert "geometry" in shared_names + assert "subtype" in shared_names + assert "id" in shared_names + + def test_segment_has_variant_fields(self, segment_spec: UnionSpec) -> None: + """Segment UnionSpec has 
variant-specific fields.""" + variant = [ + af for af in segment_spec.annotated_fields if af.variant_sources is not None + ] + variant_names = {af.field_spec.name for af in variant} + # RoadSegment has these specific fields + assert "road_flags" in variant_names + assert "road_surface" in variant_names + assert len(variant_names) > 0 + + def test_segment_discriminator_extracted_from_callable( + self, segment_spec: UnionSpec + ) -> None: + """Segment callable discriminator is resolved via _field_name.""" + assert segment_spec.discriminator_field == "subtype" + assert segment_spec.discriminator_mapping is not None + assert len(segment_spec.discriminator_mapping) == 3 + # Keys are str(enum_member), e.g. "Subtype.ROAD" + road_key = next(k for k in segment_spec.discriminator_mapping if "ROAD" in k) + assert segment_spec.discriminator_mapping[road_key] is RoadSegment + + def test_segment_common_base_is_base_model(self, segment_spec: UnionSpec) -> None: + """Segment common_base is the shared base class.""" + assert segment_spec.common_base is not None + assert issubclass(segment_spec.common_base, BaseModel) + # Verify common base has expected fields + assert "geometry" in segment_spec.common_base.model_fields + assert "id" in segment_spec.common_base.model_fields diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index d01e63b2b..356c3cb3a 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -241,11 +241,8 @@ class ModelWithRequired(BaseModel): spec = extract_model(ModelWithRequired) result = render_feature(spec) - # Should have backtick-quoted field name assert "| `name` |" in result - # Type should be string without optional - assert "| `string` |" in result or "string" in result - # Description should be present + assert "| `string` |" in result assert "The name" in result def 
test_renders_optional_field(self) -> None: @@ -976,6 +973,25 @@ def test_list_renders_comma_separated(self) -> None: assert _format_example_value(["a", "b"]) == "`[a, b]`" assert _format_example_value([]) == "`[]`" + def test_long_list_truncated(self) -> None: + """Lists longer than truncation limit are truncated with ellipsis.""" + long_list = list(range(200)) + result = _format_example_value(long_list) + assert result.startswith("`[0, 1, 2,") + assert result.endswith("...`") + # Content between backticks is at most 103 chars (100 + "...") + inner = result[1:-1] # strip backticks + assert len(inner) <= 103 + + def test_long_dict_truncated(self) -> None: + """Dicts longer than truncation limit are truncated with ellipsis.""" + long_dict = {f"key_{i}": f"value_{i}" for i in range(50)} + result = _format_example_value(long_dict) + assert result.startswith("`{key_0:") + assert result.endswith("...`") + inner = result[1:-1] + assert len(inner) <= 103 + def test_pipe_character_not_escaped_in_backticks(self) -> None: """Pipe characters need no escaping inside backticks.""" From 8b0d396845b9ba64bdc01367b4ea085164a18b7d Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 24 Feb 2026 19:30:17 -0800 Subject: [PATCH 20/38] docs(codegen): add design doc, walkthrough, and README Design doc covers the four-layer architecture, analyze_type(), domain-specific extractors, and extension points for new output targets. Walkthrough traces Segment through the full pipeline module-by-module in dependency order, with FeatureVersion as a secondary example for constraint provenance in the type analyzer. README describes the problem (Pydantic flattens domain vocabulary), the "unwrap once, render many" approach, CLI usage, architecture overview, and programmatic API. 
--- packages/overture-schema-codegen/README.md | 114 ++- .../overture-schema-codegen/docs/design.md | 254 ++++++ .../docs/walkthrough.md | 736 ++++++++++++++++++ 3 files changed, 1096 insertions(+), 8 deletions(-) create mode 100644 packages/overture-schema-codegen/docs/design.md create mode 100644 packages/overture-schema-codegen/docs/walkthrough.md diff --git a/packages/overture-schema-codegen/README.md b/packages/overture-schema-codegen/README.md index 4cb93011d..ce7bf5c59 100644 --- a/packages/overture-schema-codegen/README.md +++ b/packages/overture-schema-codegen/README.md @@ -1,20 +1,118 @@ # Overture Schema Codegen -Code generator that produces documentation and code from Pydantic models. +Generates documentation from Overture Maps Pydantic schema definitions. -## Installation +Pydantic's `model_json_schema()` flattens the schema's domain vocabulary into JSON +Schema primitives. NewType names disappear, constraint provenance is lost (which NewType +contributed which bound), custom constraint classes lose their identity (a +`GeometryTypeConstraint` becomes an anonymous `enum` array), and discriminated union +structure collapses into `anyOf` arrays with duplicated fields. + +Navigating Python's type annotation machinery -- NewType chains, nested `Annotated` +wrappers, union filtering, generic resolution -- is complex. The codegen does it once. +`analyze_type()` unwraps annotations into `TypeInfo`, a flat target-independent +representation. Extractors build specs from `TypeInfo`. Renderers consume specs without +touching the type system. New output targets (Arrow schemas, PySpark expressions) add +renderers, not extraction logic. 
+ +## Usage ```bash -pip install overture-schema-codegen +# Generate markdown documentation for all themes +overture-codegen generate --format markdown --output-dir docs/schema/reference + +# Generate for a single theme +overture-codegen generate --format markdown --theme buildings --output-dir out/ + +# List discovered models +overture-codegen list ``` -## Usage +The generator discovers models via `overture.models` entry points (provided by theme +packages like `overture-schema-buildings-theme`), extracts type information, and renders +output pages with cross-page links, constraint descriptions, and validated examples. + +## Architecture + +Four layers with strict downward imports -- no layer references the one above it: + +```text +Rendering Output formatting, all presentation decisions + ^ +Output Layout What to generate, where it goes, how outputs link + ^ +Extraction TypeInfo, FieldSpec, ModelSpec, UnionSpec + ^ +Discovery discover_models() from overture-schema-core +``` + +**Discovery** loads registered Pydantic models via entry points. The return dict +includes both concrete `BaseModel` subclasses (like `Building`) and discriminated union +type aliases (like `Segment`). Both satisfy the `FeatureSpec` protocol and flow through +the same pipeline. + +**Extraction** unwraps type annotations into specs. `analyze_type()` is the central +function -- a single iterative loop that peels NewType, Annotated, Union, and container +wrappers, accumulating constraints tagged with the NewType that contributed them. +Domain-specific extractors (`model_extraction`, `union_extraction`, `enum_extraction`, +`newtype_extraction`, `primitive_extraction`) call `analyze_type()` for field types and +produce spec dataclasses. + +**Output Layout** determines what artifacts to generate and where they go. Supplementary +type collection walks expanded feature trees to find referenced enums, NewTypes, and +sub-models. 
Path assignment maps every type to an output file path mirroring the Python
+module structure. Link computation and reverse references enable cross-page navigation.
+
+**Rendering** consumes specs and owns all presentation decisions. Markdown output uses
+Jinja2 templates for feature pages (with field tables, constraint sections, and
+examples), enum pages, NewType pages, and aggregate primitive/geometry reference pages.
+
+`markdown_pipeline.py` orchestrates the full pipeline without I/O, returning
+`list[RenderedPage]`. The CLI writes files to disk with Docusaurus frontmatter.
+
+## Programmatic use
 
 ```python
-from overture.schema.codegen import analyze_type, TypeInfo, TypeKind
+from overture.schema.codegen.type_analyzer import analyze_type, TypeKind
 
-# Analyze a type annotation
-info = analyze_type(str)
-assert info.base_type == "str"
+info = analyze_type(some_annotation)
 assert info.kind == TypeKind.PRIMITIVE
+assert info.base_type == "int32"
+assert info.newtype_name == "FeatureVersion"
+# Constraints carry provenance:
+for cs in info.constraints:
+    print(f"{cs.constraint} from {cs.source}")
+```
+
+## Fetching sample data
+
+Theme packages include example records in their `pyproject.toml` files under
+`[[examples.<feature_type>]]` sections. The codegen validates these against Pydantic
+models and renders them in feature pages.
+
+To fetch a fresh sample from the latest Overture release using DuckDB:
+
+```bash
+duckdb -json \
+  -c "load spatial" \
+  -c "attach 'http://labs.overturemaps.org/data/latest.ddb' as overture" \
+  -c "select to_json(columns(*))
+      from (
+        select * REPLACE ST_AsText(geometry) as geometry
+        from overture.place
+        USING SAMPLE 1
+      )" \
+  | jq .
 ```
+
+The `latest.ddb` database always points to the current release. Tables use
+the type name directly (`overture.place`, `overture.segment`,
+`overture.building`, etc.). Convert the JSON output to TOML for inclusion in
+the theme's `pyproject.toml`.
+ +## Further reading + +- [Design document](docs/design.md) -- architecture, extension points, data flow + diagrams +- [Walkthrough](docs/walkthrough.md) -- module-by-module narrative tracing Segment + through the full pipeline diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md new file mode 100644 index 000000000..d52ec931e --- /dev/null +++ b/packages/overture-schema-codegen/docs/design.md @@ -0,0 +1,254 @@ +# Code Generator Design + +Code generator that produces documentation and code from Overture Maps Pydantic schema +definitions. + +## Problem + +Overture Maps schema definitions live in Pydantic models across theme packages. Each +model carries type annotations, field constraints, docstrings, and relationships +(inheritance, composition, discriminated unions). Generating documentation or code from +these models requires introspecting all of that structure and rendering it into output +formats. + +Pydantic's internal representation is JSON-schema-oriented and discards the vocabulary +the code generator needs to preserve. `model_json_schema()` flattens `FeatureVersion` (a +NewType wrapping `int32` wrapping `Annotated[int, Field(ge=0, le=2^31-1)]`) to `{"type": +"integer", "minimum": 0}` -- the NewType names `FeatureVersion` and `int32` are gone, +custom constraint classes (`GeometryTypeConstraint`, `UniqueItemsConstraint`) are gone, +Python class references are gone, and constraint provenance (which NewType contributed +which bound) is gone. `FieldInfo.annotation` gives the raw annotation, but Pydantic does +not unwrap NewType chains or track multi-depth constraint provenance. + +The schema's domain language -- custom primitives (`int32`, `float64`), semantic +NewTypes (`FeatureVersion`, `Sources`), and custom constraint classes -- needs to +survive extraction intact. 
A single field annotation like `NewType("Foo", +Annotated[list[SomeModel] | None, Field(ge=0)])` encodes optionality, collection type, +element type, constraints, and semantic naming in nested Python typing constructs. Type +definitions regularly nest `Annotated` inside `NewType` inside `Annotated` -- +`FeatureVersion = NewType("FeatureVersion", int32)` where `int32 = NewType("int32", +Annotated[int, Field(ge=...)])` -- and constraints at each depth need to be tagged with +the NewType that contributed them. + +The code generator solves this by extracting type information once into a flat, +navigable representation (`TypeInfo`), then passing that to renderers that produce +output without touching Python's type system. + +## Inputs and Outputs + +**Inputs**: Pydantic `BaseModel` subclasses discovered via `overture.models` entry +points, plus example data from theme `pyproject.toml` files. Examples serve two +purposes: rendered examples in documentation pages, and a starting point for generating +tests that verify behavior of generated code. + +**Current Outputs**: Markdown documentation pages with field tables, cross-page links, +constraint descriptions, and examples. + +**Planned outputs**: Arrow schemas, PySpark expressions. + +## Architecture + +Four layers with strict downward imports -- no layer references the one above it: + +```text +Rendering Output formatting, all presentation decisions + ^ +Output Layout What to generate, where it goes, how outputs link + ^ +Extraction TypeInfo, FieldSpec, ModelSpec, EnumSpec, ... + ^ +Discovery discover_models() from overture-schema-core +``` + +`markdown_pipeline.py` orchestrates the pipeline without I/O: it expands feature trees, +collects supplementary types, builds placement registries, computes reverse references, +and calls renderers -- returning `RenderedPage` objects. The CLI (`cli.py`) is a thin +Click wrapper that calls `generate_markdown_pages()` and writes files to disk. 
+ +```mermaid +graph TD + subgraph Discovery + DM["discover_models()"] + end + + DM -->|"dict[ModelKey, type]"| EX + + subgraph Extraction + EX["type_analyzer / extractors"] + EX -->|"ModelSpec, UnionSpec"| TREE["expand_model_tree()"] + end + + TREE -->|"FeatureSpec[]"| OL + + subgraph "Output Layout" + OL["type_collection"] + OL -->|"SupplementarySpec{}"| PA["path_assignment"] + PA -->|"dict[str, Path]"| LC["link_computation"] + RR["reverse_references"] + end + + subgraph Rendering + R["markdown_renderer"] + TR["type_registry"] -.->|"type name resolution"| R + end + + subgraph Orchestration + MP["markdown_pipeline"] + end + + OL --> MP + LC --> MP + RR --> MP + MP --> R + R -->|"RenderedPage[]"| MP + MP -->|"list[RenderedPage]"| CLI["cli.py → disk"] +``` + +## Extraction + +### `analyze_type` -- iterative type unwrapping + +`analyze_type(annotation)` is a single iterative function that peels type annotation +layers in a fixed order, accumulating information into an `_UnwrapState`: + +1. **NewType**: Records the outermost name (user-facing semantic identity, e.g. + `FeatureVersion`) and updates the "current" name (used for constraint provenance and + as `base_type` at terminal) +2. **Annotated**: Collects constraints from metadata, each tagged with whichever NewType + was most recently entered. Extracts `Field.description` when present +3. **Union**: Filters out `None` (marks optional), `Sentinel`, and `Literal` sentinel + arms. If multiple concrete `BaseModel` arms remain, classifies as `UNION`; otherwise + continues with the single remaining arm +4. **list / dict**: Sets collection flags, continues into element types +5. 
**Terminal**: Classifies as `PRIMITIVE`, `LITERAL`, `ENUM`, `MODEL`, or `UNION` + +The result is `TypeInfo` -- a flat dataclass that fully describes the unwrapped type: +classification (`TypeKind`), optional/list/dict flags, accumulated constraints with +provenance, NewType names, source type, literal values, and (for UNION kind) the tuple +of concrete `BaseModel` member types. Dict types carry recursively analyzed `TypeInfo` +for their key and value types. + +Multi-depth `Annotated` layers (common in practice, since NewTypes wrap `Annotated` +types that wrap further NewTypes) are handled naturally by the loop -- each iteration +processes the next wrapper. Constraints from each `Annotated` layer are tagged with the +NewType active at that depth. + +### Extractors by domain + +Extraction is split by entity kind: + +- `model_extraction.py`: Pydantic model -> `ModelSpec` (fields in MRO-aware + documentation order, alias-resolved names, model-level constraints) +- `enum_extraction.py`: Enum class -> `EnumSpec` +- `newtype_extraction.py`: NewType -> `NewTypeSpec` +- `union_extraction.py`: Discriminated union alias -> `UnionSpec` +- `primitive_extraction.py`: Numeric primitives -> `PrimitiveSpec` + +Each calls `analyze_type()` for field types. Tree expansion (`expand_model_tree()`) +walks MODEL-kind fields to populate nested model references, with a shared cache and +cycle detection (`starts_cycle=True`). + +### Unions and the FeatureSpec protocol + +Discriminated unions (e.g. `Segment = Annotated[Union[RoadSegment, ...], +Discriminator(...)]`) are type aliases, not classes. `UnionSpec` captures the union +structure: member types, discriminator field and value mapping, and a merged field list. +Fields shared across all variants appear once; fields present in some variants are +wrapped in `AnnotatedField` with `variant_sources` indicating which members contribute +them. The common base class is identified so shared fields can be deduplicated. 
+ +`FeatureSpec` is a `Protocol` satisfied by both `ModelSpec` and `UnionSpec`. Code that +operates on "any top-level feature" -- tree expansion, supplementary type collection, +rendering dispatch -- uses `FeatureSpec` rather than a concrete type, so union and model +features flow through the same pipeline. + +### Constraints + +Field-level constraints come from `Annotated` metadata -- `Ge`, `Le`, `Interval`, custom +constraint classes. Each is tagged with the NewType that contributed it via +`ConstraintSource`. + +Model-level constraints come from decorators (`@require_any_of`, `@require_if`, +`@forbid_if`) and are extracted via `ModelConstraint.get_model_constraints()`. + +## Output Layout + +Determines the full set of artifacts to generate, where each lives on disk, and how they +reference each other. + +### Supplementary type collection + +`collect_all_supplementary_types()` walks the expanded field trees of all feature specs, +extracting enums, semantic NewTypes, and sub-models that need their own output. Returns +`dict[str, SupplementarySpec]`. + +### Module-mirrored output paths + +Output paths derive from the source Python module path relative to a computed schema +root (`compute_schema_root()` finds the longest common prefix of all entry point module +paths). `compute_output_dir()` maps a Python module to an output directory. Feature +models land in their module-derived directory. Supplementary types land at their own +module-derived path, with a `types/` segment inserted when they fall under a feature +directory. + +### Link computation + +`LinkContext` carries the current output's path and the full type-to-path registry. When +a renderer formats a type reference, it looks up the target in the registry and computes +a relative path. Links exist only for types with registry entries, avoiding broken +references to ungenerated outputs. 
+ +### Reverse references + +`compute_reverse_references()` walks feature specs to build `dict[type_name, +list[UsedByEntry]]` for "Used By" sections. + +## Rendering + +Renderers consume specs and own all presentation decisions -- formatting, casing, link +syntax. Extraction and the type registry carry no presentation logic. + +### Type registry + +`type_registry.py` maps type names to per-target string representations via +`TypeMapping`. `format_type_string()` wraps the resolved name with list/optional +qualifiers. `is_semantic_newtype()` distinguishes NewTypes that deserve their own +identity (like `FeatureVersion` wrapping `int32`) from pass-through aliases to +registered primitives. + +### Markdown renderer + +Jinja2 templates for feature, enum, NewType, primitives, and geometry pages. +`render_feature()` expands MODEL-kind fields inline with dot-notation (e.g., +`sources[].dataset`), stopping at cycle boundaries. `format_type()` in +`markdown_type_format.py` converts `TypeInfo` into link-aware display strings using +`LinkContext`. + +### Constraint prose + +`field_constraint_description.py` and `model_constraint_description.py` convert +constraint objects into human-readable descriptions. Field constraints produce inline +text. Model constraints produce section-level descriptions and per-field notes, with +consolidation for related conditional constraints (`require_if` / `forbid_if` grouped by +trigger). + +### Example loader + +Loads example data from theme `pyproject.toml` files, validates against Pydantic models, +and flattens to dot-notation rows for display in feature pages. Also provides a starting +point for generated test data. + +## Extension Points + +**Adding a new output target** (Arrow schemas next, PySpark expressions after): Add a +column to `TypeMapping` in `type_registry.py` for type-name resolution. Write a new +renderer module that consumes specs and the type registry. The extraction layer and +output layout are target-independent. 
+ +**Adding a new type kind**: Add a variant to `TypeKind` in `type_analyzer.py`. Handle it +in the terminal classification of `analyze_type()`. Add an extraction function and spec +dataclass if needed. Update renderers to handle the new kind. + +**Adding a new constraint type**: The iterative unwrapper collects it automatically (any +`Annotated` metadata becomes a `ConstraintSource`). Add a case to +`describe_field_constraint()` for the prose representation. diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md new file mode 100644 index 000000000..89fdc2f74 --- /dev/null +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -0,0 +1,736 @@ +# Walkthrough: overture-schema-codegen + +Pydantic's serialization machinery destroys the vocabulary that documentation needs. The +codegen recovers it. + +Consider the transportation schema's `Segment` type -- a discriminated union of +`RoadSegment`, `RailSegment`, and `WaterSegment`. All three share fields inherited from +`TransportationSegment`. Each adds variant-specific fields. The discriminator field +`subtype` carries a `Literal` value (`"road"`, `"rail"`, `"water"`) that selects the +arm. Call `model_json_schema()` and the union collapses into an `anyOf` array with +duplicated field definitions, the discriminator mapping disappears, and the common-base +relationship between variants is unrecoverable. + +The same loss happens at the field level. `FeatureVersion = NewType("FeatureVersion", +int32)` where `int32 = NewType("int32", Annotated[int, Field(ge=0, le=2147483647)])` +becomes `{"type": "integer", "minimum": 0, "maximum": 2147483647}`. Three things +vanished: the name "FeatureVersion," the name "int32," and the fact that `ge=0` came +from the `int32` layer rather than `FeatureVersion`. 
Custom constraint classes like +`GeometryTypeConstraint` lose their identity -- the class name, its docstring, and its +relationship to a specific NewType dissolve into anonymous JSON Schema keywords. + +Documentation needs all of this. The codegen exists to preserve it. + +Navigating Python's type annotation machinery -- NewType chains, nested `Annotated` +wrappers, union filtering, generic resolution -- is complex. The codegen does it once. +`analyze_type()` unwraps annotations into `TypeInfo`, a flat target-independent +representation. Extractors build specs from `TypeInfo`. Renderers consume specs without +re-entering the type system. New output targets add renderers, not extraction logic. + +The solution decomposes into four layers. Discovery finds models. Extraction unwraps +them into flat specifications. Output Layout decides what to generate and where it goes. +Rendering formats the output. Imports flow strictly downward -- no layer references the +one above it. + +Sixteen sections follow, ordered by dependency: each module appears before anything that +imports it. The final section inverts this and traces the full pipeline top-down. +Segment threads through as the primary example, since its path through the system -- +union classification, common base discovery, variant field partitioning, discriminator +extraction, tagged rendering -- exercises more of the pipeline than any model feature +does. + +--- + +## 1. Discovery + +The pipeline starts in `overture-schema-core`, not in the codegen package itself. +`discover_models()` calls `importlib.metadata.entry_points(group="overture.models")` and +loads every registered model. Each entry point name encodes identity as a +colon-delimited triple (`overture:buildings:building`); each value encodes the Python +location (`overture.schema.buildings:Building`). 
The function parses both formats -- +three-part names carry a theme component, two-part names set theme to `None` -- and +returns `dict[ModelKey, type[BaseModel]]`. + +`ModelKey` is a frozen dataclass with four fields: `namespace`, `theme`, `type`, and +`entry_point`. The `entry_point` field preserves the raw `module:Class` string that +downstream modules split to determine output directory structure. + +The return dict includes both concrete `BaseModel` subclasses and type aliases. +`Building` is a concrete class -- `isinstance(Building, type)` returns true. `Segment` +is not. It is an `Annotated` alias wrapping `Union[RoadSegment, RailSegment, +WaterSegment]` with a discriminator field. `isinstance` and `issubclass` cannot inspect +it. The entry point `overture:transportation:segment` maps to +`overture.schema.transportation:Segment`, which loads the alias itself. + +The codegen classifies these at the CLI boundary: `is_model_class` identifies concrete +`BaseModel` subclasses, `is_union_alias` calls `analyze_type` to identify discriminated +unions. From that point forward both model features and union features satisfy the +`FeatureSpec` protocol and flow through the same pipeline. + +## 2. Leaf utilities + +Two modules with no internal dependencies. Both serve multiple layers. + +### case_conversion.py + +Converts PascalCase to snake_case with two compiled regexes. `_ACRONYM_BOUNDARY` inserts +an underscore between an uppercase run and a capitalized word start: `HTMLParser` +becomes `HTML_Parser` becomes `html_parser`. `_CAMEL_BOUNDARY` inserts between +lowercase-or-digit and uppercase: `buildingPart` becomes `building_part`. +`to_snake_case` applies them in sequence and lowercases. + +`slug_filename` composes the conversion with a file extension. Every output file path in +the system passes through this function. 
+ +```python +>>> slug_filename("HexColor") +'hex_color.md' +``` + +### docstring.py + +Distinguishes author-written docstrings from auto-generated ones. Both `Enum` and +`NewType` produce default docstrings that vary across Python versions. Rather than +hardcoding version-specific strings, the module creates temporary instances at import +time, captures their `__doc__` attributes, then deletes the instances: + +```python +class _DocstringProbeEnum(Enum): + pass + +_ENUM_DEFAULT_DOCSTRING = _DocstringProbeEnum.__doc__ +del _DocstringProbeEnum +``` + +`is_custom_docstring` compares a given docstring against these captured defaults and an +optional inherited docstring. The enum extractor uses this both at class level and +per-member, since `DocumentedEnum` members carry individual `__doc__` attributes. + +`clean_docstring` delegates to `inspect.cleandoc` and returns `None` for empty results. +`first_docstring_line` takes the first line only -- used by renderers that show +summaries. + +## 3. Type analysis + +This is the module the entire package exists to house. `analyze_type` takes a raw type +annotation and returns `TypeInfo` -- a flat dataclass that fully describes the unwrapped +type without any reference to Python's typing machinery. + +### The loop + +The function runs a single `while True` loop that peels layers in fixed order. Each +iteration handles one wrapper: + +**NewType** records names at two levels. The first NewType encountered becomes +`outermost_newtype_name` (the user-facing identity, e.g. "FeatureVersion"). Subsequent +NewTypes update `last_newtype_name` (the innermost, used for constraint provenance and +as the terminal `base_type`). The loop unwraps via `__supertype__` and continues. + +**Annotated** collects every metadata object as a `ConstraintSource`, tagging each with +whichever NewType was most recently entered. 
This is how constraint provenance survives:
+when `int32`'s `Annotated` layer contributes `Field(ge=0)`, the constraint records
+`source="int32"`. If a `FieldInfo` carries a description, the function captures it --
+first description wins, so the outermost NewType's documentation takes precedence.
+
+**Union** filters out `NoneType` (marks optional), `Sentinel` instances (Pydantic's
+`PydanticUndefined` marker for undeclared defaults), and `Literal` sentinel arms (like
+`Literal[""]` used alongside `HttpUrl`). If multiple concrete `BaseModel` subclasses
+remain after filtering, the function classifies the type as `UNION` and returns
+immediately with the member tuple. Non-BaseModel multi-type unions raise
+`UnsupportedUnionError`. A single remaining arm continues the loop.
+
+The `Literal` filtering has a guard: when a union contains *only* Literal arms (like
+`Optional[Literal["x"]]`), the function keeps them rather than filtering everything out.
+
+**list/dict** sets collection flags and continues into element types. Dict is the one
+case where `analyze_type` recurses -- it calls itself for key and value types, storing
+the results as nested `TypeInfo` objects.
+
+**Terminal** classification in `_classify_terminal` handles what remains after all
+wrappers are peeled: `Any` becomes a PRIMITIVE, `Literal` returns with the literal value
+(single-value only -- multi-value Literals get `literal_value=None`), `Enum` subclasses
+become ENUM, `BaseModel` subclasses become MODEL, everything else becomes PRIMITIVE.
+
+### Concrete walkthroughs
+
+**Segment (union path).** `analyze_type` receives the `Annotated` alias. Iteration 1
+sees `Annotated` -- collects the `FieldInfo` with discriminator metadata as a
+constraint, unwraps to `Union[RoadSegment, RailSegment, WaterSegment]`. Iteration 2 sees
+the union. No `None` arm, no sentinels. 
Three concrete `BaseModel` subclasses remain --
+the function classifies the type as `UNION` and returns immediately: `kind=UNION`,
+`union_members=(RoadSegment, RailSegment, WaterSegment)`, `base_type="RoadSegment"` (the
+first member). Two iterations, done. The union members are raw type objects, not
+recursively analyzed -- callers that need field details call `extract_model` on each
+member separately.
+
+**FeatureVersion (NewType chain path).** `FeatureVersion = NewType("FeatureVersion",
+int32)` where `int32 = NewType("int32", Annotated[int, Field(ge=0, le=2147483647)])`.
+
+Iteration 1 sees `FeatureVersion`. It's a NewType -- record
+`outermost_newtype_name="FeatureVersion"`, unwrap to `int32`, continue. Iteration 2 sees
+`int32`. Also a NewType -- update `last_newtype_name="int32"`, unwrap to `Annotated[int,
+Field(ge=0, ...)]`, continue. Iteration 3 sees `Annotated`. Collect
+`ConstraintSource(source="int32", constraint=<FieldInfo>)`, unwrap to `int`. The
+loop breaks on `int` (not a NewType, not Annotated, not a union, not a container).
+`_classify_terminal` returns a `TypeInfo` with `base_type="int32"`,
+`newtype_name="FeatureVersion"`, `kind=PRIMITIVE`, and a constraint tuple recording the
+provenance chain.
+
+The two paths demonstrate the function's range. Segment exits early on the union branch
+with member types for downstream extraction. FeatureVersion runs the full loop through
+NewType and Annotated layers, accumulating constraint provenance that survives to
+rendering.
+
+### _UnwrapState
+
+The accumulator dataclass carries state across iterations: optional/list/dict flags, the
+constraint list, both NewType name slots, and the captured description. Its
+`build_type_info` method assembles the final `TypeInfo` from accumulated state, freezing
+the constraint list into a tuple.
+
+### walk_type_info
+
+A shared visitor that recurses into dict key/value `TypeInfo` children. 
Both type +collection and reverse reference computation use it rather than duplicating the descent +pattern. Union members are raw `type` objects (not `TypeInfo` instances), so callers +handle them directly. + +## 4. Data structures + +`specs.py` defines the vocabulary shared between extraction and rendering. Every spec is +a dataclass with no methods beyond field access and, in `UnionSpec`'s case, one cached +property. + +**FieldSpec** represents one model field: alias-resolved name, `TypeInfo`, description, +required flag. Two fields populated later by tree expansion: `model` (a reference to the +nested `ModelSpec` for MODEL-kind fields) and `starts_cycle` (true when following this +field's model would create a cycle in the ancestor chain). + +**ModelSpec** represents one Pydantic model: class name, cleaned docstring, fields in +documentation order, source class reference, the entry point string that located it, and +model-level constraints from decorators like `@require_any_of`. + +**UnionSpec** represents a discriminated union type alias. Segment's `UnionSpec` carries +`members=[RoadSegment, RailSegment, WaterSegment]`, `discriminator_field="subtype"`, and +`common_base=TransportationSegment`. Its `annotated_fields` list pairs each `FieldSpec` +with `variant_sources` -- a tuple of class names indicating which union members +contribute that field, or `None` for fields from `TransportationSegment` shared across +all members. The `fields` cached property unwraps this for code that doesn't need +provenance. `UnionSpec` uses `eq=False` because it contains mutable lists and a +`cached_property` -- dataclass-generated `__eq__` would be unreliable. + +**FeatureSpec** is a `Protocol` satisfied by both `ModelSpec` and `UnionSpec`. This is +the pipeline's unifying abstraction. Tree expansion, type collection, rendering +dispatch, and example loading all operate on `FeatureSpec` without knowing which +concrete type they hold. 
+ +**EnumSpec** and **EnumMemberSpec** serve enums. **NewTypeSpec** serves NewTypes. +**PrimitiveSpec** serves numeric primitives with an `Interval` for bounds and optional +`float_bits`. + +**SupplementarySpec** is the union type alias `EnumSpec | NewTypeSpec | ModelSpec` -- +the set of non-feature types that need their own output pages. `PrimitiveSpec` and +geometry types are excluded because they render on aggregate pages rather than +individual ones. + +### Classification functions + +Three functions at the bottom of `specs.py` classify discovery results. `is_model_class` +is a `TypeGuard` that checks `isinstance(obj, type) and issubclass(obj, BaseModel)`. +`is_union_alias` calls `analyze_type` and checks for `UNION` kind -- the only place +outside the type analyzer that touches Python type annotations. `filter_model_classes` +applies the model guard across the discovery dict's values. + +## 5. Type registry + +Maps type names to per-target display strings. `PRIMITIVE_TYPES` contains 15 entries: +four signed integer widths, three unsigned, two floats, `str`/`bool`, two Python builtin +aliases (`int` maps to `int64`, `float` maps to `float64`), and two geometry types +(`Geometry`, `BBox`). Each maps to a `TypeMapping` with a `markdown` field. + +`is_semantic_newtype` answers a question: does this NewType deserve its own +documentation page? The function returns true when the outermost name differs from the +base type (`FeatureVersion` wrapping `int32`) or when the base type has no registry +entry (`HexColor` wrapping `str` via constraints). It returns false for registered +primitives (`int32` wrapping `int`) -- those are the type system's building blocks, not +user-facing concepts. + +`resolve_type_name` looks up the registry by `base_type`, tries `source_type.__name__` +when the first lookup fails, and falls back to `base_type` as a last resort. 
Semantic +NewTypes wrapping unregistered classes (like `Sources` wrapping `SourceItem`) use the +underlying class name rather than the NewType alias -- `source_type.__name__` takes +precedence. + +## 6. Model extraction + +`extract_model` converts a Pydantic `BaseModel` subclass into a `ModelSpec`. + +### Field ordering + +Documentation order differs from Python declaration order. `_class_order` produces the +MRO-aware sequence: for single inheritance, reversed MRO puts base class fields first +and derived fields last. For multiple inheritance, the primary chain (first base) comes +first, then the class's own fields, then mixin fields. This matches how a reader +encounters the model -- shared structure before specialization. + +`_field_order` walks the class hierarchy produced by `_class_order` and collects +`__annotations__` keys, deduplicating as it goes. + +### Field extraction + +For each field, the extractor resolves the alias chain (`validation_alias` > `alias` > +Python name via `resolve_field_alias`), calls `analyze_type` on `field_info.annotation`, +and builds a `FieldSpec`. The extractor uses `field_info.annotation` rather than +`get_type_hints()` because the latter returns unresolved TypeVars for generic base +classes. + +One subtlety: Pydantic strips the `Annotated` wrapper from some fields and moves the +metadata to `field_info.metadata`. When this happens, `analyze_type` sees a bare type +and misses the constraints. `_merge_field_metadata` patches them back in, tagging them +with `source=None` since they came from the field's own annotation rather than a NewType +chain. + +Model-level constraints come from `ModelConstraint.get_model_constraints(model_class)`, +which inspects decorators like `@require_any_of` and `@require_if`. + +### Tree expansion + +`expand_model_tree` is the recursive step that populates `FieldSpec.model` references. +It maintains a shared cache keyed by Python class and an ancestor set for cycle +detection. 
+ +The cache insert happens *before* recursion. Without this ordering, a back-edge +encounter would find no cached entry and infinite-loop instead of marking +`starts_cycle=True`. The sequence: extract the sub-model, insert it into the cache, then +recurse into its fields. Shared references (the same sub-model used in multiple fields) +reuse the cached `ModelSpec` without marking cycles. + +Union-kind fields skip inline expansion -- they appear as a single row in the output, +linking to their members, rather than expanding inline. + +## 7. Other extractors + +### Enum extraction + +`extract_enum` iterates members, checking `is_custom_docstring` for both class-level and +per-member descriptions. `DocumentedEnum` members carry `__doc__` attributes that the +extractor preserves. The class-level docstring is passed as `inherited_doc` to the +per-member check, so members that inherit the class docstring verbatim get +`description=None`. + +### NewType extraction + +`extract_newtype` calls `analyze_type` on the NewType callable and extracts the custom +docstring. When the NewType has no explicit docstring, it falls back to +`TypeInfo.description` -- the first `Field.description` found in the `Annotated` +metadata chain. + +### Union extraction + +The most involved extractor. Walk through `Segment` concretely. + +`extract_union("Segment", annotation)` calls `analyze_type` on the +`Annotated[Union[RoadSegment, RailSegment, WaterSegment], ...]` alias. The analyzer +returns `kind=UNION` with the three member types. + +Next, `_find_common_base` intersects each member's filtered MRO (BaseModel subclasses +only, excluding `BaseModel` itself). All three share `TransportationSegment` in their +MRO. The function picks the most-derived class in the intersection -- the one whose +worst-case MRO distance is smallest. `TransportationSegment` wins: it is the direct +parent of all three members. + +The extractor calls `extract_model(TransportationSegment)` to get the shared field set. 
+Fields like `id`, `geometry`, `version`, `sources`, and `subtype` appear in the common +base. These become shared `AnnotatedField` entries with `variant_sources=None`. + +Then it extracts each member: `RoadSegment`, `RailSegment`, `WaterSegment`. Fields not +in the shared set are variant-specific, deduplicated by `(name, type_identity)` where +`type_identity` captures `base_type`, `kind`, `is_optional`, and `is_list`. If +`RoadSegment` and `WaterSegment` both define a `width` field with the same type +identity, the `AnnotatedField` accumulates both class names: +`variant_sources=("RoadSegment", "WaterSegment")`. Fields unique to one member get a +single-element tuple. + +`extract_discriminator` inspects the `Annotated` metadata for a `FieldInfo` with a +discriminator attribute. For Segment, it finds `subtype` and builds the mapping: +`{"road": RoadSegment, "rail": RailSegment, "water": WaterSegment}` by checking each +member for single-value `Literal` fields on the discriminator. + +### Primitive extraction + +`partition_primitive_and_geometry_names` reads a module's `__all__` exports. NewType +exports are numeric primitives; non-constraint class exports are geometry types. + +`extract_primitives` builds `PrimitiveSpec` objects. For each primitive name it resolves +the object from the module, calls `extract_newtype` for the type analysis, then extracts +numeric bounds from constraints. `extract_numeric_bounds` scans constraint objects for +`ge`/`gt`/`le`/`lt` attributes and packs them into an `Interval`. + +## 8. Constraint prose + +Two modules convert constraint objects into human-readable text. + +### Field constraints + +`field_constraint_description.py` pattern-matches constraint types. `Interval` renders +as `lower <= x <= upper` using Unicode comparison operators. Single-bound constraints +(`Ge`, `Gt`, `Le`, `Lt`) render as `>= value` or `< value`. Length constraints +(`MinLen`, `MaxLen`) render as `minimum length: N`. 
`GeometryTypeConstraint` lists +allowed geometry types by name, converting snake_case values to PascalCase. `Reference` +describes the relationship and target model, using an optional `link_fn` to produce +markdown links. + +Opaque constraints -- classes that inherit `object.__repr__` without customization -- +render as their class name plus docstring. When a regex pattern attribute exists, the +prose includes it. + +`constraint_display_text` is the top-level entry point. It checks whether the constraint +is opaque and has a docstring, and if so, produces a composite description combining the +docstring, class name, and pattern. Otherwise it delegates to +`describe_field_constraint`. + +### Model constraints + +`model_constraint_description.py` handles model-level constraints from decorators. +`analyze_model_constraints` returns two things in one pass: a list of section-level +descriptions and a dict mapping field names to the constraint descriptions that +reference them. + +The module consolidates related conditionals. Three `require_if` constraints with the +same target fields but different trigger values merge into "when X is one of: a, b, c" +instead of three separate bullets. `_consolidation_key` groups constraints by `(type, +field_names, condition_field_name)`. Groups with one member render normally; groups with +multiple members produce consolidated prose. + +`NoExtraFieldsConstraint` is silently skipped -- it is a structural validation rule, not +something a documentation reader acts on. + +## 9. Module layout + +Translates Python module paths into output directory paths. `compute_schema_root` finds +the longest common dotted prefix across all entry point module paths. Given paths like +`overture.schema.buildings`, `overture.schema.places`, and +`overture.schema.transportation`, the root is `overture.schema`. For a single unique +path, it drops the last component. + +`compute_output_dir` mirrors the remaining package structure after stripping the root. 
+Packages (directories with `__path__` per PEP 302) keep all components. File modules +drop their last component, since the `.py` filename adds no useful structure. +`is_package_module` checks `sys.modules` for `__path__` to make this distinction. + +The entry point string `overture.schema.buildings:Building` encodes both module and +class. `entry_point_module` extracts the module path, `entry_point_class` extracts the +class name. `output_dir_for_entry_point` composes these to produce the output directory +for a feature. + +## 10. Supplementary type collection + +`collect_all_supplementary_types` walks the expanded field trees of all feature specs to +discover every referenced type that needs its own output page: enums, semantic NewTypes, +and sub-models. + +The walk maintains a visited set for models and a feature name set for skip detection. +Types that are themselves top-level features get skipped. For UNION-kind fields, the +function extracts and walks each member's fields. For semantic NewTypes, it walks the +`__supertype__` chain to collect intermediate NewTypes -- `Id` wraps +`NoWhitespaceString` wraps `str`, and both `Id` and `NoWhitespaceString` get their own +pages. The `walk_type_info` visitor handles dict key/value recursion. + +MODEL-kind fields follow `field_spec.model` references that were populated by +`expand_model_tree`. The function raises `RuntimeError` if it encounters a MODEL-kind +field with `model=None` -- a guard against calling collection before tree expansion. + +A single field matches multiple conditions independently. A semantic NewType wrapping a +MODEL-kind type triggers both NewType extraction and model collection. The checks use +independent `if` statements, not `elif`. + +## 11. Path assignment + +`build_placement_registry` builds the complete mapping from type names to output file +paths. Three tiers: + +Aggregate pages come first. All numeric primitives point to +`system/primitive/primitives.md`. 
All geometry types point to +`system/primitive/geometry.md`. These are hardcoded paths since the types share a single +reference page. + +Feature specs get individual pages. Output directories derive from +`output_dir_for_entry_point`. Filenames use `slug_filename`. + +Supplementary specs get module-derived paths from `source_type.__module__`. When a +supplementary type's output directory falls under a feature directory, +`_nest_under_types` inserts a `types/` segment. Without this insertion, an enum defined +in `overture.schema.buildings` would land alongside the Building feature page. With it, +the enum lands in `buildings/types/` -- preventing supplementary type pages from +cluttering feature directories. + +`_nest_under_types` sorts feature directories by path length (descending) before +checking containment, so the most specific match wins. + +## 12. Links and reverse references + +### Link computation + +`LinkContext` carries the current page's output path and the full type-to-path registry. +When a renderer formats a type reference, it calls `resolve_link` to compute a relative +path from the current page to the target. Types without registry entries return `None`, +telling renderers to show inline code instead of a broken link. `resolve_link_or_slug` +provides a fallback when a link is required regardless. + +`relative_link` computes `../` navigation between any two paths in the output tree. It +finds the common prefix of directory components, counts the levels up from the source +directory, and descends into the target. Both paths must be normalized -- the function +rejects `..` components to prevent path traversal surprises. + +### Reverse references + +`compute_reverse_references` walks all feature fields and supplementary specs to build +`dict[str, list[UsedByEntry]]`. Each entry maps a type name to the list of types that +reference it. Entries sort models before NewTypes, alphabetical within each group. 
+
+The function tracks references with sets for deduplication, then sorts into lists at the
+end. It skips self-references and references to types not in the supplementary spec dict
+(features don't need "used by" sections since they are the entry points).
+
+NewType specs register additional references from their constraint sources. If `Id`
+inherits a constraint from `NoWhitespaceString`, the reverse reference captures that
+`Id` uses `NoWhitespaceString` -- even though the relationship is through constraint
+provenance rather than direct field reference.
+
+## 13. Markdown type formatting
+
+`markdown_type_format.py` converts `TypeInfo` into display strings for markdown output.
+
+`format_type` handles the full range of field types. Single-value Literals render as
+`"value"` in backticks. Semantic NewTypes and enums/models get markdown links via
+`_resolve_type_link`, which checks the `LinkContext` registry and falls back to plain
+code spans. Lists of linked types wrap with `` `list<` `` broken-backtick syntax when
+the inner type carries a link. Dict types render as `` `map<key, value>` ``. Qualifiers
+(optional, list, map) append in parentheses.
+
+Union members format independently -- each gets its own link resolution, joined with
+pipe separators escaped for table-cell safety.
+
+`format_underlying_type` handles NewType page headers. It links enums and models that
+have their own pages but skips the outermost NewType name to avoid self-referencing. The
+function uses `source_type.__name__` rather than `base_type` for link resolution, since
+`base_type` may carry the outermost NewType name when only one NewType wraps a class.
+
+## 14. Markdown rendering
+
+`markdown_renderer.py` is the template driver.
+
+### Templates
+
+Six Jinja2 templates in `templates/markdown/`. `feature.md.jinja2` renders a field table
+with Name, Type, and Description columns, an optional Constraints section, an optional
+Examples section, and a "Used By" partial. 
`enum.md.jinja2` renders a bullet list of +values. `newtype.md.jinja2` shows underlying type and constraints with provenance links. +`primitives.md.jinja2` and `geometry.md.jinja2` render aggregate reference pages. +`_used_by.md.jinja2` is an included partial. + +The Jinja2 environment registers `linkify_urls` as a filter, which wraps bare URLs in +markdown link syntax. The filter uses a two-pass approach: extract code spans first (to +avoid modifying URLs inside backticks), linkify the remaining text, then restore code +spans. + +### Field expansion + +`render_feature` dispatches on spec type. `ModelSpec` gets `_expand_model_fields`, which +walks the pre-populated `FieldSpec.model` tree and produces dot-notation rows. +`sources[0].dataset` appears as a single row in the flat field table, with the list +suffix `[]` appended to list-of-model fields. Expansion stops at fields marked with +`starts_cycle`. + +`UnionSpec` gets `_expand_union_fields`, which adds italic variant tags to +variant-specific fields. For Segment, shared fields from `TransportationSegment` (like +`id`, `geometry`, `sources`) render as plain rows. Variant-specific fields get tagged: +`_short_variant_name` strips the union name suffix, so `RoadSegment` becomes `Road`, +`WaterSegment` becomes `Water`. A field present in two of three members renders as `` +`width` *(Road, Water)* ``. Shared fields render without tags. + +### Constraint annotation + +Field-level constraints from the field's own annotation (not inherited from NewType +chains) annotate the field's description cell as italic text. The distinction matters: +constraints with `source=None` came from the field itself, while constraints with a +named source live on the NewType's own page. + +Model-level constraints annotate top-level field rows (those without dot-notation +prefixes) using the `field_notes` dict from `analyze_model_constraints`. + +### Example formatting + +Example values render in backticks for monospace consistency. 
Booleans use +`true`/`false` (not Python's `True`/`False`). `None` renders as `null`. Long values +truncate at 100 characters. Lists and dicts use compact bracket/brace notation. + +### Aggregate pages + +`render_primitives_from_specs` sorts primitives by bit-width key (prefix then numeric +width), groups into signed integers, unsigned integers, and floats, and formats ranges. +Integer ranges show both bounds as a compact "lower to upper" form; `int64`-scale bounds +use `2^63` notation for readability. `render_geometry_from_values` produces a +comma-separated backtick list. + +## 15. Example loader + +Loads example data from theme `pyproject.toml` files and validates it against the +schema. + +`resolve_pyproject_path` walks up from a model's module file to find `pyproject.toml`. +`load_examples_from_toml` reads the `[examples.ModelName]` TOML section. + +Validation requires three preprocessing steps that handle TOML's limitations and +flat-schema conventions. + +TOML has no null literal, so examples use the string `"null"` as a stand-in. `_denull` +replaces these recursively, walking nested dicts and lists. + +Literal fields (like `theme="buildings"`) are omitted from examples since they carry +constant values. `_inject_literal_fields` adds them back before validation by scanning +`model_fields` for single-value `Literal` annotations via `single_literal_value`. + +Discriminated union examples from flat parquet schemas include null fields from +non-selected variant arms. `_strip_null_unknown_fields` removes null-valued fields not +in the common base's field set, so the selected arm's validator accepts the data without +choking on fields that belong to sibling variants. + +`flatten_example` converts nested dicts to dot-notation. Nested dicts become +`parent.child`, lists of dicts become `parent[0].child`. `order_example_rows` sorts by +field position in the documentation's field order using a stable sort, so sub-fields +maintain their original relative order. 
+ +`load_examples` orchestrates the full flow: find the pyproject.toml, load the TOML +section, validate each example, flatten, and order. Invalid examples log a warning and +skip rather than failing the pipeline. + +## 16. Orchestration and CLI + +### The pipeline + +`generate_markdown_pages` in `markdown_pipeline.py` is the "main" function. It takes +feature specs and a schema root, returns rendered pages without touching the filesystem. +Eight steps: + +1. **Expand model trees** with a shared cache across all features, so sub-models + referenced by multiple features extract once. + +2. **Partition primitive and geometry names** from the system primitive module's + `__all__` exports. + +3. **Collect supplementary types** by walking expanded feature trees. + +4. **Build the placement registry** mapping every type to its output file path. + +5. **Compute reverse references** across all features and supplements. + +6. **Render each feature** with its `LinkContext`, loaded examples, and used-by entries. + +7. **Render each supplementary type** -- dispatching to `render_enum`, `render_newtype`, + or `render_feature` (for sub-models) based on spec type. + +8. **Render aggregate pages** for primitives and geometry. + +The return value is `list[RenderedPage]` -- frozen dataclasses carrying content, output +path, and a boolean `is_feature` flag. The caller decides what to do with them. + +### The CLI + +`cli.py` is a thin Click wrapper. The `generate` command discovers models, computes +schema root from *all* entry points (before any theme filtering), classifies each entry +as model or union via `is_model_class` and `is_union_alias`, extracts specs, calls the +pipeline, and writes output. + +Schema root computation uses all entry points deliberately. Theme filtering narrows +which features appear in the output, but the directory structure must remain stable +regardless of which themes are selected. 
Computing the root from filtered paths would +shift output directories when themes change. + +Feature pages get Docusaurus frontmatter (`sidebar_position: 1`) prepended. The CLI +generates `_category_.json` files for sidebar navigation, assigning positions +alphabetically with feature directories first. + +The `list` command prints sorted model names -- a diagnostic tool for verifying which +models the entry point system discovers. + +--- + +## Top-down trace: Segment through the pipeline + +A reader who reached this point has seen every module in isolation. This section follows +`Segment` from discovery to rendered markdown, showing how the pieces compose. + +**Discovery.** The CLI calls `discover_models()`. The entry point +`overture:transportation:segment` loads `overture.schema.transportation:Segment` -- the +`Annotated[Union[...]]` alias. `Segment` lands in the return dict keyed by +`ModelKey(namespace="overture", theme="transportation", type="segment", +entry_point="overture.schema.transportation:Segment")`. + +**Classification.** The CLI tests each entry. `is_model_class(Segment)` returns false -- +`Segment` is not a class. `is_union_alias(Segment)` calls `analyze_type`, which peels +the `Annotated` wrapper and finds three `BaseModel` subclasses in the union. The +analyzer returns `kind=UNION`. The CLI routes Segment to `extract_union`. + +**Extraction.** `extract_union("Segment", annotation)` calls `analyze_type` again (cheap +-- the same two-iteration path), gets the three member types, and finds +`TransportationSegment` as the common base via `_find_common_base`. It extracts the +common base's fields as shared, then extracts each member's fields and partitions the +non-shared ones into `AnnotatedField` entries with variant provenance. +`extract_discriminator` finds `subtype` and builds `{"road": RoadSegment, "rail": +RailSegment, "water": WaterSegment}`. The result is a `UnionSpec` satisfying +`FeatureSpec`. 
+ +Meanwhile, concrete models like `Building` go through `extract_model`, which calls +`analyze_type` on each field annotation. A field typed `FeatureVersion` unwraps through +two NewType layers and an `Annotated` layer, producing a `TypeInfo` with +`base_type="int32"`, `newtype_name="FeatureVersion"`, and constraint provenance linking +`ge=0` back to the `int32` NewType. Both extraction paths produce specs satisfying +`FeatureSpec`. + +**Pipeline entry.** The feature specs enter `generate_markdown_pages`. +`expand_model_tree` walks MODEL-kind fields on Segment's `UnionSpec` and populates +`FieldSpec.model` references. The shared cache ensures sub-models referenced by multiple +features (like `Sources`) extract once. Union-kind fields skip inline expansion. + +**Layout.** `partition_primitive_and_geometry_names` reads the system module's exports. +`collect_all_supplementary_types` walks Segment's expanded fields and discovers +referenced enums (like `Subtype`), semantic NewTypes (like `Id`, `Sources`), and +sub-models. The walk follows `FieldSpec.model` references down the tree, and for +UNION-kind fields, extracts and walks each member's fields separately. + +`build_placement_registry` assigns Segment's output path from its entry point: +`entry_point_module` extracts `overture.schema.transportation`, `compute_output_dir` +strips the schema root and mirrors the remaining structure. Supplementary types get +module-derived paths with `types/` inserted under feature directories. + +**Reverse references.** `compute_reverse_references` walks Segment's fields and records +that Segment references `Subtype`, `Id`, `Sources`, and other types. These references +populate "Used By" sections: the `Subtype` enum page shows that Segment uses it. + +**Rendering.** The pipeline builds a `LinkContext` from Segment's output path and the +full registry. `render_feature` dispatches to `_expand_union_fields` because the spec is +a `UnionSpec`. 
Shared fields from `TransportationSegment` render as plain rows. +Variant-specific fields get italic tags: `` `road_class` *(Road)* ``. The renderer +formats each field's type via `format_type`, which resolves links through the +`LinkContext` -- `Subtype` gets a relative link to its enum page, `Id` links to its +NewType page. Constraints with `source=None` annotate field rows; constraints with named +sources appear on the source NewType's page instead. + +The example loader finds `pyproject.toml` in the transportation theme package, reads +`[examples.Segment]`, validates each example against the union alias (injecting literal +fields, stripping null fields from non-selected arms), flattens to dot-notation, and +orders by field position. + +The Jinja2 template assembles the field table, optional constraints section, examples, +and "Used By" partial into markdown. + +**Output.** The pipeline returns a `RenderedPage` with Segment's content, its output +path, and `is_feature=True`. The CLI prepends Docusaurus frontmatter and writes the +file. `_category_.json` files get generated for sidebar navigation. + +**The layering principle.** At every stage, the modules that do the work never reach +back up the dependency chain. Renderers consume specs and registries but never import +extractors. Extractors consume `analyze_type` but never import renderers. The type +analyzer imports nothing from the codegen package except `clean_docstring`. Any module +can be understood, tested, and modified by reading only the modules below it. From 75ce84c783fa7509e3625818a9df8ddb82a935f6 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Fri, 27 Feb 2026 16:11:59 -0800 Subject: [PATCH 21/38] fix(codegen): store all Literal args in TypeInfo TypeInfo.literal_value discarded multi-value Literals entirely (Literal["a", "b"] got None). Renamed to literal_values as a tuple of all args so consumers decide presentation. 
single_literal_value() preserves its contract: returns the value for single-arg Literals, None otherwise. Callers (example_loader, union_extraction) are unchanged. Multi-value Literals render as pipe-separated quoted values in markdown tables: `"a"` \| `"b"`. --- .../docs/walkthrough.md | 2 +- .../schema/codegen/markdown_type_format.py | 6 ++-- .../overture/schema/codegen/type_analyzer.py | 27 +++++++++--------- .../tests/codegen_test_support.py | 2 +- .../tests/test_markdown_type_format.py | 5 ++++ .../tests/test_type_analyzer.py | 28 +++++++++++-------- .../tests/test_type_placement.py | 2 +- 7 files changed, 43 insertions(+), 29 deletions(-) diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index 89fdc2f74..688c6c29e 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -3,7 +3,7 @@ Pydantic's serialization machinery destroys the vocabulary that documentation needs. The codegen recovers it. -Consider the transportation schema's `Segment` type -- a discriminated union of +Consider the transportation theme's `Segment` type -- a discriminated union of `RoadSegment`, `RailSegment`, and `WaterSegment`. All three share fields inherited from `TransportationSegment`. Each adds variant-specific fields. 
The discriminator field `subtype` carries a `Literal` value (`"road"`, `"rail"`, `"water"`) that selects the diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py index 0879e78fb..e5e0e9069 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -90,8 +90,10 @@ def format_type( ti = field.type_info qualifiers: list[str] = [] - if ti.kind == TypeKind.LITERAL and ti.literal_value is not None: - return f'`"{ti.literal_value}"`' + if ti.kind == TypeKind.LITERAL and ti.literal_values: + if len(ti.literal_values) == 1: + return f'`"{ti.literal_values[0]}"`' + return r" \| ".join(f'`"{v}"`' for v in ti.literal_values) link_name = _linked_type_name(ti) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index 495fc7faa..65a91230c 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -60,7 +60,7 @@ class TypeInfo: dict_key_type: TypeInfo | None = None dict_value_type: TypeInfo | None = None constraints: tuple[ConstraintSource, ...] = () - literal_value: object | None = None + literal_values: tuple[object, ...] | None = None source_type: type | None = None newtype_name: str | None = None newtype_ref: object | None = None @@ -127,7 +127,7 @@ def build_type_info( *, base_type: str, kind: TypeKind, - literal_value: object | None = None, + literal_values: tuple[object, ...] | None = None, source_type: type | None = None, union_members: tuple[type[BaseModel], ...] 
| None = None, ) -> TypeInfo: @@ -140,7 +140,7 @@ def build_type_info( dict_key_type=self.dict_key_type, dict_value_type=self.dict_value_type, constraints=tuple(self.constraints), - literal_value=literal_value, + literal_values=literal_values, source_type=source_type, newtype_name=self.outermost_newtype_name, newtype_ref=self.outermost_newtype_ref, @@ -271,18 +271,13 @@ def _classify_terminal(annotation: object, state: _UnwrapState) -> TypeInfo: kind=TypeKind.PRIMITIVE, ) - # Literal types (e.g., Literal["value"]) + # Literal types (e.g., Literal["value"] or Literal["a", "b"]) if get_origin(annotation) is Literal: args = get_args(annotation) - # Only expose literal_value for single-value Literals, which - # represent fixed constants (theme="buildings"). Multi-value - # Literals (Literal["a", "b"]) are enum-like and have no - # single default. - value = args[0] if len(args) == 1 else None return state.build_type_info( base_type="Literal", kind=TypeKind.LITERAL, - literal_value=value, + literal_values=tuple(args), ) if not isinstance(annotation, type): @@ -315,12 +310,18 @@ def single_literal_value(annotation: object) -> object | None: """Extract a single literal value from a type annotation, or None. Delegates to analyze_type for all unwrapping, then checks - whether the result is a single-value Literal. + whether the result is a single-value Literal. Multi-value + Literals return None — callers needing all values should use + ``analyze_type`` and read ``literal_values`` directly. 
""" try: ti = analyze_type(annotation) except (TypeError, UnsupportedUnionError): return None - if ti.kind == TypeKind.LITERAL: - return ti.literal_value + if ( + ti.kind == TypeKind.LITERAL + and ti.literal_values + and len(ti.literal_values) == 1 + ): + return ti.literal_values[0] return None diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py index 1dbc6549d..0e845edcf 100644 --- a/packages/overture-schema-codegen/tests/codegen_test_support.py +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -273,7 +273,7 @@ def assert_literal_field( """Assert a field is a single-value Literal with the expected value.""" field = find_field(spec, field_name) assert field.type_info.kind == TypeKind.LITERAL - assert field.type_info.literal_value == expected_value + assert field.type_info.literal_values == (expected_value,) def flat_specs_from_discovery( diff --git a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py index 7692d06b1..53a9a9463 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_type_format.py +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -42,6 +42,11 @@ def test_literal_renders_as_quoted_value(self) -> None: field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) assert format_type(field) == '`"places"`' + def test_multi_value_literal_renders_comma_separated(self) -> None: + ti = analyze_type(Literal["a", "b", "c"]) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + assert format_type(field) == '`"a"` \\| `"b"` \\| `"c"`' + def test_enum_without_context_renders_as_code(self) -> None: class Color(str, Enum): RED = "red" diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index 
a858e3f9c..065917e23 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -20,6 +20,7 @@ SnakeCaseString, ) from pydantic import BaseModel, Field, Tag +from typing_extensions import Sentinel @pytest.fixture() @@ -56,8 +57,6 @@ class TestAnalyzeTypeSentinel: @pytest.fixture() def missing_sentinel(self) -> object: - from typing_extensions import Sentinel - return Sentinel("MISSING") def test_sentinel_filtered_from_union(self, missing_sentinel: object) -> None: @@ -199,26 +198,33 @@ def test_annotated_without_constraints(self) -> None: class TestAnalyzeTypeLiteral: """Tests for Literal type analysis.""" - def test_literal_string_extracts_value(self) -> None: - """Literal["value"] returns TypeInfo with literal_value="value".""" + def test_literal_string_extracts_values(self) -> None: + """Literal["active"] stores the value in literal_values tuple.""" result = analyze_type(Literal["active"]) assert result.kind == TypeKind.LITERAL - assert result.literal_value == "active" + assert result.literal_values == ("active",) - def test_literal_int_extracts_value(self) -> None: - """Literal[42] returns TypeInfo with literal_value=42.""" + def test_literal_int_extracts_values(self) -> None: + """Literal[42] stores the value in literal_values tuple.""" result = analyze_type(Literal[42]) assert result.kind == TypeKind.LITERAL - assert result.literal_value == 42 + assert result.literal_values == (42,) + + def test_multi_value_literal_stores_all_args(self) -> None: + """Literal["a", "b"] stores all args in literal_values tuple.""" + result = analyze_type(Literal["a", "b"]) + + assert result.kind == TypeKind.LITERAL + assert result.literal_values == ("a", "b") - def test_optional_literal_extracts_value(self) -> None: + def test_optional_literal_extracts_values(self) -> None: """Optional[Literal["x"]] unwraps to Literal with is_optional set.""" result = analyze_type(Literal["x"] | None) assert 
result.kind == TypeKind.LITERAL - assert result.literal_value == "x" + assert result.literal_values == ("x",) assert result.is_optional is True @@ -569,7 +575,7 @@ def test_optional_union_sets_is_optional(self) -> None: class TestSingleLiteralValue: - """Tests for single_literal_value extraction.""" + """Tests for single_literal_value convenience accessor.""" def test_single_value_literal(self) -> None: """Literal["x"] returns the literal value.""" diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py index 17661daf1..1cb0d7e3a 100644 --- a/packages/overture-schema-codegen/tests/test_type_placement.py +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -22,6 +22,7 @@ SupplementarySpec, ) from overture.schema.codegen.type_collection import collect_all_supplementary_types +from pydantic import BaseModel _PRIMITIVE_NAMES, _GEOMETRY_NAMES = partition_primitive_and_geometry_names( _system_primitive @@ -144,7 +145,6 @@ class TestPlacementWithUnionSpec: def test_union_spec_gets_placement(self) -> None: """UnionSpec is placed alongside ModelSpec in the registry.""" - from pydantic import BaseModel class Base(BaseModel): name: str From fed46cf2938daac558aa82c0dfbc85da8043c9c3 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Sun, 1 Mar 2026 20:45:06 -0800 Subject: [PATCH 22/38] fix(codegen): track nested list depth in TypeInfo Replace TypeInfo.is_list: bool with list_depth: int so nested lists like list[NewType("Hierarchy", list[HierarchyItem])] are handled correctly. analyze_type increments list_depth for each list[...] layer instead of setting a boolean. An is_list property preserves the boolean API for depth-unaware consumers. Markdown renderer: format_type and format_underlying_type wrap list_depth times. _expandable_list_suffix returns "[]" per nesting level for dot-notation expansion. 
Constraint annotation matching strips all trailing "[]" suffixes instead of one. Union extraction: _type_identity uses list_depth (int) instead of is_list (bool) so fields with different nesting depths don't incorrectly deduplicate. Update design doc and walkthrough to reflect list_depth replacing the is_list boolean throughout TypeInfo, _UnwrapState, type formatting, and union deduplication. --- .../overture-schema-codegen/docs/design.md | 9 +++---- .../docs/walkthrough.md | 20 +++++++++------- .../schema/codegen/markdown_renderer.py | 6 ++--- .../schema/codegen/markdown_type_format.py | 24 +++++++++++++------ .../overture/schema/codegen/type_analyzer.py | 13 ++++++---- .../schema/codegen/union_extraction.py | 4 ++-- .../tests/test_markdown_type_format.py | 7 +++++- .../tests/test_type_analyzer.py | 22 +++++++++++++++++ .../tests/test_type_registry.py | 6 ++--- 9 files changed, 79 insertions(+), 32 deletions(-) diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md index d52ec931e..60be38223 100644 --- a/packages/overture-schema-codegen/docs/design.md +++ b/packages/overture-schema-codegen/docs/design.md @@ -119,13 +119,14 @@ layers in a fixed order, accumulating information into an `_UnwrapState`: 3. **Union**: Filters out `None` (marks optional), `Sentinel`, and `Literal` sentinel arms. If multiple concrete `BaseModel` arms remain, classifies as `UNION`; otherwise continues with the single remaining arm -4. **list / dict**: Sets collection flags, continues into element types +4. **list / dict**: Increments `list_depth` for each `list[...]` layer, sets dict flags, + continues into element types 5. 
**Terminal**: Classifies as `PRIMITIVE`, `LITERAL`, `ENUM`, `MODEL`, or `UNION` The result is `TypeInfo` -- a flat dataclass that fully describes the unwrapped type: -classification (`TypeKind`), optional/list/dict flags, accumulated constraints with -provenance, NewType names, source type, literal values, and (for UNION kind) the tuple -of concrete `BaseModel` member types. Dict types carry recursively analyzed `TypeInfo` +classification (`TypeKind`), optional/dict flags, `list_depth` (count of `list[...]` +layers), accumulated constraints with provenance, NewType names, source type, literal +values, and (for UNION kind) the tuple of concrete `BaseModel` member types. Dict types carry recursively analyzed `TypeInfo` for their key and value types. Multi-depth `Annotated` layers (common in practice, since NewTypes wrap `Annotated` diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index 688c6c29e..1bff215e7 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -142,7 +142,8 @@ immediately with the member tuple. Non-BaseModel multi-type unions raise The `Literal` filtering has a guard: when a union contains *only* Literal arms (like `Optional[Literal["x"]]`), the function keeps them rather than filtering everything out. -**list/dict** sets collection flags and continues into element types. Dict is the one +**list/dict** increments `list_depth` for each `list[...]` layer (so `list[list[str]]` +records depth 2), sets dict flags, and continues into element types. Dict is the one case where `analyze_type` recurses -- it calls itself for key and value types, storing the results as nested `TypeInfo` objects. @@ -183,8 +184,9 @@ rendering. ### _UnwrapState -The accumulator dataclass carries state across iterations: optional/list/dict flags, the -constraint list, both NewType name slots, and the captured description. 
Its +The accumulator dataclass carries state across iterations: optional/dict flags, +`list_depth` (incremented per `list[...]` layer), the constraint list, both NewType name +slots, and the captured description. Its `build_type_info` method assembles the final `TypeInfo` from accumulated state, freezing the constraint list into a tuple. @@ -345,7 +347,7 @@ base. These become shared `AnnotatedField` entries with `variant_sources=None`. Then it extracts each member: `RoadSegment`, `RailSegment`, `WaterSegment`. Fields not in the shared set are variant-specific, deduplicated by `(name, type_identity)` where -`type_identity` captures `base_type`, `kind`, `is_optional`, and `is_list`. If +`type_identity` captures `base_type`, `kind`, `is_optional`, and `list_depth`. If `RoadSegment` and `WaterSegment` both define a `width` field with the same type identity, the `AnnotatedField` accumulates both class names: `variant_sources=("RoadSegment", "WaterSegment")`. Fields unique to one member get a @@ -504,8 +506,9 @@ provenance rather than direct field reference. `format_type` handles the full range of field types. Single-value Literals render as `"value"` in backticks. Semantic NewTypes and enums/models get markdown links via `_resolve_type_link`, which checks the `LinkContext` registry and falls back to plain -code spans. Lists of linked types wrap with `` `list<` `` broken-backtick syntax when -the inner type carries a link. Dict types render as `` `map` ``. Qualifiers +code spans. Lists wrap `list_depth` times. Linked inner types use broken-backtick syntax +(`` `list<` `` ... `` `>` ``) built as a single wrapper to avoid adjacent backticks that +CommonMark would interpret as multi-backtick code span delimiters. Dict types render as `` `map` ``. Qualifiers (optional, list, map) append in parentheses. Union members format independently -- each gets its own link resolution, joined with @@ -538,8 +541,9 @@ spans. `render_feature` dispatches on spec type. 
`ModelSpec` gets `_expand_model_fields`, which walks the pre-populated `FieldSpec.model` tree and produces dot-notation rows. -`sources[0].dataset` appears as a single row in the flat field table, with the list -suffix `[]` appended to list-of-model fields. Expansion stops at fields marked with +`sources[0].dataset` appears as a single row in the flat field table, with `[]` +appended per nesting level to list-of-model fields (so a doubly-nested list gets +`[][]`). Expansion stops at fields marked with `starts_cycle`. `UnionSpec` gets `_expand_union_fields`, which adds italic variant tags to diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index 25a49da48..c67521b5f 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -245,13 +245,13 @@ def _annotate_field_constraints( def _expandable_list_suffix(field_spec: FieldSpec) -> str: - """Return ``"[]"`` when a field has a list-of-model that will be expanded inline.""" + """Return ``"[]"`` per nesting level for list-of-model fields expanded inline.""" if ( field_spec.type_info.is_list and field_spec.model and not field_spec.starts_cycle ): - return "[]" + return "[]" * field_spec.type_info.list_depth return "" @@ -282,7 +282,7 @@ def _annotate_top_level_constraints( name = row["name"] if "." 
in name: continue - field_name = name.removesuffix("[]") + field_name = name.split("[")[0] if field_name in constraint_notes: _annotate_constraint_notes(row, constraint_notes[field_name]) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py index e5e0e9069..b57c47f12 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -32,9 +32,19 @@ def _resolve_type_link(type_name: str, ctx: LinkContext | None = None) -> str: return f"`{type_name}`" -def _wrap_list(inner: str) -> str: - """Wrap an inner type string in list<...> markdown syntax.""" - return f"`list<`{inner}`>`" +def _wrap_list_n(inner: str, depth: int) -> str: + """Wrap an inner type string in ``list<...>`` markdown syntax *depth* times. + + Builds a single broken-backtick wrapper rather than nesting iteratively. + Iterative nesting creates adjacent backticks (`````) that CommonMark + interprets as multi-backtick code span delimiters. 
+ """ + return f"`{'list<' * depth}`{inner}`{'>' * depth}`" + + +def _plain_list_type(base: str, depth: int) -> str: + """Format a plain (unlinked) list type string for *depth* nesting levels.""" + return f"`{'list<' * depth}{base}{'>' * depth}`" def _linked_type_name(ti: TypeInfo) -> str | None: @@ -112,11 +122,11 @@ def format_type( if ti.is_list and link_name == ti.newtype_name: qualifiers.append("list") elif ti.is_list: - display = _wrap_list(display) + display = _wrap_list_n(display, ti.list_depth) else: base = resolve_type_name(ti, "markdown") if ti.is_list: - display = f"`list<{base}>`" + display = _plain_list_type(base, ti.list_depth) else: display = f"`{base}`" @@ -182,10 +192,10 @@ def format_underlying_type(ti: TypeInfo, ctx: LinkContext | None = None) -> str: if href: linked = _code_link(link_name, href) if ti.is_list: - return _wrap_list(linked) + return _wrap_list_n(linked, ti.list_depth) return linked base = link_name or resolve_type_name(ti, "markdown") if ti.is_list: - return f"`list<{base}>`" + return _plain_list_type(base, ti.list_depth) return f"`{base}`" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index 65a91230c..5559ff453 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -55,7 +55,7 @@ class TypeInfo: base_type: str kind: TypeKind is_optional: bool = False - is_list: bool = False + list_depth: int = 0 is_dict: bool = False dict_key_type: TypeInfo | None = None dict_value_type: TypeInfo | None = None @@ -67,6 +67,11 @@ class TypeInfo: union_members: tuple[type[BaseModel], ...] 
| None = None description: str | None = None + @property + def is_list(self) -> bool: + """Whether this type has any list wrapping.""" + return self.list_depth > 0 + def walk_type_info(ti: TypeInfo, visitor: Callable[[TypeInfo], None]) -> None: """Call *visitor* on *ti*, then recurse into dict key/value types. @@ -109,7 +114,7 @@ class _UnwrapState: """ is_optional: bool = False - is_list: bool = False + list_depth: int = 0 is_dict: bool = False dict_key_type: TypeInfo | None = None dict_value_type: TypeInfo | None = None @@ -135,7 +140,7 @@ def build_type_info( base_type=base_type, kind=kind, is_optional=self.is_optional, - is_list=self.is_list, + list_depth=self.list_depth, is_dict=self.is_dict, dict_key_type=self.dict_key_type, dict_value_type=self.dict_value_type, @@ -238,7 +243,7 @@ def analyze_type(annotation: object) -> TypeInfo: args = get_args(annotation) if not args: raise TypeError("Bare list without type argument is not supported") - state.is_list = True + state.list_depth += 1 annotation = args[0] continue diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py index 67ab5549a..572f0427a 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py @@ -76,13 +76,13 @@ def extract_discriminator( return disc_field_name, mapping or None -_TypeIdentity = tuple[str, TypeKind, bool, bool] +_TypeIdentity = tuple[str, TypeKind, bool, int] _FieldKey = tuple[str, _TypeIdentity] def _type_identity(ti: TypeInfo) -> _TypeIdentity: """Stable identity for dedup — excludes source_type which can vary across members.""" - return (ti.base_type, ti.kind, ti.is_optional, ti.is_list) + return (ti.base_type, ti.kind, ti.is_optional, ti.list_depth) def extract_union( diff --git 
a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py index 53a9a9463..cb0ed6358 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_type_format.py +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -72,6 +72,11 @@ def test_list_of_primitives(self) -> None: field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) assert format_type(field) == "`list`" + def test_nested_list_of_primitives(self) -> None: + ti = analyze_type(list[list[str]]) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + assert format_type(field) == "`list>`" + def test_registered_primitive_not_linked(self) -> None: ti = analyze_type(int32) field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) @@ -135,7 +140,7 @@ def test_list_of_union_adds_qualifier(self) -> None: ti = TypeInfo( base_type="_ModelA", kind=TypeKind.UNION, - is_list=True, + list_depth=1, union_members=(_ModelA, _ModelB), ) result = format_type(_make_union_field(ti)) diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index 065917e23..0e0b17fc6 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -130,6 +130,14 @@ def test_list_str_sets_is_list(self) -> None: assert result.is_optional is False assert result.is_list is True + def test_nested_list_sets_depth_2(self) -> None: + """list[list[str]] records two levels of nesting.""" + result = analyze_type(list[list[str]]) + + assert result.list_depth == 2 + assert result.base_type == "str" + assert result.kind == TypeKind.PRIMITIVE + class TestAnalyzeTypeComposite: """Tests for composite/nested type analysis.""" @@ -340,6 +348,20 @@ def test_plain_list_has_no_newtype_name(self) -> None: assert result.newtype_name is 
None assert result.is_list is True + def test_newtype_wrapping_list_of_models(self) -> None: + """list[NewType wrapping list[Model]] records depth 2.""" + + class _Item(BaseModel): + name: str + + Inner = NewType("Inner", Annotated[list[_Item], Field(min_length=1)]) + result = analyze_type(list[Inner]) + + assert result.list_depth == 2 + assert result.base_type == "Inner" + assert result.kind == TypeKind.MODEL + assert result.source_type is _Item + class TestConstraintProvenance: """Tests for flattened constraints with provenance tracking.""" diff --git a/packages/overture-schema-codegen/tests/test_type_registry.py b/packages/overture-schema-codegen/tests/test_type_registry.py index b969ed7f0..88237b534 100644 --- a/packages/overture-schema-codegen/tests/test_type_registry.py +++ b/packages/overture-schema-codegen/tests/test_type_registry.py @@ -132,9 +132,9 @@ def _make_type_info(self, **kwargs: object) -> TypeInfo: defaults.update(kwargs) return TypeInfo(**defaults) # type: ignore[arg-type] - def test_ignores_is_list(self) -> None: - """resolve_type_name returns the base type regardless of is_list.""" - ti = self._make_type_info(is_list=True) + def test_ignores_list_depth(self) -> None: + """resolve_type_name returns the base type regardless of list_depth.""" + ti = self._make_type_info(list_depth=1) assert resolve_type_name(ti, "markdown") == "string" def test_ignores_is_optional(self) -> None: From 101596f878d88666b417cdb8d08b449068f19a09 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 3 Mar 2026 13:00:16 -0800 Subject: [PATCH 23/38] fix(codegen): resolve type name collisions across themes Replace bare class name keys with TypeIdentity objects across all registries. Two types with the same __name__ from different modules (e.g., Places Address vs Addresses Address) now get separate registry entries and resolve to different output paths. 
TypeIdentity is a frozen dataclass pairing a unique Python object (class, NewType callable, or union annotation) with its display name. Equality and hashing delegate to object identity so lookups are collision-free regardless of display name. Changes across the pipeline: - ConstraintSource stores source_ref (NewType callable) and source_name instead of a bare name string - type_collection, path_assignment, link_computation, and reverse_references all key on TypeIdentity - primitive_extraction returns TypeIdentity instead of strings - Renderers construct TypeIdentity for link resolution - Each spec type exposes an identity property via _SourceTypeIdentityMixin (or directly for UnionSpec) --- .../codegen/field_constraint_description.py | 12 +- .../schema/codegen/link_computation.py | 17 +-- .../schema/codegen/markdown_pipeline.py | 23 ++-- .../schema/codegen/markdown_renderer.py | 45 ++++---- .../schema/codegen/markdown_type_format.py | 72 ++++++------ .../schema/codegen/model_extraction.py | 2 +- .../schema/codegen/path_assignment.py | 42 +++---- .../schema/codegen/primitive_extraction.py | 34 +++--- .../schema/codegen/reverse_references.py | 85 +++++++++----- .../src/overture/schema/codegen/specs.py | 58 +++++++++- .../overture/schema/codegen/type_analyzer.py | 24 ++-- .../schema/codegen/type_collection.py | 54 +++++---- .../tests/codegen_test_support.py | 18 +++ .../overture-schema-codegen/tests/conftest.py | 4 +- .../tests/test_constraint_description.py | 60 ++++++++++ .../tests/test_golden_markdown.py | 15 +-- .../tests/test_markdown_renderer.py | 109 ++++++++++++------ .../tests/test_markdown_type_format.py | 28 +++-- .../tests/test_model_extractor.py | 4 +- .../tests/test_primitive_extraction.py | 40 ++++++- .../tests/test_reverse_references.py | 51 ++++---- .../tests/test_specs.py | 82 ++++++++++++- .../tests/test_type_analyzer.py | 34 ++++-- .../tests/test_type_collection.py | 55 +++++++-- .../tests/test_type_placement.py | 55 +++++++-- 25 files changed, 721 
insertions(+), 302 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py index 96f2ca48b..05d116ca5 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py @@ -15,6 +15,7 @@ from overture.schema.system.ref import Reference from .docstring import first_docstring_line +from .specs import TypeIdentity from .type_analyzer import ConstraintSource __all__ = [ @@ -77,11 +78,11 @@ def _geometry_type_label(value: str) -> str: def describe_field_constraint( constraint: object, - link_fn: Callable[[str], str] | None = None, + link_fn: Callable[[TypeIdentity], str] | None = None, ) -> str: """Return a display string for a field-level constraint object. - *link_fn* resolves a type name to a markdown link string (e.g. + *link_fn* resolves a TypeIdentity to a markdown link string (e.g. `` [`Name`](path) ``). When None, names render as inline code. 
""" if isinstance(constraint, GeometryTypeConstraint): @@ -92,8 +93,9 @@ def describe_field_constraint( if isinstance(constraint, Reference): rel_value: str = constraint.relationship.value # type: ignore[assignment] rel_label = rel_value.replace("_", " ") - target = constraint.relatee.__name__ - target_str = link_fn(target) if link_fn else f"`{target}`" + target = constraint.relatee + target_id = TypeIdentity.of(target) + target_str = link_fn(target_id) if link_fn else f"`{target.__name__}`" return f"References {target_str} ({rel_label})" if isinstance(constraint, Interval): desc = _describe_interval(constraint) @@ -137,7 +139,7 @@ def constraint_pattern(constraint: object) -> str | None: def constraint_display_text( cs: ConstraintSource, - link_fn: Callable[[str], str] | None = None, + link_fn: Callable[[TypeIdentity], str] | None = None, ) -> str: """Build display text for a constraint, combining description/pattern when available.""" description = _constraint_class_description(cs.constraint) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py b/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py index cdf1b9a17..bb9275b71 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py @@ -4,6 +4,7 @@ from pathlib import PurePosixPath from .case_conversion import slug_filename +from .specs import TypeIdentity __all__ = ["LinkContext", "relative_link"] @@ -13,21 +14,21 @@ class LinkContext: """Placement context for resolving cross-directory markdown links.""" page_path: PurePosixPath - registry: dict[str, PurePosixPath] + registry: dict[TypeIdentity, PurePosixPath] - def resolve_link(self, name: str) -> str | None: - """Resolve *name* to a relative link if it exists in the registry.""" - if name in self.registry: - return relative_link(self.page_path, self.registry[name]) + def 
resolve_link(self, identity: TypeIdentity) -> str | None: + """Resolve *identity* to a relative link if it exists in the registry.""" + if identity in self.registry: + return relative_link(self.page_path, self.registry[identity]) return None - def resolve_link_or_slug(self, name: str) -> str: - """Resolve *name* to a relative link, falling back to a slug filename. + def resolve_link_or_slug(self, identity: TypeIdentity) -> str: + """Resolve *identity* to a relative link, falling back to a slug filename. Always returns a usable link string. Use when the caller needs a link regardless of whether the type has a registered page. """ - return self.resolve_link(name) or slug_filename(name) + return self.resolve_link(identity) or slug_filename(identity.name) def _is_normalized(path: PurePosixPath) -> bool: diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py index 0734a2ab9..9092dbba8 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py @@ -40,6 +40,7 @@ ModelSpec, NewTypeSpec, SupplementarySpec, + TypeIdentity, UnionSpec, ) from .type_collection import collect_all_supplementary_types @@ -82,15 +83,15 @@ def _load_model_examples( def _render_supplement( - name: str, + tid: TypeIdentity, spec: SupplementarySpec, - registry: dict[str, PurePosixPath], - reverse_refs: dict[str, list[UsedByEntry]], + registry: dict[TypeIdentity, PurePosixPath], + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], ) -> RenderedPage: """Render a single supplementary page (enum, NewType, or sub-model).""" - output_path = resolve_output_path(name, registry) + output_path = resolve_output_path(tid, registry) ctx = LinkContext(output_path, registry) - used_by = reverse_refs.get(name) + used_by = reverse_refs.get(tid) if isinstance(spec, 
EnumSpec): content = render_enum(spec, link_ctx=ctx, used_by=used_by) @@ -131,21 +132,19 @@ def generate_markdown_pages( pages: list[RenderedPage] = [] for spec in feature_specs: - output_path = registry[spec.name] + output_path = registry[spec.identity] ctx = LinkContext(output_path, registry) examples = _load_model_examples(spec) - used_by = reverse_refs.get(spec.name) + used_by = reverse_refs.get(spec.identity) content = render_feature(spec, link_ctx=ctx, examples=examples, used_by=used_by) pages.append(RenderedPage(content=content, path=output_path, is_feature=True)) - for name, supp_spec in all_specs.items(): - pages.append(_render_supplement(name, supp_spec, registry, reverse_refs)) + for tid, supp_spec in all_specs.items(): + pages.append(_render_supplement(tid, supp_spec, registry, reverse_refs)) pages.append( RenderedPage( - content=render_primitives_from_specs( - extract_primitives(primitive_names, _system_primitive) - ), + content=render_primitives_from_specs(extract_primitives(primitive_names)), path=PRIMITIVES_PAGE, ) ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index c67521b5f..102669359 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -14,7 +14,11 @@ from .example_loader import ExampleRecord from .field_constraint_description import constraint_display_text from .link_computation import LinkContext -from .markdown_type_format import format_type, format_underlying_type +from .markdown_type_format import ( + format_type, + format_underlying_type, + resolve_type_link, +) from .model_constraint_description import analyze_model_constraints from .reverse_references import UsedByEntry from .specs import ( @@ -25,6 +29,7 @@ ModelSpec, NewTypeSpec, PrimitiveSpec, + TypeIdentity, UnionSpec, ) 
from .type_analyzer import ( @@ -40,7 +45,7 @@ ] -_LinkFn = Callable[[str], str] +_LinkFn = Callable[[TypeIdentity], str] _TEMPLATES_DIR = Path(__file__).parent / "templates" / "markdown" @@ -209,20 +214,12 @@ def _annotate_constraint_notes( def _link_fn_from_ctx(ctx: LinkContext | None) -> _LinkFn: - r"""Build a name-to-markdown-link resolver from a LinkContext. + r"""Build a TypeIdentity-to-markdown-link resolver from a LinkContext. - Returns a function that resolves a type name to ``[`Name`](href)`` - when the name has a page in the registry, or plain ``\`Name\``` otherwise. + Returns a function that resolves a TypeIdentity to ``[`Name`](href)`` + when the identity has a page in the registry, or plain ``\`Name\``` otherwise. """ - - def resolve(name: str) -> str: - if ctx: - href = ctx.resolve_link(name) - if href: - return f"[`{name}`]({href})" - return f"`{name}`" - - return resolve + return functools.partial(resolve_type_link, ctx=ctx) def _annotate_field_constraints( @@ -238,7 +235,7 @@ def _annotate_field_constraints( notes = [ constraint_display_text(cs, link_fn=link_fn) for cs in field.type_info.constraints - if cs.source is None + if cs.source_ref is None ] if notes: _annotate_constraint_notes(row, notes) @@ -437,19 +434,20 @@ class _NewTypeConstraintRow: def _format_constraint( cs: ConstraintSource, - newtype_name: str, + newtype_ref: object, ctx: LinkContext | None = None, ) -> _NewTypeConstraintRow: """Format a ConstraintSource for display in a NewType page.""" display = constraint_display_text(cs) - if not cs.source or cs.source == newtype_name: + if cs.source_ref is None or cs.source_ref is newtype_ref: return _NewTypeConstraintRow(display=display) - source = cs.source - source_link = ctx.resolve_link(source) if ctx else None + assert cs.source_name is not None # source_ref and source_name are set together + source_identity = TypeIdentity(cs.source_ref, cs.source_name) + source_link = ctx.resolve_link(source_identity) if ctx else None return 
_NewTypeConstraintRow( - display=display, source=source, source_link=source_link + display=display, source=cs.source_name, source_link=source_link ) @@ -469,8 +467,8 @@ def _build_used_by_context( return None return [ { - "name": entry.name, - "link": link_ctx.resolve_link(entry.name) if link_ctx else None, + "name": entry.identity.name, + "link": link_ctx.resolve_link(entry.identity) if link_ctx else None, } for entry in used_by ] @@ -486,7 +484,8 @@ def render_newtype( ti = newtype_spec.type_info underlying = format_underlying_type(ti, link_ctx) constraints = [ - _format_constraint(cs, newtype_spec.name, link_ctx) for cs in ti.constraints + _format_constraint(cs, newtype_spec.source_type, link_ctx) + for cs in ti.constraints ] return template.render( diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py index b57c47f12..5498052e9 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -5,11 +5,16 @@ from pydantic import BaseModel from .link_computation import LinkContext -from .specs import FieldSpec +from .specs import FieldSpec, TypeIdentity from .type_analyzer import TypeInfo, TypeKind from .type_registry import is_semantic_newtype, resolve_type_name -__all__ = ["format_dict_type", "format_type", "format_underlying_type"] +__all__ = [ + "format_dict_type", + "format_type", + "format_underlying_type", + "resolve_type_link", +] def _code_link(name: str, href: str) -> str: @@ -17,8 +22,8 @@ def _code_link(name: str, href: str) -> str: return f"[`{name}`]({href})" -def _resolve_type_link(type_name: str, ctx: LinkContext | None = None) -> str: - """Resolve a type name to a linked code span or plain code span. 
+def resolve_type_link(identity: TypeIdentity, ctx: LinkContext | None = None) -> str: + """Resolve a TypeIdentity to a linked code span or plain code span. When *ctx* is provided, links only to types in the registry (types without pages render as inline code). Without context, renders as @@ -26,10 +31,10 @@ def _resolve_type_link(type_name: str, ctx: LinkContext | None = None) -> str: compute correct relative paths. """ if ctx: - href = ctx.resolve_link(type_name) + href = ctx.resolve_link(identity) if href: - return _code_link(type_name, href) - return f"`{type_name}`" + return _code_link(identity.name, href) + return f"`{identity.name}`" def _wrap_list_n(inner: str, depth: int) -> str: @@ -47,12 +52,13 @@ def _plain_list_type(base: str, depth: int) -> str: return f"`{'list<' * depth}{base}{'>' * depth}`" -def _linked_type_name(ti: TypeInfo) -> str | None: - """Return the name to use for a markdown link, or None for non-linked types.""" - if is_semantic_newtype(ti): - return ti.newtype_name - if ti.kind in (TypeKind.ENUM, TypeKind.MODEL): - return ti.base_type +def _linked_type_identity(ti: TypeInfo) -> TypeIdentity | None: + """Return the TypeIdentity to use for a markdown link, or None for non-linked types.""" + if is_semantic_newtype(ti) and ti.newtype_ref is not None: + assert ti.newtype_name is not None # guaranteed by is_semantic_newtype + return TypeIdentity(ti.newtype_ref, ti.newtype_name) + if ti.kind in (TypeKind.ENUM, TypeKind.MODEL) and ti.source_type is not None: + return TypeIdentity(ti.source_type, ti.base_type) return None @@ -87,9 +93,7 @@ def _format_union_members( while others render as plain code spans. *separator* is inserted between members (default is ``\\|`` for table-cell safety). 
""" - return separator.join( - _resolve_type_link(member.__name__, ctx) for member in members - ) + return separator.join(resolve_type_link(TypeIdentity.of(m), ctx) for m in members) def format_type( @@ -105,21 +109,21 @@ def format_type( return f'`"{ti.literal_values[0]}"`' return r" \| ".join(f'`"{v}"`' for v in ti.literal_values) - link_name = _linked_type_name(ti) + identity = _linked_type_identity(ti) if ti.kind == TypeKind.UNION and ti.union_members: display = _format_union_members(ti.union_members, ctx) if ti.is_list: qualifiers.append("list") elif ti.is_dict: - if link_name: - display = _resolve_type_link(link_name, ctx) + if identity: + display = resolve_type_link(identity, ctx) qualifiers.append("map") else: display = f"`{format_dict_type(ti)}`" - elif link_name: - display = _resolve_type_link(link_name, ctx) - if ti.is_list and link_name == ti.newtype_name: + elif identity: + display = resolve_type_link(identity, ctx) + if ti.is_list and identity.name == ti.newtype_name: qualifiers.append("list") elif ti.is_list: display = _wrap_list_n(display, ti.list_depth) @@ -149,11 +153,11 @@ def _linked_or_backticked(ti: TypeInfo, ctx: LinkContext | None) -> tuple[str, b ready for broken-backtick container syntax. When False, it is a raw name that the caller embeds inside backticks. 
""" - link_name = _linked_type_name(ti) - if link_name and ctx: - href = ctx.resolve_link(link_name) + identity = _linked_type_identity(ti) + if identity and ctx: + href = ctx.resolve_link(identity) if href: - return _code_link(link_name, href), True + return _code_link(identity.name, href), True return _markdown_type_name(ti), False @@ -178,24 +182,22 @@ def format_underlying_type(ti: TypeInfo, ctx: LinkContext | None = None) -> str: return f"`map<`{key_str}`,`{val_str}`>`" return f"`map<{key_str}, {val_str}>`" - # Only link enums and models — skip is_semantic_newtype to avoid + # Only link enums and models -- skip is_semantic_newtype to avoid # self-linking (this TypeInfo belongs to the NewType being rendered). - # Use source_type.__name__ rather than base_type: base_type may be - # the outermost NewType name when only one NewType wraps a class. - link_name = ( - ti.source_type.__name__ + identity = ( + TypeIdentity.of(ti.source_type) if ti.kind in (TypeKind.ENUM, TypeKind.MODEL) and ti.source_type else None ) - if link_name and ctx: - href = ctx.resolve_link(link_name) + if identity and ctx: + href = ctx.resolve_link(identity) if href: - linked = _code_link(link_name, href) + linked = _code_link(identity.name, href) if ti.is_list: return _wrap_list_n(linked, ti.list_depth) return linked - base = link_name or resolve_type_name(ti, "markdown") + base = identity.name if identity else resolve_type_name(ti, "markdown") if ti.is_list: return _plain_list_type(base, ti.list_depth) return f"`{base}`" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py index 0b195859d..81c360538 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py @@ -48,7 +48,7 @@ def _merge_field_metadata(type_info: TypeInfo, field_info: FieldInfo) -> 
TypeInf """ if not field_info.metadata: return type_info - extra = tuple(ConstraintSource(None, m) for m in field_info.metadata) + extra = tuple(ConstraintSource(None, None, m) for m in field_info.metadata) return dataclasses.replace(type_info, constraints=type_info.constraints + extra) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py index 020f6e44b..fecae8dc6 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py @@ -9,7 +9,7 @@ from .case_conversion import slug_filename from .module_layout import compute_output_dir, output_dir_for_entry_point -from .specs import FeatureSpec, SupplementarySpec +from .specs import FeatureSpec, SupplementarySpec, TypeIdentity __all__ = [ "GEOMETRY_PAGE", @@ -25,55 +25,57 @@ def build_placement_registry( feature_specs: Sequence[FeatureSpec], - all_specs: dict[str, SupplementarySpec], - primitive_names: list[str], - geometry_names: list[str], + all_specs: dict[TypeIdentity, SupplementarySpec], + primitive_names: list[TypeIdentity], + geometry_names: list[TypeIdentity], schema_root: str, -) -> dict[str, PurePosixPath]: - """Build a mapping from type names to output file paths. +) -> dict[TypeIdentity, PurePosixPath]: + """Build a mapping from TypeIdentity to output file paths. Uses module-mirrored output directories: output paths derive from the source Python module path relative to schema_root. 
""" - registry: dict[str, PurePosixPath] = _aggregate_page_entries( + registry: dict[TypeIdentity, PurePosixPath] = _aggregate_page_entries( primitive_names, geometry_names ) feature_dirs: set[PurePosixPath] = set() for spec in feature_specs: spec_dir = output_dir_for_entry_point(spec.entry_point, schema_root) - registry[spec.name] = _md_path(spec_dir, spec.name) + registry[spec.identity] = _md_path(spec_dir, spec.name) feature_dirs.add(spec_dir) - for name, supp_spec in all_specs.items(): - if name in registry: + for tid, supp_spec in all_specs.items(): + if tid in registry: continue source_module = getattr(supp_spec.source_type, "__module__", None) if source_module is None: continue output_dir = compute_output_dir(source_module, schema_root) output_dir = _nest_under_types(output_dir, feature_dirs) - registry[name] = _md_path(output_dir, name) + registry[tid] = _md_path(output_dir, tid.name) return registry def resolve_output_path( - type_name: str, - registry: dict[str, PurePosixPath] | None, + identity: TypeIdentity, + registry: dict[TypeIdentity, PurePosixPath] | None, ) -> PurePosixPath: """Look up a type's output path from the registry, with flat-file fallback.""" - if registry is not None and type_name in registry: - return registry[type_name] - return PurePosixPath(slug_filename(type_name)) + if registry is not None and identity in registry: + return registry[identity] + return PurePosixPath(slug_filename(identity.name)) def _aggregate_page_entries( - primitive_names: list[str], - geometry_names: list[str], -) -> dict[str, PurePosixPath]: + primitive_names: list[TypeIdentity], + geometry_names: list[TypeIdentity], +) -> dict[TypeIdentity, PurePosixPath]: """Pre-populate registry entries for types documented on aggregate pages.""" - entries: dict[str, PurePosixPath] = dict.fromkeys(primitive_names, PRIMITIVES_PAGE) + entries: dict[TypeIdentity, PurePosixPath] = dict.fromkeys( + primitive_names, PRIMITIVES_PAGE + ) entries.update(dict.fromkeys(geometry_names, 
GEOMETRY_PAGE)) return entries diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py index 351c5051c..ceb8ff7cd 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py @@ -4,7 +4,7 @@ from .docstring import first_docstring_line from .newtype_extraction import extract_newtype -from .specs import PrimitiveSpec +from .specs import PrimitiveSpec, TypeIdentity from .type_analyzer import TypeInfo, is_newtype __all__ = [ @@ -42,24 +42,18 @@ def extract_numeric_bounds(type_info: TypeInfo) -> Interval: def extract_primitives( - primitive_names: list[str], - primitive_module: object, + primitive_ids: list[TypeIdentity], ) -> list[PrimitiveSpec]: - """Extract specifications for numeric primitive types. - - Resolves each name against the given module, extracts its NewType - spec, docstring, and numeric bounds. 
- """ + """Extract specifications for numeric primitive types.""" specs: list[PrimitiveSpec] = [] - for name in primitive_names: - obj = getattr(primitive_module, name) - newtype_spec = extract_newtype(obj) + for tid in primitive_ids: + newtype_spec = extract_newtype(tid.obj) bounds = extract_numeric_bounds(newtype_spec.type_info) - description = first_docstring_line(getattr(obj, "__doc__", None)) - float_bits = _extract_float_bits(name) + description = first_docstring_line(getattr(tid.obj, "__doc__", None)) + float_bits = _extract_float_bits(tid.name) specs.append( PrimitiveSpec( - name=name, + name=tid.name, description=description, bounds=bounds, float_bits=float_bits, @@ -81,21 +75,21 @@ def _extract_float_bits(name: str) -> int | None: def partition_primitive_and_geometry_names( primitive_module: object, -) -> tuple[list[str], list[str]]: - """Discover primitive and geometry type names from a module's exports. +) -> tuple[list[TypeIdentity], list[TypeIdentity]]: + """Discover primitive and geometry types from a module's exports. NewType exports are numeric primitives. Non-constraint class/enum exports are geometry types. 
""" module_all: list[str] = getattr(primitive_module, "__all__", []) - primitives: list[str] = [] - geometries: list[str] = [] + primitives: list[TypeIdentity] = [] + geometries: list[TypeIdentity] = [] for name in module_all: obj = getattr(primitive_module, name) if is_newtype(obj): - primitives.append(name) + primitives.append(TypeIdentity(obj, name)) elif isinstance(obj, type) and not name.endswith("Constraint"): - geometries.append(name) + geometries.append(TypeIdentity(obj, name)) return primitives, geometries diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py index 66cfa5d05..1b4a75ff6 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py @@ -12,6 +12,7 @@ ModelSpec, NewTypeSpec, SupplementarySpec, + TypeIdentity, UnionSpec, ) from .type_analyzer import TypeInfo, TypeKind, walk_type_info @@ -34,108 +35,128 @@ class UsedByKind(Enum): class UsedByEntry: """A single 'used by' entry pointing to a referrer.""" - name: str + identity: TypeIdentity kind: UsedByKind def compute_reverse_references( feature_specs: Sequence[FeatureSpec], - all_specs: Mapping[str, SupplementarySpec], -) -> dict[str, list[UsedByEntry]]: + all_specs: Mapping[TypeIdentity, SupplementarySpec], +) -> dict[TypeIdentity, list[UsedByEntry]]: """Compute reverse references from types to their referrers. - Returns a dict mapping type names to lists of UsedByEntry, sorted with + Returns a dict mapping TypeIdentity to lists of UsedByEntry, sorted with models before NewTypes, alphabetical within each group. Parameters ---------- feature_specs : Sequence[FeatureSpec] Feature-level specs (ModelSpec or UnionSpec). 
- all_specs : Mapping[str, SupplementarySpec] + all_specs : Mapping[TypeIdentity, SupplementarySpec] Supplementary types (enums, newtypes, sub-models). Returns ------- - dict[str, list[UsedByEntry]] - Dict mapping type names to sorted lists of UsedByEntry. + dict[TypeIdentity, list[UsedByEntry]] + Dict mapping TypeIdentity to sorted lists of UsedByEntry. """ # Track references with sets to deduplicate - references: dict[str, set[UsedByEntry]] = {} + references: dict[TypeIdentity, set[UsedByEntry]] = {} - def add_reference(target: str, referrer_name: str, kind: UsedByKind) -> None: + def add_reference( + target: TypeIdentity, referrer: TypeIdentity, kind: UsedByKind + ) -> None: """Add a reference from referrer to target, with deduplication.""" - if target == referrer_name or target not in all_specs: + if target == referrer or target not in all_specs: return - references.setdefault(target, set()).add(UsedByEntry(referrer_name, kind)) + references.setdefault(target, set()).add(UsedByEntry(referrer, kind)) def collect_from_type_info( - ti: TypeInfo, referrer_name: str, referrer_kind: UsedByKind + ti: TypeInfo, referrer: TypeIdentity, referrer_kind: UsedByKind ) -> None: """Collect references from a TypeInfo.""" def _visit(node: TypeInfo) -> None: - if node.newtype_name is not None: - add_reference(node.newtype_name, referrer_name, referrer_kind) + if node.newtype_ref is not None and node.newtype_name is not None: + add_reference( + TypeIdentity(node.newtype_ref, node.newtype_name), + referrer, + referrer_kind, + ) if ( node.kind in (TypeKind.ENUM, TypeKind.MODEL) and node.source_type is not None ): - add_reference(node.source_type.__name__, referrer_name, referrer_kind) + add_reference( + TypeIdentity.of(node.source_type), + referrer, + referrer_kind, + ) if node.union_members is not None: for member_cls in node.union_members: - add_reference(member_cls.__name__, referrer_name, referrer_kind) + add_reference( + TypeIdentity.of(member_cls), + referrer, + referrer_kind, 
+ ) walk_type_info(ti, _visit) def collect_from_fields( - fields: list[FieldSpec], referrer_name: str, referrer_kind: UsedByKind + fields: list[FieldSpec], referrer: TypeIdentity, referrer_kind: UsedByKind ) -> None: """Collect references from model fields.""" for field_spec in fields: - collect_from_type_info(field_spec.type_info, referrer_name, referrer_kind) + collect_from_type_info(field_spec.type_info, referrer, referrer_kind) - def collect_from_model_spec(spec: ModelSpec) -> None: + def collect_from_model_spec(spec: ModelSpec, referrer: TypeIdentity) -> None: """Collect references from a ModelSpec.""" - collect_from_fields(spec.fields, spec.name, UsedByKind.MODEL) + collect_from_fields(spec.fields, referrer, UsedByKind.MODEL) def collect_from_union_spec(spec: UnionSpec) -> None: """Collect references from a UnionSpec.""" + referrer = spec.identity # Union features reference their members for member_cls in spec.members: - add_reference(member_cls.__name__, spec.name, UsedByKind.MODEL) + add_reference( + TypeIdentity.of(member_cls), + referrer, + UsedByKind.MODEL, + ) # Also walk fields for other supplementary types - collect_from_fields(spec.fields, spec.name, UsedByKind.MODEL) + collect_from_fields(spec.fields, referrer, UsedByKind.MODEL) - def collect_from_newtype_spec(spec: NewTypeSpec, referrer_name: str) -> None: + def collect_from_newtype_spec(spec: NewTypeSpec, referrer: TypeIdentity) -> None: """Collect references from a NewTypeSpec.""" - collect_from_type_info(spec.type_info, referrer_name, UsedByKind.NEWTYPE) + collect_from_type_info(spec.type_info, referrer, UsedByKind.NEWTYPE) # Collect inherited NewTypes from constraint sources for cs in spec.type_info.constraints: - if cs.source is not None: - add_reference(cs.source, referrer_name, UsedByKind.NEWTYPE) + if cs.source_ref is not None and cs.source_name is not None: + ref_id = TypeIdentity(cs.source_ref, cs.source_name) + add_reference(ref_id, referrer, UsedByKind.NEWTYPE) # Collect from features 
for spec in feature_specs: if isinstance(spec, ModelSpec): - collect_from_model_spec(spec) + collect_from_model_spec(spec, spec.identity) elif isinstance(spec, UnionSpec): collect_from_union_spec(spec) # Collect from supplementary specs (NewTypes and sub-models reference # other types; enums do not, so they need no processing here) - for name, supp_spec in all_specs.items(): + for tid, supp_spec in all_specs.items(): if isinstance(supp_spec, NewTypeSpec): - collect_from_newtype_spec(supp_spec, name) + collect_from_newtype_spec(supp_spec, tid) elif isinstance(supp_spec, ModelSpec): - collect_from_model_spec(supp_spec) + collect_from_model_spec(supp_spec, tid) # Sort sets into lists - result: dict[str, list[UsedByEntry]] = {} + result: dict[TypeIdentity, list[UsedByEntry]] = {} for target, ref_set in references.items(): - entries = sorted(ref_set, key=lambda e: (e.kind.value, e.name)) + entries = sorted(ref_set, key=lambda e: (e.kind.value, e.identity.name)) result[target] = entries return result diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py index 0d81f8e1f..69ff0d763 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py @@ -23,12 +23,57 @@ "NewTypeSpec", "PrimitiveSpec", "SupplementarySpec", + "TypeIdentity", "filter_model_classes", "is_model_class", "is_union_alias", ] +@dataclass(frozen=True, eq=False) +class TypeIdentity: + """Unique identity for a type in the codegen system. + + Pairs a unique Python object (class, NewType callable, or union + annotation) with its display name. Equality and hashing delegate + to ``obj`` identity so registry lookups work regardless of how + the display name was derived. 
+ """ + + obj: object + name: str + + @classmethod + def of(cls, obj: object) -> TypeIdentity: + """Derive a TypeIdentity from a named object (class, NewType, etc.).""" + assert obj is not None + return cls(obj, obj.__name__) # type: ignore[attr-defined] + + def __eq__(self, other: object) -> bool: + return isinstance(other, TypeIdentity) and self.obj is other.obj + + def __hash__(self) -> int: + return id(self.obj) + + +class _SourceTypeIdentityMixin: + """Mixin providing ``identity`` from ``source_type`` and ``name``. + + Shared by EnumSpec, ModelSpec, and NewTypeSpec -- each has a + ``source_type`` (the Python class/callable) and a ``name``. + UnionSpec uses ``source_annotation`` instead, so it defines its + own ``identity``. + """ + + source_type: object | None + name: str + + @property + def identity(self) -> TypeIdentity: + assert self.source_type is not None + return TypeIdentity(self.source_type, self.name) + + @dataclass class EnumMemberSpec: """Specification for an enum member.""" @@ -39,7 +84,7 @@ class EnumMemberSpec: @dataclass -class EnumSpec: +class EnumSpec(_SourceTypeIdentityMixin): """Specification for an Enum class.""" name: str @@ -73,9 +118,12 @@ class FeatureSpec(Protocol): @property def fields(self) -> list[FieldSpec]: ... + @property + def identity(self) -> TypeIdentity: ... 
+ @dataclass -class ModelSpec: +class ModelSpec(_SourceTypeIdentityMixin): """Specification for a Pydantic model.""" name: str @@ -117,9 +165,13 @@ def fields(self) -> list[FieldSpec]: """Plain field list for tree expansion and supplementary collection.""" return [af.field_spec for af in self.annotated_fields] + @property + def identity(self) -> TypeIdentity: + return TypeIdentity(self.source_annotation, self.name) + @dataclass -class NewTypeSpec: +class NewTypeSpec(_SourceTypeIdentityMixin): """Specification for a NewType.""" name: str diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index 5559ff453..6e723453c 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -44,7 +44,8 @@ class TypeKind(Enum): class ConstraintSource: """A constraint paired with the NewType that contributed it.""" - source: str | None + source_ref: object | None + source_name: str | None constraint: object @@ -105,12 +106,13 @@ def _is_union(origin: object) -> bool: class _UnwrapState: """Accumulated state from iterative type unwrapping. - Tracks two NewType names during unwrapping: + Tracks NewType names and refs during unwrapping: - ``outermost_newtype_name`` / ``outermost_newtype_ref``: the first NewType encountered, exposed as ``TypeInfo.newtype_name`` / ``newtype_ref``. - - ``last_newtype_name``: the most recently entered NewType, used both - as constraint provenance (which NewType contributed each constraint) - and as the resolved ``base_type`` for the terminal type. + - ``last_newtype_name``: the most recently entered NewType name, used + as the resolved ``base_type`` for the terminal type. + - ``last_newtype_ref``: the most recently entered NewType callable, + used as constraint provenance (which NewType contributed each constraint). 
""" is_optional: bool = False @@ -122,10 +124,13 @@ class _UnwrapState: outermost_newtype_name: str | None = None outermost_newtype_ref: object | None = None last_newtype_name: str | None = None + last_newtype_ref: object | None = None description: str | None = None - def add_constraint(self, source: str | None, constraint: object) -> None: - self.constraints.append(ConstraintSource(source, constraint)) + def add_constraint(self, constraint: object) -> None: + self.constraints.append( + ConstraintSource(self.last_newtype_ref, self.last_newtype_name, constraint) + ) def build_type_info( self, @@ -169,6 +174,7 @@ def analyze_type(annotation: object) -> TypeInfo: if is_newtype(annotation): name = annotation.__name__ # type: ignore[attr-defined] state.last_newtype_name = name + state.last_newtype_ref = annotation if state.outermost_newtype_name is None: state.outermost_newtype_name = name state.outermost_newtype_ref = annotation @@ -184,9 +190,9 @@ def analyze_type(annotation: object) -> TypeInfo: if c.description is not None and state.description is None: state.description = clean_docstring(c.description) for m in c.metadata: - state.add_constraint(state.last_newtype_name, m) + state.add_constraint(m) else: - state.add_constraint(state.last_newtype_name, c) + state.add_constraint(c) continue # Handle union types (X | None or Optional[X]) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py index 7373c0fb7..d1c64e4b1 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py @@ -10,7 +10,7 @@ from .enum_extraction import extract_enum from .model_extraction import extract_model from .newtype_extraction import extract_newtype -from .specs import FeatureSpec, FieldSpec, ModelSpec, SupplementarySpec +from .specs import FeatureSpec, 
FieldSpec, ModelSpec, SupplementarySpec, TypeIdentity from .type_analyzer import TypeInfo, TypeKind, analyze_type, is_newtype, walk_type_info from .type_registry import is_semantic_newtype @@ -19,24 +19,36 @@ def collect_all_supplementary_types( feature_specs: Sequence[FeatureSpec], -) -> dict[str, SupplementarySpec]: +) -> dict[TypeIdentity, SupplementarySpec]: """Collect supplementary types by walking expanded feature trees. Requires that expand_model_tree has been called on all feature specs first. Walks FieldSpec.model references for sub-models (already extracted), and extracts enums and NewTypes on first encounter. - Returns a dict mapping type names to extracted specs. + Returns a dict mapping TypeIdentity to extracted specs. Two types + with the same class name from different modules are keyed separately. """ - feature_names = {spec.name for spec in feature_specs} - all_specs: dict[str, SupplementarySpec] = {} - visited_models: set[str] = set() + feature_objs: set[object] = {spec.identity.obj for spec in feature_specs} + all_specs: dict[TypeIdentity, SupplementarySpec] = {} + visited_models: set[object] = set() + + def _register_newtype(newtype_ref: object, name: str) -> bool: + """Register a NewType if not already present. 
Returns True if registered.""" + nt_id = TypeIdentity(newtype_ref, name) + if nt_id in all_specs: + return False + all_specs[nt_id] = extract_newtype(newtype_ref) + return True def _collect_from_model(model_spec: ModelSpec) -> None: - if model_spec.name in visited_models or model_spec.name in feature_names: + if ( + model_spec.source_type in visited_models + or model_spec.source_type in feature_objs + ): return - visited_models.add(model_spec.name) - all_specs[model_spec.name] = model_spec + visited_models.add(model_spec.source_type) + all_specs[model_spec.identity] = model_spec _collect_from_fields(model_spec.fields) def _collect_inner_newtypes(newtype_ref: object) -> None: @@ -49,11 +61,11 @@ def _collect_inner_newtypes(newtype_ref: object) -> None: if is_newtype(annotation): inner_ti = analyze_type(annotation) if ( - inner_ti.newtype_name is not None + inner_ti.newtype_ref is not None + and inner_ti.newtype_name is not None and is_semantic_newtype(inner_ti) - and inner_ti.newtype_name not in all_specs ): - all_specs[inner_ti.newtype_name] = extract_newtype(annotation) + _register_newtype(inner_ti.newtype_ref, inner_ti.newtype_name) annotation = getattr(annotation, "__supertype__", None) continue break @@ -70,27 +82,29 @@ def _visit(node: TypeInfo) -> None: if node.kind == TypeKind.UNION and node.union_members: # Walk each member's fields for supplementary types. # Members that are also top-level feature specs are skipped - # by the feature_names guard in _collect_from_model. + # by the feature_objs guard in _collect_from_model. 
for member_cls in node.union_members: member_spec = extract_model(member_cls) _collect_from_model(member_spec) if node.kind == TypeKind.ENUM and node.source_type is not None: - name = node.source_type.__name__ - if name not in all_specs: - all_specs[name] = extract_enum(node.source_type) + enum_id = TypeIdentity.of(node.source_type) + if enum_id not in all_specs: + all_specs[enum_id] = extract_enum(node.source_type) # Semantic NewTypes always get extracted, including intermediate # NewTypes in the wrapping chain (e.g., Id wraps NoWhitespaceString - # wraps str — both Id and NoWhitespaceString get pages). + # wraps str -- both Id and NoWhitespaceString get pages). if ( node.newtype_ref is not None and node.newtype_name is not None and is_semantic_newtype(node) - and node.newtype_name not in all_specs ): - all_specs[node.newtype_name] = extract_newtype(node.newtype_ref) - _collect_inner_newtypes(node.newtype_ref) + newly_registered = _register_newtype( + node.newtype_ref, node.newtype_name + ) + if newly_registered: + _collect_inner_newtypes(node.newtype_ref) walk_type_info(ti, _visit) diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py index 0e845edcf..9f8b60e81 100644 --- a/packages/overture-schema-codegen/tests/codegen_test_support.py +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -5,6 +5,7 @@ from __future__ import annotations +from collections.abc import Mapping from difflib import unified_diff from enum import Enum from pathlib import Path @@ -18,6 +19,7 @@ EnumSpec, FieldSpec, ModelSpec, + TypeIdentity, UnionSpec, is_model_class, ) @@ -267,6 +269,22 @@ def find_member(spec: EnumSpec, name: str) -> EnumMemberSpec: return next(m for m in spec.members if m.name == name) +T = TypeVar("T") + + +def lookup_by_name(mapping: dict[TypeIdentity, T], name: str) -> T: + """Look up a value in a TypeIdentity-keyed dict by name, raising KeyError if absent.""" + 
for tid, value in mapping.items(): + if tid.name == name: + return value + raise KeyError(name) + + +def has_name(mapping: Mapping[TypeIdentity, object], name: str) -> bool: + """Check whether a TypeIdentity-keyed mapping contains a key with the given name.""" + return any(tid.name == name for tid in mapping) + + def assert_literal_field( spec: ModelSpec, field_name: str, expected_value: object ) -> None: diff --git a/packages/overture-schema-codegen/tests/conftest.py b/packages/overture-schema-codegen/tests/conftest.py index 6b19c7454..44c33cce3 100644 --- a/packages/overture-schema-codegen/tests/conftest.py +++ b/packages/overture-schema-codegen/tests/conftest.py @@ -73,9 +73,7 @@ def division_class(all_discovered_models: dict) -> type[BaseModel]: def primitives_markdown() -> str: """Render the primitives.md page from the system primitive module.""" primitive_names, _ = partition_primitive_and_geometry_names(_system_primitive) - return render_primitives_from_specs( - extract_primitives(primitive_names, _system_primitive) - ) + return render_primitives_from_specs(extract_primitives(primitive_names)) @pytest.fixture(scope="module") diff --git a/packages/overture-schema-codegen/tests/test_constraint_description.py b/packages/overture-schema-codegen/tests/test_constraint_description.py index 9579f6147..6a6e1a0ef 100644 --- a/packages/overture-schema-codegen/tests/test_constraint_description.py +++ b/packages/overture-schema-codegen/tests/test_constraint_description.py @@ -2,11 +2,14 @@ from annotated_types import Ge, Gt, Interval, Le, Lt, MaxLen, MinLen from overture.schema.codegen.field_constraint_description import ( + constraint_display_text, describe_field_constraint, ) from overture.schema.codegen.model_constraint_description import ( analyze_model_constraints, ) +from overture.schema.codegen.specs import TypeIdentity +from overture.schema.codegen.type_analyzer import ConstraintSource from overture.schema.system.model_constraint import ( FieldEqCondition, 
ForbidIfConstraint, @@ -398,3 +401,60 @@ class Other(Identified): assert ( describe_field_constraint(constraint) == "References `Other` (connects to)" ) + + def test_reference_link_fn_receives_type_identity(self) -> None: + """link_fn callback receives TypeIdentity wrapping the relatee class.""" + + class Target(Identified): + pass + + received: list[TypeIdentity] = [] + + def link_fn(tid: TypeIdentity) -> str: + received.append(tid) + return f"[`{tid.name}`](link)" + + constraint = Reference(Relationship.BELONGS_TO, Target) + result = describe_field_constraint(constraint, link_fn=link_fn) + + assert len(received) == 1 + assert received[0].obj is Target + assert received[0].name == "Target" + assert result == "References [`Target`](link) (belongs to)" + + def test_reference_link_fn_used_in_output(self) -> None: + """link_fn return value appears verbatim in the description.""" + + class Target(Identified): + pass + + constraint = Reference(Relationship.CONNECTS_TO, Target) + result = describe_field_constraint( + constraint, link_fn=lambda tid: f"[`{tid.name}`](path/to/target)" + ) + assert result == "References [`Target`](path/to/target) (connects to)" + + +class TestConstraintDisplayText: + """constraint_display_text forwards link_fn to describe_field_constraint.""" + + def test_link_fn_forwarded_to_reference_constraint(self) -> None: + """link_fn is forwarded when constraint is a Reference.""" + + class Target(Identified): + pass + + constraint = Reference(Relationship.BELONGS_TO, Target) + cs = ConstraintSource(source_ref=None, source_name=None, constraint=constraint) + + received: list[TypeIdentity] = [] + + def link_fn(tid: TypeIdentity) -> str: + received.append(tid) + return f"[`{tid.name}`](link)" + + result = constraint_display_text(cs, link_fn=link_fn) + + assert len(received) == 1 + assert received[0].obj is Target + assert result == "References [`Target`](link) (belongs to)" diff --git a/packages/overture-schema-codegen/tests/test_golden_markdown.py 
b/packages/overture-schema-codegen/tests/test_golden_markdown.py index 2ecb6939c..556b135b7 100644 --- a/packages/overture-schema-codegen/tests/test_golden_markdown.py +++ b/packages/overture-schema-codegen/tests/test_golden_markdown.py @@ -31,6 +31,7 @@ UsedByEntry, compute_reverse_references, ) +from overture.schema.codegen.specs import TypeIdentity from overture.schema.codegen.type_collection import collect_all_supplementary_types from pydantic import BaseModel @@ -59,7 +60,7 @@ @pytest.fixture(scope="module") -def reverse_refs() -> dict[str, list[UsedByEntry]]: +def reverse_refs() -> dict[TypeIdentity, list[UsedByEntry]]: """Compute reverse references for all test models.""" feature_specs = [] for model_class, _ in FEATURE_CASES: @@ -81,11 +82,11 @@ def test_feature_golden( model_class: type[BaseModel], golden_filename: str, update_golden: bool, - reverse_refs: dict[str, list[UsedByEntry]], + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], ) -> None: spec = extract_model(model_class) expand_model_tree(spec) - used_by = reverse_refs.get(spec.name) + used_by = reverse_refs.get(spec.identity) actual = render_feature(spec, used_by=used_by) assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) @@ -99,10 +100,10 @@ def test_enum_golden( enum_class: type[Enum], golden_filename: str, update_golden: bool, - reverse_refs: dict[str, list[UsedByEntry]], + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], ) -> None: spec = extract_enum(enum_class) - used_by = reverse_refs.get(spec.name) + used_by = reverse_refs.get(spec.identity) actual = render_enum(spec, used_by=used_by) assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) @@ -116,9 +117,9 @@ def test_newtype_golden( newtype_callable: object, golden_filename: str, update_golden: bool, - reverse_refs: dict[str, list[UsedByEntry]], + reverse_refs: dict[TypeIdentity, list[UsedByEntry]], ) -> None: spec = extract_newtype(newtype_callable) - used_by = reverse_refs.get(spec.name) 
+ used_by = reverse_refs.get(spec.identity) actual = render_newtype(spec, used_by=used_by) assert_golden(actual, GOLDEN_DIR / golden_filename, update=update_golden) diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index 356c3cb3a..612a2827e 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -13,7 +13,9 @@ FeatureBase, FeatureWithAddress, FeatureWithSources, + Instrument, SimpleModel, + SourceItem, Sources, TreeNode, Venue, @@ -40,6 +42,7 @@ EnumSpec, FieldSpec, PrimitiveSpec, + TypeIdentity, ) from overture.schema.codegen.type_analyzer import ConstraintSource from overture.schema.system.field_constraint import ( @@ -50,7 +53,7 @@ from overture.schema.system.model_constraint import no_extra_fields from overture.schema.system.primitive import int32 from overture.schema.system.ref import Id -from overture.schema.system.string import HexColor +from overture.schema.system.string import HexColor, NoWhitespaceString from pydantic import BaseModel, Field _FLAT_MEMBER = EnumMemberSpec(name="FLAT", value="flat", description=None) @@ -598,7 +601,11 @@ def test_venue_reference_links_when_context_available(self) -> None: expand_model_tree(spec) ctx = LinkContext( page_path=PurePosixPath("music/venue.md"), - registry={"Instrument": PurePosixPath("music/instrument.md")}, + registry={ + TypeIdentity(Instrument, "Instrument"): PurePosixPath( + "music/instrument.md" + ) + }, ) result = render_feature(spec, link_ctx=ctx) @@ -805,10 +812,14 @@ class ModelWithColor(BaseModel): spec = extract_model(ModelWithColor) page_path = PurePosixPath("buildings/building/building.md") - registry = { - "HexColor": PurePosixPath("types/strings/hex_color.md"), - } - ctx = LinkContext(page_path, registry) + ctx = LinkContext( + page_path, + { + TypeIdentity(HexColor, "HexColor"): PurePosixPath( + 
"types/strings/hex_color.md" + ) + }, + ) result = render_feature(spec, link_ctx=ctx) @@ -827,10 +838,14 @@ class ModelWithRoof(BaseModel): spec = extract_model(ModelWithRoof) page_path = PurePosixPath("buildings/building/building.md") - registry = { - "RoofShape": PurePosixPath("buildings/roof_shape.md"), - } - ctx = LinkContext(page_path, registry) + ctx = LinkContext( + page_path, + { + TypeIdentity(RoofShape, "RoofShape"): PurePosixPath( + "buildings/roof_shape.md" + ) + }, + ) result = render_feature(spec, link_ctx=ctx) @@ -849,10 +864,14 @@ class ModelWithClass(BaseModel): spec = extract_model(ModelWithClass) page_path = PurePosixPath("buildings/building/building.md") - registry = { - "BuildingClass": PurePosixPath("buildings/building/building_class.md"), - } - ctx = LinkContext(page_path, registry) + ctx = LinkContext( + page_path, + { + TypeIdentity(BuildingClass, "BuildingClass"): PurePosixPath( + "buildings/building/building_class.md" + ) + }, + ) result = render_feature(spec, link_ctx=ctx) @@ -876,10 +895,14 @@ def test_newtype_underlying_type_linked_via_registry(self) -> None: """NewType header links underlying model type through placement registry.""" spec = extract_newtype(Sources) page_path = PurePosixPath("types/references/sources.md") - registry = { - "SourceItem": PurePosixPath("types/references/source_item.md"), - } - ctx = LinkContext(page_path, registry) + ctx = LinkContext( + page_path, + { + TypeIdentity(SourceItem, "SourceItem"): PurePosixPath( + "types/references/source_item.md" + ) + }, + ) result = render_newtype(spec, link_ctx=ctx) @@ -889,8 +912,7 @@ def test_newtype_underlying_type_not_linked_when_absent(self) -> None: """Underlying type stays backtick-only when missing from registry.""" spec = extract_newtype(Sources) page_path = PurePosixPath("types/references/sources.md") - registry: dict[str, PurePosixPath] = {} - ctx = LinkContext(page_path, registry) + ctx = LinkContext(page_path, {}) result = render_newtype(spec, link_ctx=ctx) 
@@ -902,7 +924,7 @@ def test_newtype_provenance_link_uses_registry(self) -> None: spec = extract_newtype(Id) page_path = PurePosixPath("types/references/id.md") registry = { - "NoWhitespaceString": PurePosixPath( + TypeIdentity(NoWhitespaceString, "NoWhitespaceString"): PurePosixPath( "types/strings/no_whitespace_string.md" ), } @@ -1212,37 +1234,45 @@ class TestFormatConstraintDisplay: def test_description_and_pattern(self) -> None: """Constraint with docstring and pattern renders both.""" - cs = ConstraintSource(source=None, constraint=CountryCodeAlpha2Constraint()) - result = _format_constraint(cs, "CountryCodeAlpha2") + cs = ConstraintSource( + source_ref=None, source_name=None, constraint=CountryCodeAlpha2Constraint() + ) + result = _format_constraint(cs, None) assert "Allows only ISO 3166-1 alpha-2 country codes." in result.display assert "`CountryCodeAlpha2Constraint`" in result.display assert "pattern: `^[A-Z]{2}$`" in result.display def test_description_without_pattern(self) -> None: """Constraint with docstring but no pattern renders description only.""" - cs = ConstraintSource(source=None, constraint=JsonPointerConstraint()) - result = _format_constraint(cs, "JsonPointer") + cs = ConstraintSource( + source_ref=None, source_name=None, constraint=JsonPointerConstraint() + ) + result = _format_constraint(cs, None) assert "Allows only valid JSON Pointer values (RFC 6901)." 
in result.display assert "`JsonPointerConstraint`" in result.display assert "pattern" not in result.display def test_no_description_falls_through(self) -> None: """Plain string metadata has no docstring and falls through.""" - cs = ConstraintSource(source=None, constraint="plain string metadata") - result = _format_constraint(cs, "SomeType") + cs = ConstraintSource( + source_ref=None, source_name=None, constraint="plain string metadata" + ) + result = _format_constraint(cs, None) assert result.display == "`plain string metadata`" def test_annotated_types_uses_operator_notation_not_docstring(self) -> None: """annotated-types constraints use operator notation, not their __doc__.""" - cs = ConstraintSource(source=None, constraint=Ge(ge=0)) - result = _format_constraint(cs, "SomeType") + cs = ConstraintSource(source_ref=None, source_name=None, constraint=Ge(ge=0)) + result = _format_constraint(cs, None) assert result.display == "`≥ 0`" assert "Ge(ge=x)" not in result.display def test_constraint_class_not_linked(self) -> None: """Constraint class name stays in backticks (no pages generated for constraints).""" - cs = ConstraintSource(source=None, constraint=CountryCodeAlpha2Constraint()) - result = _format_constraint(cs, "CountryCodeAlpha2") + cs = ConstraintSource( + source_ref=None, source_name=None, constraint=CountryCodeAlpha2Constraint() + ) + result = _format_constraint(cs, None) assert "`CountryCodeAlpha2Constraint`" in result.display assert "[`CountryCodeAlpha2Constraint`](" not in result.display @@ -1276,9 +1306,16 @@ def test_entries_render_without_links_when_no_context( render_fn: Callable[..., str], ) -> None: """Without LinkContext, 'Used By' entries render as inline code.""" + _building = object() + _building_id = object() used_by = [ - UsedByEntry(name="Building", kind=UsedByKind.MODEL), - UsedByEntry(name="BuildingId", kind=UsedByKind.NEWTYPE), + UsedByEntry( + identity=TypeIdentity(_building, "Building"), kind=UsedByKind.MODEL + ), + UsedByEntry( + 
identity=TypeIdentity(_building_id, "BuildingId"), + kind=UsedByKind.NEWTYPE, + ), ] result = render_fn(spec_factory(), used_by=used_by) @@ -1321,11 +1358,13 @@ def test_link_context_uses_registry( expected_link: str, ) -> None: """Used-by entries resolve links through placement registry.""" + _building = object() + _building_identity = TypeIdentity(_building, "Building") registry = { - "Building": PurePosixPath("buildings/building/building.md"), + _building_identity: PurePosixPath("buildings/building/building.md"), } ctx = LinkContext(page_path, registry) - used_by = [UsedByEntry(name="Building", kind=UsedByKind.MODEL)] + used_by = [UsedByEntry(identity=_building_identity, kind=UsedByKind.MODEL)] result = render_fn(spec_factory(), link_ctx=ctx, used_by=used_by) diff --git a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py index cb0ed6358..9d6bf34ce 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_type_format.py +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -10,7 +10,7 @@ format_type, format_underlying_type, ) -from overture.schema.codegen.specs import FieldSpec +from overture.schema.codegen.specs import FieldSpec, TypeIdentity from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind, analyze_type from overture.schema.system.primitive import int32 from pydantic import BaseModel @@ -63,7 +63,9 @@ class Color(str, Enum): field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) ctx = LinkContext( page_path=PurePosixPath("buildings/building/building.md"), - registry={"Color": PurePosixPath("types/enums/color.md")}, + registry={ + TypeIdentity(Color, "Color"): PurePosixPath("types/enums/color.md") + }, ) assert format_type(field, ctx) == "[`Color`](../../types/enums/color.md)" @@ -121,8 +123,12 @@ def test_union_with_link_context_links_each_member(self) -> None: ctx = LinkContext( 
page_path=PurePosixPath("theme/feature/feature.md"), registry={ - "_ModelA": PurePosixPath("theme/feature/types/model_a.md"), - "_ModelB": PurePosixPath("theme/feature/types/model_b.md"), + TypeIdentity(_ModelA, "_ModelA"): PurePosixPath( + "theme/feature/types/model_a.md" + ), + TypeIdentity(_ModelB, "_ModelB"): PurePosixPath( + "theme/feature/types/model_b.md" + ), }, ) result = format_type(_make_union_field(ti), ctx) @@ -160,7 +166,11 @@ def test_union_partial_links(self) -> None: ti = analyze_type(_ModelA | _ModelB) ctx = LinkContext( page_path=PurePosixPath("theme/feature/feature.md"), - registry={"_ModelA": PurePosixPath("theme/feature/types/model_a.md")}, + registry={ + TypeIdentity(_ModelA, "_ModelA"): PurePosixPath( + "theme/feature/types/model_a.md" + ) + }, ) result = format_type(_make_union_field(ti), ctx) assert "[`_ModelA`](types/model_a.md)" in result @@ -182,8 +192,12 @@ def test_union_with_link_context(self) -> None: ctx = LinkContext( page_path=PurePosixPath("types/my_union.md"), registry={ - "_ModelA": PurePosixPath("theme/feature/types/model_a.md"), - "_ModelB": PurePosixPath("theme/feature/types/model_b.md"), + TypeIdentity(_ModelA, "_ModelA"): PurePosixPath( + "theme/feature/types/model_a.md" + ), + TypeIdentity(_ModelB, "_ModelB"): PurePosixPath( + "theme/feature/types/model_b.md" + ), }, ) result = format_underlying_type(ti, ctx) diff --git a/packages/overture-schema-codegen/tests/test_model_extractor.py b/packages/overture-schema-codegen/tests/test_model_extractor.py index e3ba788c6..eccfa8523 100644 --- a/packages/overture-schema-codegen/tests/test_model_extractor.py +++ b/packages/overture-schema-codegen/tests/test_model_extractor.py @@ -464,7 +464,7 @@ def test_geometry_type_constraint_extracted(self) -> None: assert GeometryTypeConstraint in constraint_types def test_geometry_type_constraint_has_null_source(self) -> None: - """Constraints from field_info.metadata have source=None (not from a NewType).""" + """Constraints from 
field_info.metadata have source_ref=None (not from a NewType).""" spec = extract_model(Venue) geometry_field = find_field(spec, "geometry") @@ -474,7 +474,7 @@ def test_geometry_type_constraint_has_null_source(self) -> None: if isinstance(cs.constraint, GeometryTypeConstraint) ] assert len(geo_constraints) == 1 - assert geo_constraints[0].source is None + assert geo_constraints[0].source_ref is None def test_metadata_constraints_not_duplicated(self) -> None: """Fields where Pydantic preserves Annotated don't get duplicate constraints. diff --git a/packages/overture-schema-codegen/tests/test_primitive_extraction.py b/packages/overture-schema-codegen/tests/test_primitive_extraction.py index 8ed54261d..2610e4f06 100644 --- a/packages/overture-schema-codegen/tests/test_primitive_extraction.py +++ b/packages/overture-schema-codegen/tests/test_primitive_extraction.py @@ -2,13 +2,51 @@ from typing import Annotated, NewType +import overture.schema.system.primitive as _system_primitive from overture.schema.codegen.newtype_extraction import extract_newtype -from overture.schema.codegen.primitive_extraction import extract_numeric_bounds +from overture.schema.codegen.primitive_extraction import ( + extract_numeric_bounds, + extract_primitives, + partition_primitive_and_geometry_names, +) +from overture.schema.codegen.specs import TypeIdentity from overture.schema.codegen.type_analyzer import analyze_type from overture.schema.system.primitive import float32, int32, int64, uint8 from pydantic import Field +class TestPartitionPrimitiveAndGeometryNames: + """Tests for partition_primitive_and_geometry_names function.""" + + def test_returns_type_identities(self) -> None: + prims, geoms = partition_primitive_and_geometry_names(_system_primitive) + assert all(isinstance(p, TypeIdentity) for p in prims) + assert all(isinstance(g, TypeIdentity) for g in geoms) + + def test_identity_obj_is_actual_callable(self) -> None: + prims, _ = 
partition_primitive_and_geometry_names(_system_primitive) + int32_id = next(p for p in prims if p.name == "int32") + assert int32_id.obj is _system_primitive.int32 + + +class TestExtractPrimitives: + """Tests for extract_primitives function.""" + + def test_accepts_type_identities(self) -> None: + prims, _ = partition_primitive_and_geometry_names(_system_primitive) + specs = extract_primitives(prims) + assert len(specs) > 0 + names = [s.name for s in specs] + assert "int32" in names + + def test_extracts_bounds(self) -> None: + prims, _ = partition_primitive_and_geometry_names(_system_primitive) + specs = extract_primitives(prims) + int32_spec = next(s for s in specs if s.name == "int32") + assert int32_spec.bounds.ge == -(2**31) + assert int32_spec.bounds.le == 2**31 - 1 + + class TestExtractNumericBounds: """Tests for extract_numeric_bounds function.""" diff --git a/packages/overture-schema-codegen/tests/test_reverse_references.py b/packages/overture-schema-codegen/tests/test_reverse_references.py index fdc33eb26..1502aba2d 100644 --- a/packages/overture-schema-codegen/tests/test_reverse_references.py +++ b/packages/overture-schema-codegen/tests/test_reverse_references.py @@ -9,6 +9,8 @@ RoadSegment, TreeNode, Venue, + has_name, + lookup_by_name, make_union_spec, ) from overture.schema.codegen.model_extraction import expand_model_tree, extract_model @@ -17,6 +19,7 @@ UsedByKind, compute_reverse_references, ) +from overture.schema.codegen.specs import TypeIdentity from overture.schema.codegen.type_collection import collect_all_supplementary_types from overture.schema.system.ref import Id from overture.schema.system.string import NoWhitespaceString @@ -41,33 +44,34 @@ def test_model_referencing_type_produces_used_by_entry( expand_model_tree(model_spec) all_specs = collect_all_supplementary_types([model_spec]) - assert target_name in all_specs + assert has_name(all_specs, target_name) result = compute_reverse_references([model_spec], all_specs) - assert target_name 
in result - entries = result[target_name] + entries = lookup_by_name(result, target_name) assert len(entries) == 1 - assert entries[0].name == model_name + assert entries[0].identity.name == model_name assert entries[0].kind == UsedByKind.MODEL def test_newtype_inheriting_from_newtype_produces_used_by_entry() -> None: """NewType inheriting constraints from another NewType produces a 'used by' entry.""" # Id wraps NoWhitespaceString, which is also a NewType - # When we extract Id, its constraints include ConstraintSource(source="NoWhitespaceString", ...) + # When we extract Id, its constraints include ConstraintSource(source_ref=NoWhitespaceString, ...) id_spec = extract_newtype(Id) nws_spec = extract_newtype(NoWhitespaceString) - all_specs = {"Id": id_spec, "NoWhitespaceString": nws_spec} + all_specs = { + TypeIdentity(Id, "Id"): id_spec, + TypeIdentity(NoWhitespaceString, "NoWhitespaceString"): nws_spec, + } result = compute_reverse_references([], all_specs) # NoWhitespaceString should have a used_by entry from Id - assert "NoWhitespaceString" in result - entries = result["NoWhitespaceString"] + entries = lookup_by_name(result, "NoWhitespaceString") assert len(entries) == 1 - assert entries[0].name == "Id" + assert entries[0].identity.name == "Id" assert entries[0].kind == UsedByKind.NEWTYPE @@ -84,14 +88,13 @@ def test_union_members_have_used_by_entries() -> None: # Extract the member road_spec = extract_model(RoadSegment) expand_model_tree(road_spec) - all_specs = {"RoadSegment": road_spec} + all_specs = {TypeIdentity(RoadSegment, "RoadSegment"): road_spec} result = compute_reverse_references([union_spec], all_specs) - assert "RoadSegment" in result - entries = result["RoadSegment"] + entries = lookup_by_name(result, "RoadSegment") assert len(entries) == 1 - assert entries[0].name == "TestSegment" + assert entries[0].identity.name == "TestSegment" assert entries[0].kind == UsedByKind.MODEL @@ -101,12 +104,13 @@ def test_self_references_filtered_out() -> None: 
expand_model_tree(tree_spec) # Manually add TreeNode to all_specs to test self-reference filtering - all_specs = {"TreeNode": tree_spec} + all_specs = {TypeIdentity(TreeNode, "TreeNode"): tree_spec} result = compute_reverse_references([tree_spec], all_specs) # TreeNode should not appear in result since it only references itself - assert "TreeNode" not in result + with pytest.raises(KeyError): + lookup_by_name(result, "TreeNode") def test_deduplication_same_type_multiple_fields() -> None: @@ -117,15 +121,14 @@ def test_deduplication_same_type_multiple_fields() -> None: expand_model_tree(venue_spec) all_specs = collect_all_supplementary_types([instrument_spec, venue_spec]) - assert "Id" in all_specs + assert has_name(all_specs, "Id") result = compute_reverse_references([instrument_spec, venue_spec], all_specs) - assert "Id" in result - entries = result["Id"] + entries = lookup_by_name(result, "Id") # Both Instrument and Venue reference Id assert len(entries) == 2 - names = {e.name for e in entries} + names = {e.identity.name for e in entries} assert names == {"Instrument", "Venue"} # All should be MODELs assert all(e.kind == UsedByKind.MODEL for e in entries) @@ -147,19 +150,19 @@ def test_sorting_models_before_newtypes() -> None: # Add the CustomId NewType which references Id custom_id_spec = extract_newtype(CustomId) - all_specs["CustomId"] = custom_id_spec + all_specs[TypeIdentity(CustomId, "CustomId")] = custom_id_spec result = compute_reverse_references([instrument_spec, venue_spec], all_specs) # Id should have entries from both Instrument and Venue (MODELs) and CustomId (NEWTYPE) - entries = result["Id"] + entries = lookup_by_name(result, "Id") assert len(entries) == 3 # Check sorting: MODELs first, then NEWTYPE # Within MODELs: alphabetical (Instrument, Venue) assert entries[0].kind == UsedByKind.MODEL - assert entries[0].name == "Instrument" + assert entries[0].identity.name == "Instrument" assert entries[1].kind == UsedByKind.MODEL - assert entries[1].name 
== "Venue" + assert entries[1].identity.name == "Venue" assert entries[2].kind == UsedByKind.NEWTYPE - assert entries[2].name == "CustomId" + assert entries[2].identity.name == "CustomId" diff --git a/packages/overture-schema-codegen/tests/test_specs.py b/packages/overture-schema-codegen/tests/test_specs.py index 258fbbfd9..8e78f6692 100644 --- a/packages/overture-schema-codegen/tests/test_specs.py +++ b/packages/overture-schema-codegen/tests/test_specs.py @@ -2,13 +2,22 @@ from typing import Annotated -from codegen_test_support import STR_TYPE, make_union_spec +import pytest +from codegen_test_support import ( + STR_TYPE, + InstrumentFamily, + SimpleModel, + make_union_spec, +) from overture.schema.codegen.model_extraction import extract_model from overture.schema.codegen.specs import ( AnnotatedField, + EnumSpec, FeatureSpec, FieldSpec, ModelSpec, + NewTypeSpec, + TypeIdentity, is_union_alias, ) from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind @@ -223,3 +232,74 @@ def test_fields_property_returns_plain_field_specs(self) -> None: ], ) assert spec.fields == [fs1, fs2] + + +class TestTypeIdentity: + def test_frozen(self) -> None: + ti = TypeIdentity(obj=int, name="int") + with pytest.raises(AttributeError): + ti.obj = str # type: ignore[misc] + + def test_same_obj_equal(self) -> None: + a = TypeIdentity(obj=int, name="int") + b = TypeIdentity(obj=int, name="integer") + assert a == b + + def test_same_obj_same_hash(self) -> None: + a = TypeIdentity(obj=int, name="int") + b = TypeIdentity(obj=int, name="integer") + assert hash(a) == hash(b) + + def test_different_obj_not_equal(self) -> None: + a = TypeIdentity(obj=int, name="int") + b = TypeIdentity(obj=str, name="int") + assert a != b + + def test_works_as_dict_key(self) -> None: + ti = TypeIdentity(obj=int, name="int") + d = {ti: "value"} + assert d[TypeIdentity(obj=int, name="different")] == "value" + + def test_not_equal_to_non_identity(self) -> None: + ti = TypeIdentity(obj=int, name="int") + 
non_identity_type: object = int + non_identity_str: object = "int" + assert ti != non_identity_type + assert ti != non_identity_str + + +class TestSpecIdentity: + def test_model_spec_identity(self) -> None: + spec = ModelSpec(name="Foo", description=None, source_type=SimpleModel) + ident = spec.identity + assert isinstance(ident, TypeIdentity) + assert ident.obj is SimpleModel + assert ident.name == "Foo" + + def test_enum_spec_identity(self) -> None: + spec = EnumSpec(name="Color", description=None, source_type=InstrumentFamily) + ident = spec.identity + assert ident.obj is InstrumentFamily + assert ident.name == "Color" + + def test_newtype_spec_identity(self) -> None: + from overture.schema.system.primitive import int32 + + spec = NewTypeSpec( + name="int32", description=None, type_info=STR_TYPE, source_type=int32 + ) + ident = spec.identity + assert ident.obj is int32 + assert ident.name == "int32" + + def test_union_spec_identity(self) -> None: + sentinel = object() + spec = make_union_spec("TestUnion", source_annotation=sentinel) + ident = spec.identity + assert ident.obj is sentinel + assert ident.name == "TestUnion" + + def test_model_spec_satisfies_feature_protocol_with_identity(self) -> None: + spec = ModelSpec(name="Foo", description=None, source_type=SimpleModel) + feature: FeatureSpec = spec + assert feature.identity.obj is SimpleModel diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index 0e0b17fc6..aad451aad 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -17,6 +17,7 @@ from overture.schema.system.string import ( HexColor, NoWhitespaceConstraint, + NoWhitespaceString, SnakeCaseString, ) from pydantic import BaseModel, Field, Tag @@ -165,7 +166,7 @@ def test_annotated_optional_str(self) -> None: assert result.base_type == "str" assert result.is_optional is True assert 
len(result.constraints) == 1 - assert result.constraints[0].source is None + assert result.constraints[0].source_ref is None assert result.constraints[0].constraint == "description" def test_annotated_list_str(self) -> None: @@ -175,7 +176,7 @@ def test_annotated_list_str(self) -> None: assert result.base_type == "str" assert result.is_list is True assert len(result.constraints) == 1 - assert result.constraints[0].source is None + assert result.constraints[0].source_ref is None class TestAnalyzeTypeAnnotated: @@ -189,7 +190,7 @@ def test_annotated_int_with_ge_extracts_constraint(self) -> None: assert result.kind == TypeKind.PRIMITIVE assert len(result.constraints) == 1 cs = result.constraints[0] - assert cs.source is None + assert cs.source_ref is None assert isinstance(cs.constraint, Ge) assert cs.constraint.ge == 0 @@ -199,7 +200,7 @@ def test_annotated_without_constraints(self) -> None: assert result.base_type == "str" assert len(result.constraints) == 1 - assert result.constraints[0].source is None + assert result.constraints[0].source_ref is None assert result.constraints[0].constraint == "just a description" @@ -368,35 +369,44 @@ class TestConstraintProvenance: def test_nested_newtype_flattens_constraints(self, id_type_info: TypeInfo) -> None: """Id -> NoWhitespaceString -> str flattens all constraints with sources.""" - sources = {cs.source for cs in id_type_info.constraints} - assert "Id" in sources - assert "NoWhitespaceString" in sources + source_names = { + cs.source_name for cs in id_type_info.constraints if cs.source_name + } + assert "Id" in source_names + assert "NoWhitespaceString" in source_names def test_nested_newtype_includes_inner_constraints( self, id_type_info: TypeInfo ) -> None: """Inner NewType constraints are collected with provenance.""" nws_constraints = [ - cs for cs in id_type_info.constraints if cs.source == "NoWhitespaceString" + cs for cs in id_type_info.constraints if cs.source_ref is NoWhitespaceString ] constraint_types = 
{type(cs.constraint) for cs in nws_constraints} assert NoWhitespaceConstraint in constraint_types def test_direct_annotation_has_none_source(self) -> None: - """Constraints from direct Annotated (no NewType) have source=None.""" + """Constraints from direct Annotated (no NewType) have source_ref=None.""" result = analyze_type(Annotated[str, "direct"]) assert len(result.constraints) == 1 - assert result.constraints[0].source is None + assert result.constraints[0].source_ref is None assert result.constraints[0].constraint == "direct" def test_single_newtype_constraints_attributed( self, hex_color_type_info: TypeInfo ) -> None: - """HexColor constraints are attributed to HexColor.""" - assert all(cs.source == "HexColor" for cs in hex_color_type_info.constraints) + """HexColor constraints are attributed to the HexColor callable.""" + assert all(cs.source_ref is HexColor for cs in hex_color_type_info.constraints) assert len(hex_color_type_info.constraints) > 0 + def test_source_ref_is_newtype_callable( + self, hex_color_type_info: TypeInfo + ) -> None: + """source_ref is the actual NewType callable, not a string.""" + cs = hex_color_type_info.constraints[0] + assert cs.source_ref is HexColor + def test_constraint_preserves_original_object( self, hex_color_type_info: TypeInfo ) -> None: diff --git a/packages/overture-schema-codegen/tests/test_type_collection.py b/packages/overture-schema-codegen/tests/test_type_collection.py index 78ec75695..8c52468c5 100644 --- a/packages/overture-schema-codegen/tests/test_type_collection.py +++ b/packages/overture-schema-codegen/tests/test_type_collection.py @@ -1,14 +1,31 @@ """Tests for type collection module.""" -from codegen_test_support import FeatureWithAddress, FeatureWithSources, Instrument +from codegen_test_support import ( + FeatureWithAddress, + FeatureWithSources, + Instrument, + has_name, + lookup_by_name, +) from overture.schema.codegen.model_extraction import expand_model_tree, extract_model from 
overture.schema.codegen.specs import ( EnumSpec, ModelSpec, NewTypeSpec, SupplementarySpec, + TypeIdentity, ) from overture.schema.codegen.type_collection import collect_all_supplementary_types +from pydantic import BaseModel + + +def _make_feature_with_sub_model(sub_model: type) -> type[BaseModel]: + """Build a feature class whose only field references sub_model.""" + return type( + f"FeatureWith{sub_model.__name__}", + (BaseModel,), + {"__annotations__": {"sub": sub_model}, "sub": None}, + ) class TestCollectAllSupplementarySpecs: @@ -17,7 +34,7 @@ class TestCollectAllSupplementarySpecs: @staticmethod def _expanded_supplementary( model_class: type, - ) -> dict[str, SupplementarySpec]: + ) -> dict[TypeIdentity, SupplementarySpec]: spec = extract_model(model_class) expand_model_tree(spec) return collect_all_supplementary_types([spec]) @@ -25,20 +42,20 @@ def _expanded_supplementary( def test_returns_enum_specs(self) -> None: result = self._expanded_supplementary(Instrument) - assert "InstrumentFamily" in result - assert isinstance(result["InstrumentFamily"], EnumSpec) + assert has_name(result, "InstrumentFamily") + assert isinstance(lookup_by_name(result, "InstrumentFamily"), EnumSpec) def test_returns_newtype_specs(self) -> None: result = self._expanded_supplementary(Instrument) - assert "HexColor" in result - assert isinstance(result["HexColor"], NewTypeSpec) + assert has_name(result, "HexColor") + assert isinstance(lookup_by_name(result, "HexColor"), NewTypeSpec) def test_returns_model_specs_from_expanded_tree(self) -> None: result = self._expanded_supplementary(FeatureWithAddress) - assert "Address" in result - assert isinstance(result["Address"], ModelSpec) + assert has_name(result, "Address") + assert isinstance(lookup_by_name(result, "Address"), ModelSpec) def test_collects_transitive_types(self) -> None: """Types referenced by sub-models are also collected.""" @@ -46,5 +63,23 @@ def test_collects_transitive_types(self) -> None: # Sources is a semantic 
NewType; SourceItem is a sub-model # referenced transitively via the expanded tree - assert "Sources" in result - assert "SourceItem" in result + assert has_name(result, "Sources") + assert has_name(result, "SourceItem") + + def test_same_name_different_types_both_collected(self) -> None: + """Two types with the same __name__ from different modules are both collected.""" + ModelA = type("Address", (BaseModel,), {"__annotations__": {"x": str}}) + ModelB = type("Address", (BaseModel,), {"__annotations__": {"y": int}}) + + outer_a = extract_model(_make_feature_with_sub_model(ModelA)) + expand_model_tree(outer_a) + + outer_b = extract_model(_make_feature_with_sub_model(ModelB)) + expand_model_tree(outer_b) + + result = collect_all_supplementary_types([outer_a, outer_b]) + + address_entries = [ + spec for tid, spec in result.items() if tid.name == "Address" + ] + assert len(address_entries) == 2 diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py index 1cb0d7e3a..e93635e02 100644 --- a/packages/overture-schema-codegen/tests/test_type_placement.py +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -3,8 +3,13 @@ from pathlib import PurePosixPath import overture.schema.system.primitive as _system_primitive -from codegen_test_support import STR_TYPE, flat_specs_from_discovery, make_union_spec -from overture.schema.codegen.link_computation import relative_link +from codegen_test_support import ( + STR_TYPE, + flat_specs_from_discovery, + lookup_by_name, + make_union_spec, +) +from overture.schema.codegen.link_computation import LinkContext, relative_link from overture.schema.codegen.model_extraction import expand_model_tree from overture.schema.codegen.path_assignment import ( GEOMETRY_PAGE, @@ -20,6 +25,7 @@ FieldSpec, ModelSpec, SupplementarySpec, + TypeIdentity, ) from overture.schema.codegen.type_collection import collect_all_supplementary_types from pydantic import 
BaseModel @@ -33,7 +39,7 @@ def _build_registry( feature_specs: list[ModelSpec], -) -> tuple[dict[str, PurePosixPath], dict[str, SupplementarySpec]]: +) -> tuple[dict[TypeIdentity, PurePosixPath], dict[TypeIdentity, SupplementarySpec]]: """Build placement registry with standard aggregate names.""" cache: dict[type, ModelSpec] = {} for spec in feature_specs: @@ -77,16 +83,21 @@ def test_features_at_theme_level(self) -> None: specs = flat_specs_from_discovery("buildings") registry, _ = _build_registry(specs) - assert registry["Building"] == PurePosixPath("buildings/building.md") - assert registry["BuildingPart"] == PurePosixPath("buildings/building_part.md") + assert lookup_by_name(registry, "Building") == PurePosixPath( + "buildings/building.md" + ) + assert lookup_by_name(registry, "BuildingPart") == PurePosixPath( + "buildings/building_part.md" + ) def test_shared_types_mirror_source_modules(self) -> None: """Core/system types land in directories matching their module path.""" specs = flat_specs_from_discovery("buildings") registry, _ = _build_registry(specs) - if "Names" in registry: - assert str(registry["Names"]).startswith("core/") + names = {tid.name for tid in registry} + if "Names" in names: + assert str(lookup_by_name(registry, "Names")).startswith("core/") def test_no_duplicate_paths(self) -> None: """No two individual types share an output path.""" @@ -113,7 +124,7 @@ def test_supplementary_types_nested_under_types(self) -> None: registry, _ = _build_registry(specs) # BuildingClass is a supplementary type from the buildings module - assert registry["BuildingClass"] == PurePosixPath( + assert lookup_by_name(registry, "BuildingClass") == PurePosixPath( "buildings/types/building_class.md" ) @@ -124,7 +135,7 @@ def test_submodule_supplementary_types_nested_under_types(self) -> None: # AreaClass is from overture.schema.divisions.division_area.enums, # a subdirectory of the divisions feature directory. 
- assert registry["AreaClass"] == PurePosixPath( + assert lookup_by_name(registry, "AreaClass") == PurePosixPath( "divisions/types/division_area/area_class.md" ) @@ -134,8 +145,9 @@ def test_shared_types_not_nested(self) -> None: registry, _ = _build_registry(specs) # Names is from overture.schema.core -- no features there, no nesting - if "Names" in registry: - path = str(registry["Names"]) + names = {tid.name for tid in registry} + if "Names" in names: + path = str(lookup_by_name(registry, "Names")) assert path.startswith("core/") assert "/types/" not in path @@ -174,4 +186,23 @@ class A(Base): registry = build_placement_registry( feature_specs, all_specs, [], [], "test.package" ) - assert "TestUnion" in registry + assert any(tid.name == "TestUnion" for tid in registry) + + +class TestLinkContextWithTypeIdentity: + """Tests for LinkContext using TypeIdentity keys.""" + + def test_same_name_different_identity_separate_paths(self) -> None: + """Two types with the same name but different objects resolve to different paths.""" + obj_a = type("Address", (), {}) + obj_b = type("Address", (), {}) + registry = { + TypeIdentity(obj_a, "Address"): PurePosixPath("places/types/address.md"), + TypeIdentity(obj_b, "Address"): PurePosixPath("addresses/address.md"), + } + ctx = LinkContext(page_path=PurePosixPath("places/place.md"), registry=registry) + assert ctx.resolve_link(TypeIdentity(obj_a, "Address")) == "types/address.md" + assert ( + ctx.resolve_link(TypeIdentity(obj_b, "Address")) + == "../addresses/address.md" + ) From 216771f6a683cf79eab7f6cfa2c3298d410ac3da Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 3 Mar 2026 15:19:38 -0800 Subject: [PATCH 24/38] fix: improve constraint description rendering MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit MinLen/MaxLen: render as prose ("Minimum length: 1") instead of wrapping the entire phrase in backticks. 
Math notation (≥, <) stays in backticks; English words don't belong there. UniqueItemsConstraint: reword docstring from class-description phrasing ("Ensures all items in a collection are unique") to validation-requirement phrasing ("All items must be unique"), matching model-level constraint tone. String constraints: normalize PhoneNumberConstraint, RegionCodeConstraint, and WikidataIdConstraint docstrings to the "Allows only..." pattern used by all other StringConstraint subclasses. --- packages/overture-schema-codegen/docs/walkthrough.md | 2 +- .../overture/schema/codegen/field_constraint_description.py | 4 ++-- .../overture-schema-codegen/tests/golden/markdown/id.md | 2 +- .../tests/golden/markdown/instrument.md | 2 +- .../tests/golden/markdown/sources.md | 4 ++-- .../tests/test_constraint_description.py | 6 ++---- .../overture/schema/system/field_constraint/collection.py | 2 +- .../src/overture/schema/system/field_constraint/string.py | 6 +++--- 8 files changed, 13 insertions(+), 15 deletions(-) diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index 1bff215e7..35829218b 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -377,7 +377,7 @@ Two modules convert constraint objects into human-readable text. `field_constraint_description.py` pattern-matches constraint types. `Interval` renders as `lower <= x <= upper` using Unicode comparison operators. Single-bound constraints (`Ge`, `Gt`, `Le`, `Lt`) render as `>= value` or `< value`. Length constraints -(`MinLen`, `MaxLen`) render as `minimum length: N`. `GeometryTypeConstraint` lists +(`MinLen`, `MaxLen`) render as plain prose (e.g. "Minimum length: 1"). `GeometryTypeConstraint` lists allowed geometry types by name, converting snake_case values to PascalCase. `Reference` describes the relationship and target model, using an optional `link_fn` to produce markdown links. 
diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py index 05d116ca5..5981528d1 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py @@ -106,9 +106,9 @@ def describe_field_constraint( if result is not None: return result if isinstance(constraint, MinLen): - return f"`minimum length: {constraint.min_length}`" + return f"Minimum length: {constraint.min_length}" if isinstance(constraint, MaxLen): - return f"`maximum length: {constraint.max_length}`" + return f"Maximum length: {constraint.max_length}" if _is_opaque_constraint(constraint): return f"`{type(constraint).__name__}`" diff --git a/packages/overture-schema-codegen/tests/golden/markdown/id.md b/packages/overture-schema-codegen/tests/golden/markdown/id.md index 993e7661e..b2bfa2995 100644 --- a/packages/overture-schema-codegen/tests/golden/markdown/id.md +++ b/packages/overture-schema-codegen/tests/golden/markdown/id.md @@ -6,7 +6,7 @@ Underlying type: `string` ## Constraints -- `minimum length: 1` +- Minimum length: 1 - Allows only strings that contain no whitespace characters. (`NoWhitespaceConstraint`, pattern: `^\S+$`) ## Used By diff --git a/packages/overture-schema-codegen/tests/golden/markdown/instrument.md b/packages/overture-schema-codegen/tests/golden/markdown/instrument.md index 5145aba43..727f1b559 100644 --- a/packages/overture-schema-codegen/tests/golden/markdown/instrument.md +++ b/packages/overture-schema-codegen/tests/golden/markdown/instrument.md @@ -17,4 +17,4 @@ by how sound is produced. 
| `num_strings` | `int32` (optional) | | | `family` | `InstrumentFamily` (optional) | | | `color` | `HexColor` (optional) | Body color | -| `tags` | `list` (optional) | *Ensures all items in a collection are unique. (`UniqueItemsConstraint`)* | +| `tags` | `list` (optional) | *All items must be unique. (`UniqueItemsConstraint`)* | diff --git a/packages/overture-schema-codegen/tests/golden/markdown/sources.md b/packages/overture-schema-codegen/tests/golden/markdown/sources.md index 0089f3597..ec0343cb6 100644 --- a/packages/overture-schema-codegen/tests/golden/markdown/sources.md +++ b/packages/overture-schema-codegen/tests/golden/markdown/sources.md @@ -6,8 +6,8 @@ Underlying type: `list` ## Constraints -- `minimum length: 1` -- Ensures all items in a collection are unique. (`UniqueItemsConstraint`) +- Minimum length: 1 +- All items must be unique. (`UniqueItemsConstraint`) ## Used By diff --git a/packages/overture-schema-codegen/tests/test_constraint_description.py b/packages/overture-schema-codegen/tests/test_constraint_description.py index 6a6e1a0ef..216bf75bc 100644 --- a/packages/overture-schema-codegen/tests/test_constraint_description.py +++ b/packages/overture-schema-codegen/tests/test_constraint_description.py @@ -340,12 +340,10 @@ def test_lt(self) -> None: assert describe_field_constraint(Lt(lt=100)) == "`< 100`" def test_min_len(self) -> None: - assert describe_field_constraint(MinLen(min_length=1)) == "`minimum length: 1`" + assert describe_field_constraint(MinLen(min_length=1)) == "Minimum length: 1" def test_max_len(self) -> None: - assert ( - describe_field_constraint(MaxLen(max_length=10)) == "`maximum length: 10`" - ) + assert describe_field_constraint(MaxLen(max_length=10)) == "Maximum length: 10" def test_interval_closed(self) -> None: assert describe_field_constraint(Interval(ge=0, le=100)) == "`0 ≤ x ≤ 100`" diff --git a/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py 
b/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py index a39159217..f8a699d91 100644 --- a/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py +++ b/packages/overture-schema-system/src/overture/schema/system/field_constraint/collection.py @@ -43,7 +43,7 @@ def _is_collection_type(source: type[Any]) -> bool: class UniqueItemsConstraint(CollectionConstraint): - """Ensures all items in a collection are unique.""" + """All items must be unique.""" def validate(self, value: list[Any] | None, info: ValidationInfo) -> None: # Skip validation for None values (used with optional fields) diff --git a/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py b/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py index 8c2d90415..68737f5db 100644 --- a/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py +++ b/packages/overture-schema-system/src/overture/schema/system/field_constraint/string.py @@ -324,7 +324,7 @@ def __get_pydantic_json_schema__( class PhoneNumberConstraint(StringConstraint): - """Constraint for international phone numbers.""" + """Allows only international phone numbers.""" def __init__(self) -> None: self.pattern = re.compile(r"^\+\d{1,3}[\s\-\(\)0-9]+$") @@ -359,7 +359,7 @@ def __get_pydantic_json_schema__( class RegionCodeConstraint(StringConstraint): - """ISO 3166-2 principal subdivision code constraint.""" + """Allows only ISO 3166-2 principal subdivision codes.""" def __init__(self) -> None: self.pattern = re.compile(r"^[A-Z]{2}-[A-Z0-9]{1,3}$") @@ -392,7 +392,7 @@ def __get_pydantic_json_schema__( class WikidataIdConstraint(StringConstraint): - """Constraint for Wikidata identifiers (Q followed by digits).""" + """Allows only Wikidata identifiers (Q followed by digits).""" def __init__(self) -> None: self.pattern = re.compile(r"^Q\d+$") From 69dc977df0463d6e1aabc3f425d476b1d1bcdd0b 
Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Tue, 3 Mar 2026 15:19:40 -0800 Subject: [PATCH 25/38] feat(codegen): generate pages and links for Pydantic built-in types Pydantic types like HttpUrl and EmailStr appear in field annotations but previously rendered as unlinked inline code. Each referenced Pydantic type now gets its own page under pydantic/<module>/ with a description, upstream Pydantic docs link, and Used By section. Discovery is reference-driven: the type collection visitor detects PRIMITIVE-kind types from pydantic modules in expanded feature trees. PydanticTypeSpec joins the SupplementarySpec union and flows through placement, reverse references, and rendering. Linking is registry-driven for all PRIMITIVE-kind types. Any primitive with a page in the placement registry gets linked, whether it's a Pydantic type (individual page) or a registered numeric primitive (aggregate page). This also links int32/float64 to the primitives page, which they weren't before. Shared is_pydantic_sourced() predicate gates collection and reverse reference tracking to pydantic-origin types without restricting the linking mechanism. 
--- .../schema/codegen/markdown_pipeline.py | 6 +- .../schema/codegen/markdown_renderer.py | 15 +++++ .../schema/codegen/markdown_type_format.py | 27 +++++++- .../schema/codegen/path_assignment.py | 9 ++- .../schema/codegen/pydantic_extraction.py | 33 ++++++++++ .../schema/codegen/reverse_references.py | 6 ++ .../src/overture/schema/codegen/specs.py | 41 +++++++++++- .../markdown/pydantic_type.md.jinja2 | 8 +++ .../schema/codegen/type_collection.py | 16 ++++- .../tests/codegen_test_support.py | 14 ++++- .../tests/test_integration_real_models.py | 50 +++++++++++++++ .../tests/test_markdown_renderer.py | 39 ++++++++++++ .../tests/test_markdown_type_format.py | 63 ++++++++++++++++++- .../tests/test_pydantic_extraction.py | 29 +++++++++ .../tests/test_reverse_references.py | 18 +++++- .../tests/test_type_collection.py | 46 ++++++++++---- .../tests/test_type_placement.py | 37 +++++++++++ 17 files changed, 435 insertions(+), 22 deletions(-) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/pydantic_extraction.py create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/pydantic_type.md.jinja2 create mode 100644 packages/overture-schema-codegen/tests/test_pydantic_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py index 9092dbba8..c96005f61 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py @@ -21,6 +21,7 @@ render_geometry_from_values, render_newtype, render_primitives_from_specs, + render_pydantic_type, ) from .model_extraction import expand_model_tree from .path_assignment import ( @@ -39,6 +40,7 @@ FeatureSpec, ModelSpec, NewTypeSpec, + PydanticTypeSpec, SupplementarySpec, TypeIdentity, UnionSpec, @@ -88,7 +90,7 @@ def 
_render_supplement( registry: dict[TypeIdentity, PurePosixPath], reverse_refs: dict[TypeIdentity, list[UsedByEntry]], ) -> RenderedPage: - """Render a single supplementary page (enum, NewType, or sub-model).""" + """Render a single supplementary type page.""" output_path = resolve_output_path(tid, registry) ctx = LinkContext(output_path, registry) used_by = reverse_refs.get(tid) @@ -99,6 +101,8 @@ def _render_supplement( content = render_newtype(spec, ctx, used_by=used_by) elif isinstance(spec, ModelSpec): content = render_feature(spec, ctx, used_by=used_by) + elif isinstance(spec, PydanticTypeSpec): + content = render_pydantic_type(spec, link_ctx=ctx, used_by=used_by) else: raise TypeError(f"Unhandled SupplementarySpec variant: {type(spec).__name__}") diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index 102669359..10357c330 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -29,6 +29,7 @@ ModelSpec, NewTypeSpec, PrimitiveSpec, + PydanticTypeSpec, TypeIdentity, UnionSpec, ) @@ -42,6 +43,7 @@ "render_geometry_from_values", "render_newtype", "render_primitives_from_specs", + "render_pydantic_type", ] @@ -496,6 +498,19 @@ def render_newtype( ) +def render_pydantic_type( + spec: PydanticTypeSpec, + link_ctx: LinkContext | None = None, + used_by: list[UsedByEntry] | None = None, +) -> str: + """Render a PydanticTypeSpec as Markdown documentation.""" + template = _get_jinja_env().get_template("pydantic_type.md.jinja2") + return template.render( + pydantic_type=spec, + used_by=_build_used_by_context(used_by, link_ctx), + ) + + # Matches the ge/le bounds of the int64 NewType in overture.schema.system.primitive. 
_INT64_MIN = -(2**63) _INT64_MAX = 2**63 - 1 diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py index 5498052e9..69d2fc05d 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -62,6 +62,26 @@ def _linked_type_identity(ti: TypeInfo) -> TypeIdentity | None: return None +def _try_primitive_link( + ti: TypeInfo, display_name: str, ctx: LinkContext | None +) -> str | None: + """Try to link a PRIMITIVE type to its page via registry lookup. + + Registered primitives (int32, Geometry) and Pydantic types (HttpUrl) + can have pages in the registry. Uses the type registry display name + (e.g. ``geometry`` not ``Geometry``) for the link text. + """ + if ti.kind != TypeKind.PRIMITIVE or not ctx: + return None + candidate = ti.newtype_ref or ti.source_type + if candidate is None: + return None + href = ctx.resolve_link(TypeIdentity(candidate, display_name)) + if href: + return _code_link(display_name, href) + return None + + def _markdown_type_name(ti: TypeInfo) -> str: """Return the markdown display name for a type. 
@@ -129,7 +149,12 @@ def format_type( display = _wrap_list_n(display, ti.list_depth) else: base = resolve_type_name(ti, "markdown") - if ti.is_list: + link = _try_primitive_link(ti, base, ctx) + if link and ti.is_list: + display = _wrap_list_n(link, ti.list_depth) + elif link: + display = link + elif ti.is_list: display = _plain_list_type(base, ti.list_depth) else: display = f"`{base}`" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py index fecae8dc6..616c58112 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py @@ -9,7 +9,7 @@ from .case_conversion import slug_filename from .module_layout import compute_output_dir, output_dir_for_entry_point -from .specs import FeatureSpec, SupplementarySpec, TypeIdentity +from .specs import FeatureSpec, PydanticTypeSpec, SupplementarySpec, TypeIdentity __all__ = [ "GEOMETRY_PAGE", @@ -48,6 +48,13 @@ def build_placement_registry( for tid, supp_spec in all_specs.items(): if tid in registry: continue + if isinstance(supp_spec, PydanticTypeSpec): + registry[tid] = ( + PurePosixPath("pydantic") + / supp_spec.source_module + / slug_filename(tid.name) + ) + continue source_module = getattr(supp_spec.source_type, "__module__", None) if source_module is None: continue diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/pydantic_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/pydantic_extraction.py new file mode 100644 index 000000000..120f4760d --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/pydantic_extraction.py @@ -0,0 +1,33 @@ +"""Pydantic built-in type extraction.""" + +import re + +from .docstring import first_docstring_line +from .specs import PydanticTypeSpec + +__all__ = ["extract_pydantic_type"] + 
+# Matches bare admonition labels like "Info:" or "Note:" with no following text. +_ADMONITION_LABEL = re.compile(r"^\w+:\s*$") + + +def _usable_description(doc: str | None) -> str | None: + """Return the first docstring line, or None if it's an admonition label.""" + line = first_docstring_line(doc) + if line is None or _ADMONITION_LABEL.match(line): + return None + return line + + +def extract_pydantic_type(cls: type) -> PydanticTypeSpec: + """Extract a PydanticTypeSpec from a Pydantic built-in type class.""" + module = getattr(cls, "__module__", "") + if not module.startswith("pydantic"): + msg = f"Expected a pydantic type, got {cls!r} from {module!r}" + raise ValueError(msg) + return PydanticTypeSpec( + name=cls.__name__, + description=_usable_description(cls.__doc__), + source_type=cls, + source_module=cls.__module__.removeprefix("pydantic."), + ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py index 1b4a75ff6..35b754d3c 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py @@ -14,6 +14,7 @@ SupplementarySpec, TypeIdentity, UnionSpec, + is_pydantic_type, ) from .type_analyzer import TypeInfo, TypeKind, walk_type_info @@ -94,6 +95,11 @@ def _visit(node: TypeInfo) -> None: referrer_kind, ) + if is_pydantic_type(node): + add_reference( + TypeIdentity.of(node.source_type), referrer, referrer_kind + ) + if node.union_members is not None: for member_cls in node.union_members: add_reference( diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py index 69ff0d763..ff62ed19b 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py +++ 
b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py @@ -22,10 +22,13 @@ "ModelSpec", "NewTypeSpec", "PrimitiveSpec", + "PydanticTypeSpec", "SupplementarySpec", "TypeIdentity", "filter_model_classes", "is_model_class", + "is_pydantic_sourced", + "is_pydantic_type", "is_union_alias", ] @@ -59,8 +62,8 @@ def __hash__(self) -> int: class _SourceTypeIdentityMixin: """Mixin providing ``identity`` from ``source_type`` and ``name``. - Shared by EnumSpec, ModelSpec, and NewTypeSpec -- each has a - ``source_type`` (the Python class/callable) and a ``name``. + Shared by EnumSpec, ModelSpec, NewTypeSpec, and PydanticTypeSpec -- + each has a ``source_type`` (the Python class/callable) and a ``name``. UnionSpec uses ``source_annotation`` instead, so it defines its own ``identity``. """ @@ -190,7 +193,25 @@ class PrimitiveSpec: float_bits: int | None = None -SupplementarySpec = EnumSpec | NewTypeSpec | ModelSpec +@dataclass +class PydanticTypeSpec(_SourceTypeIdentityMixin): + """Specification for a Pydantic built-in type (HttpUrl, EmailStr, etc.).""" + + name: str + description: str | None + source_type: type + source_module: str + + @property + def docs_url(self) -> str: + """Pydantic documentation URL for this type.""" + return ( + f"https://docs.pydantic.dev/latest/api/{self.source_module}" + f"/#pydantic.{self.source_module}.{self.name}" + ) + + +SupplementarySpec = EnumSpec | NewTypeSpec | ModelSpec | PydanticTypeSpec """Non-feature types referenced by feature models. 
Excludes PrimitiveSpec and geometry types, which are extracted @@ -198,6 +219,20 @@ class PrimitiveSpec: """ +def is_pydantic_sourced(source_type: type | None) -> bool: + """Check whether *source_type* originates from the ``pydantic`` package.""" + return getattr(source_type, "__module__", "").startswith("pydantic") + + +def is_pydantic_type(ti: TypeInfo) -> bool: + """Check whether a TypeInfo represents a Pydantic built-in type.""" + return ( + ti.kind == TypeKind.PRIMITIVE + and ti.source_type is not None + and is_pydantic_sourced(ti.source_type) + ) + + def is_model_class(obj: object) -> TypeGuard[type[BaseModel]]: """Check whether *obj* is a concrete BaseModel subclass (not a type alias).""" return isinstance(obj, type) and issubclass(obj, BaseModel) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/pydantic_type.md.jinja2 b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/pydantic_type.md.jinja2 new file mode 100644 index 000000000..3185acf56 --- /dev/null +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/templates/markdown/pydantic_type.md.jinja2 @@ -0,0 +1,8 @@ +# {{ pydantic_type.name }} +{% if pydantic_type.description %} + +{{ pydantic_type.description | linkify_urls }} +{% endif %} + +See: [Pydantic docs]({{ pydantic_type.docs_url }}) +{% include '_used_by.md.jinja2' %} diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py index d1c64e4b1..88fd4158b 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py @@ -10,7 +10,15 @@ from .enum_extraction import extract_enum from .model_extraction import extract_model from .newtype_extraction import extract_newtype -from .specs import FeatureSpec, FieldSpec, ModelSpec, 
SupplementarySpec, TypeIdentity +from .pydantic_extraction import extract_pydantic_type +from .specs import ( + FeatureSpec, + FieldSpec, + ModelSpec, + SupplementarySpec, + TypeIdentity, + is_pydantic_type, +) from .type_analyzer import TypeInfo, TypeKind, analyze_type, is_newtype, walk_type_info from .type_registry import is_semantic_newtype @@ -106,6 +114,12 @@ def _visit(node: TypeInfo) -> None: if newly_registered: _collect_inner_newtypes(node.newtype_ref) + if is_pydantic_type(node): + assert node.source_type is not None # guaranteed by is_pydantic_type + pid = TypeIdentity.of(node.source_type) + if pid not in all_specs: + all_specs[pid] = extract_pydantic_type(node.source_type) + walk_type_info(ti, _visit) def _collect_from_fields(fields: list[FieldSpec]) -> None: diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py index 9f8b60e81..1d1e81a3e 100644 --- a/packages/overture-schema-codegen/tests/codegen_test_support.py +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -13,6 +13,7 @@ import pytest from overture.schema.codegen.model_extraction import extract_model +from overture.schema.codegen.pydantic_extraction import extract_pydantic_type from overture.schema.codegen.specs import ( AnnotatedField, EnumMemberSpec, @@ -37,7 +38,7 @@ ) from overture.schema.system.ref import Id, Identified, Reference, Relationship from overture.schema.system.string import HexColor, LanguageTag, StrippedString -from pydantic import BaseModel, Field +from pydantic import BaseModel, EmailStr, Field, HttpUrl STR_TYPE = TypeInfo(base_type="str", kind=TypeKind.PRIMITIVE) @@ -196,6 +197,17 @@ class FeatureWithDict( metadata: dict[str, int] = Field(description="Numeric metadata") +class FeatureWithUrl(FeatureBase[Literal["test"], Literal["linked"]]): + """A feature with Pydantic URL and email fields.""" + + website: HttpUrl | None = None + emails: list[EmailStr] | None = None + + 
+HTTP_URL_SPEC = extract_pydantic_type(HttpUrl) +EMAIL_STR_SPEC = extract_pydantic_type(EmailStr) + + class SegmentBase(BaseModel): """Common base for test segments.""" diff --git a/packages/overture-schema-codegen/tests/test_integration_real_models.py b/packages/overture-schema-codegen/tests/test_integration_real_models.py index 447d64ff2..630afb189 100644 --- a/packages/overture-schema-codegen/tests/test_integration_real_models.py +++ b/packages/overture-schema-codegen/tests/test_integration_real_models.py @@ -6,12 +6,17 @@ import pytest from codegen_test_support import assert_literal_field +from overture.schema.codegen.markdown_pipeline import generate_markdown_pages from overture.schema.codegen.markdown_renderer import render_feature from overture.schema.codegen.model_extraction import extract_model +from overture.schema.codegen.module_layout import entry_point_class from overture.schema.codegen.specs import ( + FeatureSpec, ModelSpec, UnionSpec, filter_model_classes, + is_model_class, + is_union_alias, ) from overture.schema.codegen.type_analyzer import TypeKind from overture.schema.codegen.union_extraction import extract_union @@ -227,3 +232,48 @@ def test_segment_common_base_is_base_model(self, segment_spec: UnionSpec) -> Non # Verify common base has expected fields assert "geometry" in segment_spec.common_base.model_fields assert "id" in segment_spec.common_base.model_fields + + +class TestPydanticTypePages: + """End-to-end: pipeline produces pages for referenced Pydantic built-in types.""" + + _SCHEMA_ROOT = "overture.schema" + + @pytest.fixture(scope="class") + def pages(self) -> list: + """Generate all pages from real discovered models.""" + models = discover_models() + feature_specs: list[FeatureSpec] = [] + for key, entry in models.items(): + if is_model_class(entry): + feature_specs.append(extract_model(entry, entry_point=key.entry_point)) + elif is_union_alias(entry): + feature_specs.append( + extract_union( + entry_point_class(key.entry_point), + 
entry, + entry_point=key.entry_point, + ) + ) + return generate_markdown_pages(feature_specs, self._SCHEMA_ROOT) + + def test_http_url_page_exists(self, pages: list) -> None: + """Pipeline produces a page for HttpUrl under pydantic/networks/.""" + paths = {str(p.path) for p in pages} + assert any("pydantic/networks/http_url" in path for path in paths) + + def test_email_str_page_exists(self, pages: list) -> None: + """Pipeline produces a page for EmailStr under pydantic/networks/.""" + paths = {str(p.path) for p in pages} + assert any("pydantic/networks/email_str" in path for path in paths) + + def test_http_url_page_content(self, pages: list) -> None: + """HttpUrl page has expected heading and Pydantic docs link.""" + page = next(p for p in pages if "pydantic/networks/http_url" in str(p.path)) + assert "# HttpUrl" in page.content + assert "docs.pydantic.dev" in page.content + + def test_place_links_to_http_url(self, pages: list) -> None: + """Place feature page links to the HttpUrl type page.""" + place_page = next(p for p in pages if p.path.stem == "place" and p.is_feature) + assert "HttpUrl" in place_page.content diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index 612a2827e..60bdc8936 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -8,6 +8,8 @@ import pytest from annotated_types import Ge, Interval from codegen_test_support import ( + EMAIL_STR_SPEC, + HTTP_URL_SPEC, STR_TYPE, CommonNames, FeatureBase, @@ -32,6 +34,7 @@ render_feature, render_newtype, render_primitives_from_specs, + render_pydantic_type, ) from overture.schema.codegen.model_extraction import expand_model_tree, extract_model from overture.schema.codegen.newtype_extraction import extract_newtype @@ -1392,3 +1395,39 @@ def test_empty_used_by_omits_section( result = render_fn(spec_factory(), 
used_by=[]) assert "## Used By" not in result + + +class TestRenderPydanticType: + """Tests for render_pydantic_type.""" + + def test_heading_is_pascal_case(self) -> None: + result = render_pydantic_type(HTTP_URL_SPEC) + assert result.startswith("# HttpUrl\n") + + def test_description_rendered(self) -> None: + result = render_pydantic_type(HTTP_URL_SPEC) + assert "A type that will accept any http or https URL." in result + + def test_no_description_omits_paragraph(self) -> None: + result = render_pydantic_type(EMAIL_STR_SPEC) + lines = result.strip().split("\n") + assert lines[0] == "# EmailStr" + + def test_pydantic_docs_link(self) -> None: + result = render_pydantic_type(HTTP_URL_SPEC) + assert ( + "https://docs.pydantic.dev/latest/api/networks/#pydantic.networks.HttpUrl" + in result + ) + + def test_used_by_section(self) -> None: + place_cls = type("Place", (), {}) + place_id = TypeIdentity(place_cls, "Place") + used_by = [UsedByEntry(place_id, UsedByKind.MODEL)] + ctx = LinkContext( + page_path=PurePosixPath("pydantic/networks/http_url.md"), + registry={place_id: PurePosixPath("places/place/place.md")}, + ) + result = render_pydantic_type(HTTP_URL_SPEC, link_ctx=ctx, used_by=used_by) + assert "## Used By" in result + assert "Place" in result diff --git a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py index 9d6bf34ce..e06121815 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_type_format.py +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -13,7 +13,7 @@ from overture.schema.codegen.specs import FieldSpec, TypeIdentity from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind, analyze_type from overture.schema.system.primitive import int32 -from pydantic import BaseModel +from pydantic import BaseModel, HttpUrl class _ModelA(BaseModel): @@ -179,6 +179,67 @@ def test_union_partial_links(self) -> None: assert 
"[`_ModelB`]" not in result +class TestPydanticTypeLinking: + """Tests for PRIMITIVE types with pages getting linked.""" + + def test_pydantic_type_linked_when_in_registry(self) -> None: + ti = analyze_type(HttpUrl) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(HttpUrl, "HttpUrl"): PurePosixPath( + "pydantic/networks/http_url.md" + ) + }, + ) + result = format_type(field, ctx) + assert "[`HttpUrl`]" in result + assert "pydantic/networks/http_url.md" in result + + def test_pydantic_type_unlinked_without_registry_entry(self) -> None: + ti = analyze_type(HttpUrl) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={}, + ) + result = format_type(field, ctx) + assert result == "`HttpUrl`" + assert "[" not in result + + def test_list_of_pydantic_type_linked(self) -> None: + ti = analyze_type(list[HttpUrl]) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(HttpUrl, "HttpUrl"): PurePosixPath( + "pydantic/networks/http_url.md" + ) + }, + ) + result = format_type(field, ctx) + assert "HttpUrl" in result + assert "pydantic/networks/http_url.md" in result + + def test_registered_primitive_links_to_aggregate_page(self) -> None: + """int32 links to the primitives aggregate page when in registry.""" + ti = analyze_type(int32) + field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(int32, "int32"): PurePosixPath( + "system/primitive/primitives.md" + ) + }, + ) + result = format_type(field, ctx) + assert "[`int32`]" in result + assert "system/primitive/primitives.md" in result + + class 
TestFormatUnderlyingUnionType: """Tests for UNION-kind TypeInfo in format_underlying_type.""" diff --git a/packages/overture-schema-codegen/tests/test_pydantic_extraction.py b/packages/overture-schema-codegen/tests/test_pydantic_extraction.py new file mode 100644 index 000000000..c8a0af2a3 --- /dev/null +++ b/packages/overture-schema-codegen/tests/test_pydantic_extraction.py @@ -0,0 +1,29 @@ +"""Tests for Pydantic type extraction.""" + +from overture.schema.codegen.pydantic_extraction import extract_pydantic_type +from overture.schema.codegen.specs import PydanticTypeSpec +from pydantic import EmailStr, HttpUrl + + +class TestExtractPydanticType: + def test_extracts_http_url(self) -> None: + spec = extract_pydantic_type(HttpUrl) + assert isinstance(spec, PydanticTypeSpec) + assert spec.name == "HttpUrl" + assert spec.source_type is HttpUrl + assert spec.source_module == "networks" + assert spec.description is not None + assert "http" in spec.description.lower() + + def test_extracts_email_str(self) -> None: + spec = extract_pydantic_type(EmailStr) + assert isinstance(spec, PydanticTypeSpec) + assert spec.name == "EmailStr" + assert spec.source_type is EmailStr + assert spec.source_module == "networks" + + def test_admonition_label_filtered_from_description(self) -> None: + spec = extract_pydantic_type(EmailStr) + # EmailStr.__doc__ starts with "Info:" (bare admonition label). + # _usable_description filters this, returning None. 
+ assert spec.description is None diff --git a/packages/overture-schema-codegen/tests/test_reverse_references.py b/packages/overture-schema-codegen/tests/test_reverse_references.py index 1502aba2d..da8851ec1 100644 --- a/packages/overture-schema-codegen/tests/test_reverse_references.py +++ b/packages/overture-schema-codegen/tests/test_reverse_references.py @@ -5,6 +5,7 @@ import pytest from codegen_test_support import ( FeatureWithAddress, + FeatureWithUrl, Instrument, RoadSegment, TreeNode, @@ -19,7 +20,7 @@ UsedByKind, compute_reverse_references, ) -from overture.schema.codegen.specs import TypeIdentity +from overture.schema.codegen.specs import PydanticTypeSpec, TypeIdentity from overture.schema.codegen.type_collection import collect_all_supplementary_types from overture.schema.system.ref import Id from overture.schema.system.string import NoWhitespaceString @@ -134,6 +135,21 @@ def test_deduplication_same_type_multiple_fields() -> None: assert all(e.kind == UsedByKind.MODEL for e in entries) +def test_pydantic_type_has_used_by_from_feature() -> None: + """Pydantic type in all_specs gets used-by entries from features referencing it.""" + model_spec = extract_model(FeatureWithUrl, entry_point="FeatureWithUrl") + expand_model_tree(model_spec) + all_specs = collect_all_supplementary_types([model_spec]) + + assert has_name(all_specs, "HttpUrl") + assert isinstance(lookup_by_name(all_specs, "HttpUrl"), PydanticTypeSpec) + + result = compute_reverse_references([model_spec], all_specs) + + entries = lookup_by_name(result, "HttpUrl") + assert any(e.identity.name == "FeatureWithUrl" for e in entries) + + def test_sorting_models_before_newtypes() -> None: """Sorting produces models before NewTypes, alphabetical within groups.""" # Create a test where the same type (Id) is referenced by: diff --git a/packages/overture-schema-codegen/tests/test_type_collection.py b/packages/overture-schema-codegen/tests/test_type_collection.py index 8c52468c5..38f93fa72 100644 --- 
a/packages/overture-schema-codegen/tests/test_type_collection.py +++ b/packages/overture-schema-codegen/tests/test_type_collection.py @@ -3,6 +3,7 @@ from codegen_test_support import ( FeatureWithAddress, FeatureWithSources, + FeatureWithUrl, Instrument, has_name, lookup_by_name, @@ -12,6 +13,7 @@ EnumSpec, ModelSpec, NewTypeSpec, + PydanticTypeSpec, SupplementarySpec, TypeIdentity, ) @@ -28,38 +30,36 @@ def _make_feature_with_sub_model(sub_model: type) -> type[BaseModel]: ) +def _expanded_supplementary(model_class: type) -> dict[TypeIdentity, SupplementarySpec]: + spec = extract_model(model_class) + expand_model_tree(spec) + return collect_all_supplementary_types([spec]) + + class TestCollectAllSupplementarySpecs: """Tests for collect_all_supplementary_types returning specs from expanded trees.""" - @staticmethod - def _expanded_supplementary( - model_class: type, - ) -> dict[TypeIdentity, SupplementarySpec]: - spec = extract_model(model_class) - expand_model_tree(spec) - return collect_all_supplementary_types([spec]) - def test_returns_enum_specs(self) -> None: - result = self._expanded_supplementary(Instrument) + result = _expanded_supplementary(Instrument) assert has_name(result, "InstrumentFamily") assert isinstance(lookup_by_name(result, "InstrumentFamily"), EnumSpec) def test_returns_newtype_specs(self) -> None: - result = self._expanded_supplementary(Instrument) + result = _expanded_supplementary(Instrument) assert has_name(result, "HexColor") assert isinstance(lookup_by_name(result, "HexColor"), NewTypeSpec) def test_returns_model_specs_from_expanded_tree(self) -> None: - result = self._expanded_supplementary(FeatureWithAddress) + result = _expanded_supplementary(FeatureWithAddress) assert has_name(result, "Address") assert isinstance(lookup_by_name(result, "Address"), ModelSpec) def test_collects_transitive_types(self) -> None: """Types referenced by sub-models are also collected.""" - result = self._expanded_supplementary(FeatureWithSources) + result = 
_expanded_supplementary(FeatureWithSources) # Sources is a semantic NewType; SourceItem is a sub-model # referenced transitively via the expanded tree @@ -83,3 +83,25 @@ def test_same_name_different_types_both_collected(self) -> None: spec for tid, spec in result.items() if tid.name == "Address" ] assert len(address_entries) == 2 + + +class TestCollectPydanticTypes: + """Tests for Pydantic built-in type collection.""" + + def test_collects_pydantic_type_from_field(self) -> None: + """Pydantic types referenced in fields are collected.""" + result = _expanded_supplementary(FeatureWithUrl) + assert has_name(result, "HttpUrl") + assert isinstance(lookup_by_name(result, "HttpUrl"), PydanticTypeSpec) + + def test_collects_pydantic_type_inside_list(self) -> None: + """Pydantic types wrapped in list[] are collected.""" + result = _expanded_supplementary(FeatureWithUrl) + assert has_name(result, "EmailStr") + assert isinstance(lookup_by_name(result, "EmailStr"), PydanticTypeSpec) + + def test_does_not_collect_builtin_primitives(self) -> None: + """Plain primitives like str are not collected as PydanticTypeSpec.""" + result = _expanded_supplementary(FeatureWithUrl) + assert not has_name(result, "str") + assert not has_name(result, "int") diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py index e93635e02..4cef8ef21 100644 --- a/packages/overture-schema-codegen/tests/test_type_placement.py +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -4,6 +4,8 @@ import overture.schema.system.primitive as _system_primitive from codegen_test_support import ( + EMAIL_STR_SPEC, + HTTP_URL_SPEC, STR_TYPE, flat_specs_from_discovery, lookup_by_name, @@ -206,3 +208,38 @@ def test_same_name_different_identity_separate_paths(self) -> None: ctx.resolve_link(TypeIdentity(obj_b, "Address")) == "../addresses/address.md" ) + + +class TestPydanticTypePlacement: + """Tests for placement of 
Pydantic built-in types.""" + + def test_pydantic_type_placed_under_module_dir(self) -> None: + registry = build_placement_registry( + feature_specs=[], + all_specs={HTTP_URL_SPEC.identity: HTTP_URL_SPEC}, + primitive_names=[], + geometry_names=[], + schema_root="overture.schema", + ) + assert lookup_by_name(registry, "HttpUrl") == PurePosixPath( + "pydantic/networks/http_url.md" + ) + + def test_multiple_pydantic_types_same_module(self) -> None: + specs: dict[TypeIdentity, SupplementarySpec] = { + HTTP_URL_SPEC.identity: HTTP_URL_SPEC, + EMAIL_STR_SPEC.identity: EMAIL_STR_SPEC, + } + registry = build_placement_registry( + feature_specs=[], + all_specs=specs, + primitive_names=[], + geometry_names=[], + schema_root="overture.schema", + ) + assert lookup_by_name(registry, "HttpUrl") == PurePosixPath( + "pydantic/networks/http_url.md" + ) + assert lookup_by_name(registry, "EmailStr") == PurePosixPath( + "pydantic/networks/email_str.md" + ) From 6a1c70c320a4188b1756a262f9c830207eb2b203 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 09:27:27 -0800 Subject: [PATCH 26/38] fix(codegen): include bbox in examples Remove bbox from default skip keys so it renders in example output like any other field. 
--- .../overture/schema/codegen/example_loader.py | 2 +- .../tests/test_example_loader.py | 21 ++++++++++--------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py b/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py index e1cf02a05..7d66823ef 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py @@ -139,7 +139,7 @@ def validate_example( return denulled -_DEFAULT_SKIP_KEYS: frozenset[str] = frozenset({"bbox"}) +_DEFAULT_SKIP_KEYS: frozenset[str] = frozenset() def _flatten_value(prefix: str, value: object) -> list[tuple[str, Any]]: diff --git a/packages/overture-schema-codegen/tests/test_example_loader.py b/packages/overture-schema-codegen/tests/test_example_loader.py index fc418e67c..36434d5af 100644 --- a/packages/overture-schema-codegen/tests/test_example_loader.py +++ b/packages/overture-schema-codegen/tests/test_example_loader.py @@ -50,21 +50,20 @@ def test_list_of_dicts(self) -> None: ("sources[0].record_id", "w123"), ] - def test_skip_bbox_at_top_level(self) -> None: - """Skip bbox field at top level.""" + def test_bbox_flattened_at_top_level(self) -> None: + """Bbox fields are flattened like any other nested dict.""" raw = { "id": "123", "bbox": {"xmin": -176.6, "xmax": -176.64}, "version": 1, } result = flatten_example(raw) - assert result == [("id", "123"), ("version", 1)] - - def test_nested_bbox_not_skipped(self) -> None: - """Nested bbox fields are not skipped.""" - raw = {"outer": {"bbox": {"xmin": 1.0}}} - result = flatten_example(raw) - assert result == [("outer.bbox.xmin", 1.0)] + assert result == [ + ("id", "123"), + ("bbox.xmin", -176.6), + ("bbox.xmax", -176.64), + ("version", 1), + ] def test_plain_list_kept_as_value(self) -> None: """Plain lists (non-dict items) are kept as values.""" @@ -390,7 +389,7 
@@ class MockModel(BaseModel): names: dict[str, object] sources: list[dict[str, object]] - field_names = ["id", "names", "sources", "version"] + field_names = ["id", "bbox", "names", "sources", "version"] result = load_examples(MockModel, "Building", field_names) assert len(result) == 1 @@ -399,6 +398,8 @@ class MockModel(BaseModel): assert record.rows == [ ("id", "123"), + ("bbox.xmin", 1.0), + ("bbox.xmax", 2.0), ("names.primary", "Tower"), ("sources[0].dataset", "OSM"), ("sources[0].record_id", "w456"), From 8960caaa3922866599cc65bdc7d365a84f87f9be Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 09:44:57 -0800 Subject: [PATCH 27/38] fix(codegen): stabilize Used By sort order After resolving type name collisions across themes (101596f8), two referrers from different modules can share a display name. The sort key (kind, name) produced ties, and Python's sorted() preserved set iteration order for tied elements -- which depends on id()-based hashing and varies across process invocations. Add the source module as a tiebreaker: (kind, name, module). Expose TypeIdentity.module property to encapsulate the getattr(obj, "__module__") access pattern. 
--- .../schema/codegen/reverse_references.py | 9 ++++- .../src/overture/schema/codegen/specs.py | 5 +++ .../tests/test_reverse_references.py | 38 +++++++++++++++++++ 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py index 35b754d3c..b3351e873 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py @@ -159,10 +159,15 @@ def collect_from_newtype_spec(spec: NewTypeSpec, referrer: TypeIdentity) -> None elif isinstance(supp_spec, ModelSpec): collect_from_model_spec(supp_spec, tid) - # Sort sets into lists + # Sort into deterministic lists. (kind, name) handles the common case; + # module breaks ties when two referrers share the same display name + # (e.g. identically-named types from different themes/modules). 
result: dict[TypeIdentity, list[UsedByEntry]] = {} for target, ref_set in references.items(): - entries = sorted(ref_set, key=lambda e: (e.kind.value, e.identity.name)) + entries = sorted( + ref_set, + key=lambda e: (e.kind.value, e.identity.name, e.identity.module), + ) result[target] = entries return result diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py index ff62ed19b..801e69c23 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py @@ -58,6 +58,11 @@ def __eq__(self, other: object) -> bool: def __hash__(self) -> int: return id(self.obj) + @property + def module(self) -> str: + """Source module of the underlying object, or empty string.""" + return getattr(self.obj, "__module__", "") + class _SourceTypeIdentityMixin: """Mixin providing ``identity`` from ``source_type`` and ``name``. 
diff --git a/packages/overture-schema-codegen/tests/test_reverse_references.py b/packages/overture-schema-codegen/tests/test_reverse_references.py index da8851ec1..2b7e19b4d 100644 --- a/packages/overture-schema-codegen/tests/test_reverse_references.py +++ b/packages/overture-schema-codegen/tests/test_reverse_references.py @@ -1,5 +1,6 @@ """Tests for reverse reference computation.""" +from enum import Enum as PyEnum from typing import NewType import pytest @@ -14,6 +15,7 @@ lookup_by_name, make_union_spec, ) +from overture.schema.codegen.enum_extraction import extract_enum from overture.schema.codegen.model_extraction import expand_model_tree, extract_model from overture.schema.codegen.newtype_extraction import extract_newtype from overture.schema.codegen.reverse_references import ( @@ -24,6 +26,7 @@ from overture.schema.codegen.type_collection import collect_all_supplementary_types from overture.schema.system.ref import Id from overture.schema.system.string import NoWhitespaceString +from pydantic import BaseModel @pytest.mark.parametrize( @@ -150,6 +153,41 @@ def test_pydantic_type_has_used_by_from_feature() -> None: assert any(e.identity.name == "FeatureWithUrl" for e in entries) +def test_sort_tiebreaker_uses_module_for_same_name_referrers() -> None: + """Referrers with the same name sort deterministically by module.""" + + # Two model classes named "Feature" from different modules. 
+ class SharedEnum(PyEnum): + A = "a" + + class FeatureAlpha(BaseModel): + value: SharedEnum + + class FeatureBeta(BaseModel): + value: SharedEnum + + FeatureAlpha.__name__ = "Feature" + FeatureAlpha.__module__ = "alpha.models" + FeatureBeta.__name__ = "Feature" + FeatureBeta.__module__ = "beta.models" + + spec_a = extract_model(FeatureAlpha, entry_point="Feature") + spec_b = extract_model(FeatureBeta, entry_point="Feature") + expand_model_tree(spec_a) + expand_model_tree(spec_b) + + enum_id = TypeIdentity(SharedEnum, "SharedEnum") + all_specs = {enum_id: extract_enum(SharedEnum)} + + result = compute_reverse_references([spec_a, spec_b], all_specs) + + entries = lookup_by_name(result, "SharedEnum") + assert len(entries) == 2 + # Both named "Feature" — module provides the tiebreaker + modules = [e.identity.module for e in entries] + assert modules == ["alpha.models", "beta.models"] + + def test_sorting_models_before_newtypes() -> None: """Sorting produces models before NewTypes, alphabetical within groups.""" # Create a test where the same type (Id) is referenced by: From 4a1dc06080f882ee2cc9d55f84381122da0f3844 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 09:55:24 -0800 Subject: [PATCH 28/38] fix(codegen): add visual break before constraints Constraint annotations in table description cells ran directly into the preceding description text with only a single
. Double the break so constraints read as a separate paragraph. --- .../src/overture/schema/codegen/markdown_renderer.py | 2 +- .../overture-schema-codegen/tests/golden/markdown/venue.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index 10357c330..602f85d4b 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -210,7 +210,7 @@ def _annotate_constraint_notes( """Append italic constraint descriptions to a field's description cell.""" formatted = "
".join(f"*{note}*" for note in notes) if row["description"]: - row["description"] = f"{row['description']}
{formatted}" + row["description"] = f"{row['description']}

{formatted}" else: row["description"] = formatted diff --git a/packages/overture-schema-codegen/tests/golden/markdown/venue.md b/packages/overture-schema-codegen/tests/golden/markdown/venue.md index f712b8201..edb0578ef 100644 --- a/packages/overture-schema-codegen/tests/golden/markdown/venue.md +++ b/packages/overture-schema-codegen/tests/golden/markdown/venue.md @@ -11,11 +11,11 @@ A location where musical performances take place. | `id` | `Id` | Unique identifier | | `category` | `"music"` | | | `kind` | `"venue"` | | -| `name` | `string` (optional) | Venue name
*At least one of `name`, `description` must be set* | +| `name` | `string` (optional) | Venue name

*At least one of `name`, `description` must be set* | | `description` | `string` (optional) | *At least one of `name`, `description` must be set* | | `geometry` | `geometry` | *Allowed geometry types: Point, Polygon* | | `capacity` | `int64` (optional) | *`≥ 1`* | -| `resident_ensemble` | `Id` (optional) | A unique identifier
*References `Instrument` (belongs to)* | +| `resident_ensemble` | `Id` (optional) | A unique identifier

*References `Instrument` (belongs to)* | ## Constraints From 48df0ab9ef23da91058a9a1ce34e5900e28caf87 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 13:57:34 -0800 Subject: [PATCH 29/38] fix(codegen): render list[NewType] as list MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit list[PhoneNumber] rendered as "PhoneNumber (list)" — implying PhoneNumber itself is a list type. The root cause: format_type couldn't distinguish list layers outside a NewType from list layers inside one. Add newtype_outer_list_depth to TypeInfo, snapshotted from list_depth when the type analyzer enters the first NewType. The renderer uses this to choose list syntax (list wraps the NewType) vs a (list) qualifier (NewType wraps a list internally). Non-NewType identities (enums, models) continue using list. --- .../overture-schema-codegen/docs/design.md | 7 +- .../docs/walkthrough.md | 33 ++++-- .../schema/codegen/markdown_type_format.py | 12 +- .../overture/schema/codegen/type_analyzer.py | 6 + .../tests/test_markdown_type_format.py | 107 +++++++++++++----- .../tests/test_type_analyzer.py | 48 +++++++- 6 files changed, 166 insertions(+), 47 deletions(-) diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md index 60be38223..4698d441e 100644 --- a/packages/overture-schema-codegen/docs/design.md +++ b/packages/overture-schema-codegen/docs/design.md @@ -125,9 +125,10 @@ layers in a fixed order, accumulating information into an `_UnwrapState`: The result is `TypeInfo` -- a flat dataclass that fully describes the unwrapped type: classification (`TypeKind`), optional/dict flags, `list_depth` (count of `list[...]` -layers), accumulated constraints with provenance, NewType names, source type, literal -values, and (for UNION kind) the tuple of concrete `BaseModel` member types. Dict types carry recursively analyzed `TypeInfo` -for their key and value types. 
+layers), `newtype_outer_list_depth` (list layers outside the outermost NewType boundary), +accumulated constraints with provenance, NewType names, source type, literal values, and +(for UNION kind) the tuple of concrete `BaseModel` member types. Dict types carry +recursively analyzed `TypeInfo` for their key and value types. Multi-depth `Annotated` layers (common in practice, since NewTypes wrap `Annotated` types that wrap further NewTypes) are handled naturally by the loop -- each iteration diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index 35829218b..39141b2ea 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -122,9 +122,11 @@ The function runs a single `while True` loop that peels layers in fixed order. E iteration handles one wrapper: **NewType** records names at two levels. The first NewType encountered becomes -`outermost_newtype_name` (the user-facing identity, e.g. "FeatureVersion"). Subsequent -NewTypes update `last_newtype_name` (the innermost, used for constraint provenance and -as the terminal `base_type`). The loop unwraps via `__supertype__` and continues. +`outermost_newtype_name` (the user-facing identity, e.g. "FeatureVersion") and snapshots +the current `list_depth` into `newtype_outer_list_depth` -- capturing how many list +layers appeared before the NewType boundary. Subsequent NewTypes update +`last_newtype_name` (the innermost, used for constraint provenance and as the terminal +`base_type`). The loop unwraps via `__supertype__` and continues. **Annotated** collects every metadata object as a `ConstraintSource`, tagging each with whichever NewType was most recently entered. This is how constraint provenance survives: @@ -168,7 +170,8 @@ member separately. int32)` where `int32 = NewType("int32", Annotated[int, Field(ge=0, le=2147483647)])`. Iteration 1 sees `FeatureVersion`. 
It's a NewType -- record -`outermost_newtype_name="FeatureVersion"`, unwrap to `int32`, continue. Iteration 2 sees +`outermost_newtype_name="FeatureVersion"`, snapshot `newtype_outer_list_depth=0` (no list +layers yet), unwrap to `int32`, continue. Iteration 2 sees `int32`. Also a NewType -- update `last_newtype_name="int32"`, unwrap to `Annotated[int, Field(ge=0, ...)]`, continue. Iteration 3 sees `Annotated`. Collect `ConstraintSource(source="int32", constraint=)`, unwrap to `int`. The @@ -185,10 +188,10 @@ rendering. ### _UnwrapState The accumulator dataclass carries state across iterations: optional/dict flags, -`list_depth` (incremented per `list[...]` layer), the constraint list, both NewType name -slots, and the captured description. Its -`build_type_info` method assembles the final `TypeInfo` from accumulated state, freezing -the constraint list into a tuple. +`list_depth` (incremented per `list[...]` layer), `newtype_outer_list_depth` (snapshotted +from `list_depth` when the first NewType is entered), the constraint list, both NewType +name slots, and the captured description. Its `build_type_info` method assembles the +final `TypeInfo` from accumulated state, freezing the constraint list into a tuple. ### walk_type_info @@ -506,10 +509,16 @@ provenance rather than direct field reference. `format_type` handles the full range of field types. Single-value Literals render as `"value"` in backticks. Semantic NewTypes and enums/models get markdown links via `_resolve_type_link`, which checks the `LinkContext` registry and falls back to plain -code spans. Lists wrap `list_depth` times. Linked inner types use broken-backtick syntax -(`` `list<` `` ... `` `>` ``) built as a single wrapper to avoid adjacent backticks that -CommonMark would interpret as multi-backtick code span delimiters. Dict types render as `` `map` ``. Qualifiers -(optional, list, map) append in parentheses. +code spans. 
For types with a linked identity (semantic NewTypes, enums, models), list +rendering depends on where the list layers sit relative to the NewType boundary. +`newtype_outer_list_depth > 0` means the list wraps the NewType (`list[PhoneNumber]`) and +renders as `list`. `is_list` with `newtype_name` set means the NewType +wraps a list internally (`Sources` wrapping `list[SourceItem]`) and renders with a +`(list)` qualifier. Non-NewType identities (enums, models) use `list` syntax. Linked +inner types use broken-backtick syntax (`` `list<` `` ... `` `>` ``) built as a single +wrapper to avoid adjacent backticks that CommonMark would interpret as multi-backtick +code span delimiters. Dict types render as `` `map` ``. Qualifiers (optional, list, +map) append in parentheses. Union members format independently -- each gets its own link resolution, joined with pipe separators escaped for table-cell safety. diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py index 69d2fc05d..47863f039 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_type_format.py @@ -143,11 +143,21 @@ def format_type( display = f"`{format_dict_type(ti)}`" elif identity: display = resolve_type_link(identity, ctx) - if ti.is_list and identity.name == ti.newtype_name: + # List layers outside a NewType wrap with list<> syntax (e.g., list[PhoneNumber] + # renders as list). List layers inside a NewType use a (list) + # qualifier instead (e.g., Sources wrapping list[SourceItem] renders as + # Sources (list)), since the list-ness is an implementation detail of the type. 
+ if ti.newtype_outer_list_depth > 0: + assert ti.is_list # outer list layers are a subset of total list layers + display = _wrap_list_n(display, ti.newtype_outer_list_depth) + elif ti.is_list and ti.newtype_name is not None: # list is inside the NewType qualifiers.append("list") elif ti.is_list: display = _wrap_list_n(display, ti.list_depth) else: + # Fallback: types without a linked identity. Registered primitives (int32, + # Geometry) and Pydantic types (HttpUrl) may still link to aggregate pages + # via the placement registry. Unregistered primitives render as plain code. base = resolve_type_name(ti, "markdown") link = _try_primitive_link(ti, base, ctx) if link and ti.is_list: diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py index 6e723453c..fd4c9a06b 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py @@ -57,6 +57,7 @@ class TypeInfo: kind: TypeKind is_optional: bool = False list_depth: int = 0 + newtype_outer_list_depth: int = 0 is_dict: bool = False dict_key_type: TypeInfo | None = None dict_value_type: TypeInfo | None = None @@ -113,10 +114,13 @@ class _UnwrapState: as the resolved ``base_type`` for the terminal type. - ``last_newtype_ref``: the most recently entered NewType callable, used as constraint provenance (which NewType contributed each constraint). + - ``newtype_outer_list_depth``: list layers accumulated before entering + the outermost NewType boundary. 
""" is_optional: bool = False list_depth: int = 0 + newtype_outer_list_depth: int = 0 is_dict: bool = False dict_key_type: TypeInfo | None = None dict_value_type: TypeInfo | None = None @@ -146,6 +150,7 @@ def build_type_info( kind=kind, is_optional=self.is_optional, list_depth=self.list_depth, + newtype_outer_list_depth=self.newtype_outer_list_depth, is_dict=self.is_dict, dict_key_type=self.dict_key_type, dict_value_type=self.dict_value_type, @@ -176,6 +181,7 @@ def analyze_type(annotation: object) -> TypeInfo: state.last_newtype_name = name state.last_newtype_ref = annotation if state.outermost_newtype_name is None: + state.newtype_outer_list_depth = state.list_depth state.outermost_newtype_name = name state.outermost_newtype_ref = annotation annotation = annotation.__supertype__ # type: ignore[attr-defined] diff --git a/packages/overture-schema-codegen/tests/test_markdown_type_format.py b/packages/overture-schema-codegen/tests/test_markdown_type_format.py index e06121815..17c8a7def 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_type_format.py +++ b/packages/overture-schema-codegen/tests/test_markdown_type_format.py @@ -29,38 +29,33 @@ class TestFormatType: def test_plain_str_renders_as_string(self) -> None: ti = analyze_type(str) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - assert format_type(field) == "`string`" + assert format_type(_make_field(ti)) == "`string`" def test_optional_adds_qualifier(self) -> None: ti = analyze_type(str | None) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=False) - assert format_type(field) == "`string` (optional)" + assert format_type(_make_field(ti, is_required=False)) == "`string` (optional)" def test_literal_renders_as_quoted_value(self) -> None: ti = analyze_type(Literal["places"]) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - assert format_type(field) == '`"places"`' + assert format_type(_make_field(ti)) == 
'`"places"`' def test_multi_value_literal_renders_comma_separated(self) -> None: ti = analyze_type(Literal["a", "b", "c"]) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - assert format_type(field) == '`"a"` \\| `"b"` \\| `"c"`' + assert format_type(_make_field(ti)) == '`"a"` \\| `"b"` \\| `"c"`' def test_enum_without_context_renders_as_code(self) -> None: class Color(str, Enum): RED = "red" ti = analyze_type(Color) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - assert format_type(field) == "`Color`" + assert format_type(_make_field(ti)) == "`Color`" def test_enum_with_link_context(self) -> None: class Color(str, Enum): RED = "red" ti = analyze_type(Color) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + field = _make_field(ti) ctx = LinkContext( page_path=PurePosixPath("buildings/building/building.md"), registry={ @@ -71,18 +66,15 @@ class Color(str, Enum): def test_list_of_primitives(self) -> None: ti = analyze_type(list[str]) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - assert format_type(field) == "`list`" + assert format_type(_make_field(ti)) == "`list`" def test_nested_list_of_primitives(self) -> None: ti = analyze_type(list[list[str]]) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - assert format_type(field) == "`list>`" + assert format_type(_make_field(ti)) == "`list>`" def test_registered_primitive_not_linked(self) -> None: ti = analyze_type(int32) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) - result = format_type(field) + result = format_type(_make_field(ti)) assert result == "`int32`" assert "](int32.md)" not in result @@ -102,9 +94,11 @@ def test_dict_with_newtype_shows_semantic_name(self) -> None: assert result == "map" -def _make_union_field(ti: TypeInfo, *, is_required: bool = True) -> FieldSpec: - """Build a FieldSpec wrapping a union TypeInfo 
for test convenience.""" - return FieldSpec(name="x", type_info=ti, description=None, is_required=is_required) +def _make_field( + ti: TypeInfo, *, name: str = "x", is_required: bool = True +) -> FieldSpec: + """Build a FieldSpec for test convenience.""" + return FieldSpec(name=name, type_info=ti, description=None, is_required=is_required) class TestFormatUnionType: @@ -112,7 +106,7 @@ class TestFormatUnionType: def test_union_renders_all_members(self) -> None: ti = analyze_type(_ModelA | _ModelB) - result = format_type(_make_union_field(ti)) + result = format_type(_make_field(ti)) assert "`_ModelA`" in result assert "`_ModelB`" in result # Pipe separator escaped for table cells @@ -131,13 +125,13 @@ def test_union_with_link_context_links_each_member(self) -> None: ), }, ) - result = format_type(_make_union_field(ti), ctx) + result = format_type(_make_field(ti), ctx) assert "[`_ModelA`](types/model_a.md)" in result assert "[`_ModelB`](types/model_b.md)" in result def test_optional_union_adds_qualifier(self) -> None: ti = analyze_type(_ModelA | _ModelB | None) - result = format_type(_make_union_field(ti, is_required=False)) + result = format_type(_make_field(ti, is_required=False)) assert "(optional)" in result assert "`_ModelA`" in result assert "`_ModelB`" in result @@ -149,14 +143,14 @@ def test_list_of_union_adds_qualifier(self) -> None: list_depth=1, union_members=(_ModelA, _ModelB), ) - result = format_type(_make_union_field(ti)) + result = format_type(_make_field(ti)) assert "(list)" in result assert "`_ModelA`" in result assert "`_ModelB`" in result def test_union_members_unlinked_without_context(self) -> None: ti = analyze_type(_ModelA | _ModelB) - result = format_type(_make_union_field(ti)) + result = format_type(_make_field(ti)) # No markdown links without context assert "]()" not in result assert "[`" not in result @@ -172,7 +166,7 @@ def test_union_partial_links(self) -> None: ) }, ) - result = format_type(_make_union_field(ti), ctx) + result = 
format_type(_make_field(ti), ctx) assert "[`_ModelA`](types/model_a.md)" in result assert "`_ModelB`" in result # _ModelB should NOT be linked @@ -184,7 +178,7 @@ class TestPydanticTypeLinking: def test_pydantic_type_linked_when_in_registry(self) -> None: ti = analyze_type(HttpUrl) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + field = _make_field(ti) ctx = LinkContext( page_path=PurePosixPath("places/place/place.md"), registry={ @@ -199,7 +193,7 @@ def test_pydantic_type_linked_when_in_registry(self) -> None: def test_pydantic_type_unlinked_without_registry_entry(self) -> None: ti = analyze_type(HttpUrl) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + field = _make_field(ti) ctx = LinkContext( page_path=PurePosixPath("places/place/place.md"), registry={}, @@ -210,7 +204,7 @@ def test_pydantic_type_unlinked_without_registry_entry(self) -> None: def test_list_of_pydantic_type_linked(self) -> None: ti = analyze_type(list[HttpUrl]) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + field = _make_field(ti) ctx = LinkContext( page_path=PurePosixPath("places/place/place.md"), registry={ @@ -226,7 +220,7 @@ def test_list_of_pydantic_type_linked(self) -> None: def test_registered_primitive_links_to_aggregate_page(self) -> None: """int32 links to the primitives aggregate page when in registry.""" ti = analyze_type(int32) - field = FieldSpec(name="x", type_info=ti, description=None, is_required=True) + field = _make_field(ti) ctx = LinkContext( page_path=PurePosixPath("places/place/place.md"), registry={ @@ -240,6 +234,59 @@ def test_registered_primitive_links_to_aggregate_page(self) -> None: assert "system/primitive/primitives.md" in result +class TestListOfSemanticNewtype: + """Tests for list[SemanticNewType] rendering. + + When a scalar NewType appears inside list[], the type renders as + list rather than NewTypeName (list). 
The (list) qualifier + is reserved for NewTypes that internally wrap a list. + """ + + def test_list_of_scalar_newtype_renders_list_syntax(self) -> None: + """list[ScalarNewType] renders as list, not Name (list).""" + ScalarNT = NewType("ScalarNT", str) + ti = analyze_type(list[ScalarNT]) + result = format_type(_make_field(ti)) + assert "list<" in result + assert "ScalarNT" in result + assert "(list)" not in result + + def test_newtype_wrapping_list_renders_qualifier(self) -> None: + """NewType wrapping list[X] renders as Name (list).""" + ListNT = NewType("ListNT", list[str]) + ti = analyze_type(ListNT) + result = format_type(_make_field(ti)) + assert "(list)" in result + assert "ListNT" in result + + def test_list_of_scalar_newtype_with_link(self) -> None: + """list[ScalarNewType] with link context renders linked list.""" + ScalarNT = NewType("ScalarNT", str) + ti = analyze_type(list[ScalarNT]) + field = _make_field(ti) + ctx = LinkContext( + page_path=PurePosixPath("places/place/place.md"), + registry={ + TypeIdentity(ScalarNT, "ScalarNT"): PurePosixPath("system/scalar_nt.md") + }, + ) + result = format_type(field, ctx) + assert "list<" in result + assert "ScalarNT" in result + assert "system/scalar_nt.md" in result + assert "(list)" not in result + + def test_nested_list_of_scalar_newtype_renders_nested_list_syntax(self) -> None: + """list[list[ScalarNewType]] renders as list>.""" + ScalarNT = NewType("ScalarNT", str) + ti = analyze_type(list[list[ScalarNT]]) + result = format_type(_make_field(ti)) + assert "list<" in result + assert "list<`" in result or "`list None: assert result.is_list is True def test_newtype_wrapping_list_of_models(self) -> None: - """list[NewType wrapping list[Model]] records depth 2.""" + """list[NewType wrapping list[Model]] records depth 2, outer depth 1.""" class _Item(BaseModel): name: str @@ -359,11 +359,57 @@ class _Item(BaseModel): result = analyze_type(list[Inner]) assert result.list_depth == 2 + assert 
result.newtype_outer_list_depth == 1 assert result.base_type == "Inner" assert result.kind == TypeKind.MODEL assert result.source_type is _Item +class TestNewtypeOuterListDepth: + """Tests for newtype_outer_list_depth tracking.""" + + def test_list_of_scalar_newtype_has_outer_depth(self) -> None: + """list[ScalarNewType] records the list layer as outside the NewType.""" + ScalarNT = NewType("ScalarNT", str) + result = analyze_type(list[ScalarNT]) + + assert result.newtype_outer_list_depth == 1 + assert result.list_depth == 1 + + def test_newtype_wrapping_list_has_zero_outer_depth(self) -> None: + """NewType wrapping list[X] records no list layers outside the NewType.""" + ListNT = NewType("ListNT", Annotated[list[str], Field(min_length=1)]) + result = analyze_type(ListNT) + + assert result.newtype_outer_list_depth == 0 + assert result.list_depth == 1 + + @pytest.mark.parametrize( + "annotation", + [ + list[str], # list without NewType + int32, # scalar NewType + str, # plain type + ], + ids=["plain_list", "scalar_newtype", "plain_type"], + ) + def test_zero_outer_depth_without_newtype_boundary( + self, annotation: object + ) -> None: + """Types without a NewType inside a list have newtype_outer_list_depth=0.""" + result = analyze_type(annotation) + + assert result.newtype_outer_list_depth == 0 + + def test_nested_list_of_scalar_newtype_has_outer_depth_2(self) -> None: + """list[list[ScalarNewType]] records two outer list layers.""" + ScalarNT = NewType("ScalarNT", str) + result = analyze_type(list[list[ScalarNT]]) + + assert result.newtype_outer_list_depth == 2 + assert result.list_depth == 2 + + class TestConstraintProvenance: """Tests for flattened constraints with provenance tracking.""" From 14e6ac7815da400b4cdbfab88e33e6d7f948535e Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 14:40:19 -0800 Subject: [PATCH 30/38] fix(codegen): include ellipsis in truncation limit _truncate() produced strings up to 103 chars (100 + "..."). 
Account for the 3-char ellipsis so output stays within the 100-char limit. --- .../src/overture/schema/codegen/markdown_renderer.py | 2 +- .../tests/test_markdown_renderer.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index 602f85d4b..7799bbca6 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -156,7 +156,7 @@ def _sanitize_for_table_cell(text: str) -> str: def _truncate(text: str) -> str: """Truncate text to ``_EXAMPLE_TRUNCATION_LIMIT`` chars, adding ellipsis.""" if len(text) > _EXAMPLE_TRUNCATION_LIMIT: - return text[:_EXAMPLE_TRUNCATION_LIMIT] + "..." + return text[: _EXAMPLE_TRUNCATION_LIMIT - 3] + "..." return text diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index 60bdc8936..3435941fc 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -976,7 +976,8 @@ def test_long_string_truncated(self) -> None: long = "x" * 150 result = _format_example_value(long) - assert result == f"`{'x' * 100}...`" + assert result == f"`{'x' * 97}...`" + assert len(result) == 100 + 2 # 100 content + 2 backticks def test_integer_has_backticks(self) -> None: """Integers render with backticks.""" @@ -1004,9 +1005,8 @@ def test_long_list_truncated(self) -> None: result = _format_example_value(long_list) assert result.startswith("`[0, 1, 2,") assert result.endswith("...`") - # Content between backticks is at most 103 chars (100 + "...") inner = result[1:-1] # strip backticks - assert len(inner) <= 103 + assert len(inner) <= 100 def test_long_dict_truncated(self) -> 
None: """Dicts longer than truncation limit are truncated with ellipsis.""" @@ -1015,7 +1015,7 @@ def test_long_dict_truncated(self) -> None: assert result.startswith("`{key_0:") assert result.endswith("...`") inner = result[1:-1] - assert len(inner) <= 103 + assert len(inner) <= 100 def test_pipe_character_not_escaped_in_backticks(self) -> None: """Pipe characters need no escaping inside backticks.""" From 3f4bb7238ecc80fcddd050d8911528cf1e88c952 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 14:43:22 -0800 Subject: [PATCH 31/38] fix(codegen): use repr() for list items in examples str() on string list items renders as [a, b], indistinguishable from bare identifiers. repr() renders as ['a', 'b'] so strings are visually distinct from numbers. --- .../src/overture/schema/codegen/markdown_renderer.py | 2 +- .../overture-schema-codegen/tests/test_markdown_renderer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index 7799bbca6..425c98dc0 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -178,7 +178,7 @@ def _format_example_value(value: object) -> str: return f"`{_truncate(value)}`" if isinstance(value, list): - items = ", ".join(str(item) for item in value) + items = ", ".join(repr(item) for item in value) return f"`{_truncate(f'[{items}]')}`" if isinstance(value, dict): diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index 3435941fc..11559cd14 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -996,7 +996,7 @@ def 
test_list_renders_comma_separated(self) -> None: """Lists render as backtick-wrapped comma-separated values.""" assert _format_example_value([1, 2, 3]) == "`[1, 2, 3]`" - assert _format_example_value(["a", "b"]) == "`[a, b]`" + assert _format_example_value(["a", "b"]) == "`['a', 'b']`" assert _format_example_value([]) == "`[]`" def test_long_list_truncated(self) -> None: From 531621bf9d858899f98da3290b49df582e426b68 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 14:55:55 -0800 Subject: [PATCH 32/38] fix(codegen): expand union member trees before collecting types extract_model() on union members produced ModelSpecs with model=None on MODEL-kind fields. _collect_from_fields then hit the RuntimeError guard when it encountered those unexpanded references. Call expand_model_tree() on each member before walking its fields. No current union members have sub-model fields, so this was latent. --- .../schema/codegen/type_collection.py | 3 ++- .../tests/codegen_test_support.py | 20 +++++++++++++++++++ .../tests/test_type_collection.py | 16 +++++++++++++++ 3 files changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py index 88fd4158b..dd398642c 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py @@ -8,7 +8,7 @@ from typing import Annotated, get_args, get_origin from .enum_extraction import extract_enum -from .model_extraction import extract_model +from .model_extraction import expand_model_tree, extract_model from .newtype_extraction import extract_newtype from .pydantic_extraction import extract_pydantic_type from .specs import ( @@ -93,6 +93,7 @@ def _visit(node: TypeInfo) -> None: # by the feature_objs guard in _collect_from_model. 
for member_cls in node.union_members: member_spec = extract_model(member_cls) + expand_model_tree(member_spec) _collect_from_model(member_spec) if node.kind == TypeKind.ENUM and node.source_type is not None: diff --git a/packages/overture-schema-codegen/tests/codegen_test_support.py b/packages/overture-schema-codegen/tests/codegen_test_support.py index 1d1e81a3e..b613ebebd 100644 --- a/packages/overture-schema-codegen/tests/codegen_test_support.py +++ b/packages/overture-schema-codegen/tests/codegen_test_support.py @@ -237,6 +237,26 @@ class WaterSegment(SegmentBase): ] +class ContactInfo(BaseModel): + """Contact information for a venue.""" + + email: str = Field(description="Email address") + phone: str | None = Field(None, description="Phone number") + + +class VenueWithContact(SegmentBase): + """A segment variant with a nested sub-model field.""" + + subtype: Literal["venue"] + contact: ContactInfo + + +TestSegmentWithSubModel = Annotated[ + RoadSegment | VenueWithContact, + Field(description="Test segment union with sub-model member"), +] + + def make_union_spec( name: str = "TestUnion", *, diff --git a/packages/overture-schema-codegen/tests/test_type_collection.py b/packages/overture-schema-codegen/tests/test_type_collection.py index 38f93fa72..9dbb0c5d3 100644 --- a/packages/overture-schema-codegen/tests/test_type_collection.py +++ b/packages/overture-schema-codegen/tests/test_type_collection.py @@ -5,6 +5,7 @@ FeatureWithSources, FeatureWithUrl, Instrument, + TestSegmentWithSubModel, has_name, lookup_by_name, ) @@ -85,6 +86,21 @@ def test_same_name_different_types_both_collected(self) -> None: assert len(address_entries) == 2 +class TestCollectUnionMemberSubModels: + """Tests for union members with nested sub-model fields.""" + + def test_union_member_with_sub_model_collects_sub_model(self) -> None: + """Sub-models inside union members are collected without RuntimeError.""" + + class FeatureWithUnionSubModel(BaseModel): + segment: TestSegmentWithSubModel + + 
result = _expanded_supplementary(FeatureWithUnionSubModel) + + assert has_name(result, "ContactInfo") + assert isinstance(lookup_by_name(result, "ContactInfo"), ModelSpec) + + class TestCollectPydanticTypes: """Tests for Pydantic built-in type collection.""" From 1132e483cf688bcfde00c27d3b22ddc96d06dc69 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Wed, 4 Mar 2026 19:45:00 -0800 Subject: [PATCH 33/38] fix(codegen): treat dict-typed fields as leaf values in examples flatten_example recursed into all dicts, splitting dict-typed fields like `tags: dict[str, str]` into dot-notation rows. Now collect_dict_paths walks the FieldSpec tree to identify dict-typed field paths, and _flatten_value checks membership before recursing. Indexed runtime paths (items[0].tags) are normalized to schema notation (items[].tags) for matching. The pipeline computes dict_paths from spec.fields and threads them through load_examples. Also: clarify mutual exclusion in type visitor elif chains (reverse_references, type_collection) and rename _TypeIdentity to _TypeShape in union_extraction to avoid shadowing specs.TypeIdentity. 
--- .../overture-schema-codegen/docs/design.md | 6 + .../docs/walkthrough.md | 14 +- .../overture/schema/codegen/example_loader.py | 67 ++++++- .../schema/codegen/markdown_pipeline.py | 4 +- .../schema/codegen/markdown_renderer.py | 5 +- .../schema/codegen/reverse_references.py | 8 +- .../schema/codegen/type_collection.py | 17 +- .../schema/codegen/union_extraction.py | 10 +- .../tests/test_example_loader.py | 184 ++++++++++++++++++ .../tests/test_markdown_renderer.py | 4 +- 10 files changed, 288 insertions(+), 31 deletions(-) diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md index 4698d441e..aca0eddef 100644 --- a/packages/overture-schema-codegen/docs/design.md +++ b/packages/overture-schema-codegen/docs/design.md @@ -240,6 +240,12 @@ Loads example data from theme `pyproject.toml` files, validates against Pydantic and flattens to dot-notation rows for display in feature pages. Also provides a starting point for generated test data. +`collect_dict_paths` walks the `FieldSpec` tree to identify dict-typed fields (like +`tags: dict[str, str]`), returning their dot-paths as a `frozenset`. `flatten_example` +checks this set before recursing into dicts -- paths in the set are kept as leaf values +rather than being split into dot-notation rows. The pipeline computes `dict_paths` from +`spec.fields` and threads it through `load_examples`. + ## Extension Points **Adding a new output target** (Arrow schemas next, PySpark expressions after): Add a diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index 39141b2ea..b067f1585 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -609,8 +609,15 @@ non-selected variant arms. 
`_strip_null_unknown_fields` removes null-valued fiel in the common base's field set, so the selected arm's validator accepts the data without choking on fields that belong to sibling variants. +`collect_dict_paths` walks the `FieldSpec` tree to identify dict-typed fields (like +`tags: dict[str, str]`), returning their dot-paths as a `frozenset`. Schema-notation +paths use empty brackets (`items[].tags`) while runtime paths carry indices +(`items[0].tags`); `_normalize_path` strips indices before membership checks. + `flatten_example` converts nested dicts to dot-notation. Nested dicts become -`parent.child`, lists of dicts become `parent[0].child`. `order_example_rows` sorts by +`parent.child`, lists of dicts become `parent[0].child`. Dicts at paths in `dict_paths` +are kept as leaf values -- a `tags` field typed as `dict[str, str]` renders as a whole +map rather than being split into `tags.color`, `tags.size`. `order_example_rows` sorts by field position in the documentation's field order using a stable sort, so sub-fields maintain their original relative order. @@ -732,8 +739,9 @@ sources appear on the source NewType's page instead. The example loader finds `pyproject.toml` in the transportation theme package, reads `[examples.Segment]`, validates each example against the union alias (injecting literal -fields, stripping null fields from non-selected arms), flattens to dot-notation, and -orders by field position. +fields, stripping null fields from non-selected arms), computes `dict_paths` from +`spec.fields` to identify dict-typed fields, flattens to dot-notation (keeping dict-typed +fields as leaf values), and orders by field position. The Jinja2 template assembles the field table, optional constraints section, examples, and "Used By" partial into markdown. 
diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py b/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py index 7d66823ef..18d71ea8b 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py @@ -1,6 +1,7 @@ """Load and process example data from theme pyproject.toml files.""" import logging +import re import sys from dataclasses import dataclass from pathlib import Path @@ -10,11 +11,12 @@ from pydantic.fields import FieldInfo from .model_extraction import resolve_field_alias +from .specs import FieldSpec from .type_analyzer import single_literal_value log = logging.getLogger(__name__) -__all__ = ["ExampleRecord", "load_examples", "validate_example"] +__all__ = ["ExampleRecord", "collect_dict_paths", "load_examples", "validate_example"] # tomllib is stdlib from 3.11+; tomli is the backport for 3.10. try: @@ -140,19 +142,63 @@ def validate_example( _DEFAULT_SKIP_KEYS: frozenset[str] = frozenset() +_DEFAULT_DICT_PATHS: frozenset[str] = frozenset() +_INDEXED_BRACKET = re.compile(r"\[\d+\]") -def _flatten_value(prefix: str, value: object) -> list[tuple[str, Any]]: + +def _normalize_path(path: str) -> str: + """Replace indexed brackets with empty brackets for dict_paths matching. + + ``collect_dict_paths`` produces schema-notation paths like + ``items[].tags``, while ``_flatten_value`` builds runtime paths like + ``items[0].tags``. Normalizing before membership testing makes them + comparable. + """ + return _INDEXED_BRACKET.sub("[]", path) + + +def collect_dict_paths(fields: list[FieldSpec], prefix: str = "") -> frozenset[str]: + """Collect dot-paths of dict-typed fields from a FieldSpec tree. + + Walks the ``FieldSpec.model`` tree (same structure the renderer walks + for inline expansion) and returns paths where ``type_info.is_dict`` + is True. 
These paths tell ``flatten_example`` which dicts are maps + (keep as leaf) vs. models (recurse into). + + Parameters + ---------- + fields : list[FieldSpec] + Fields to walk. + prefix : str + Dot-notation prefix accumulated from parent fields. + """ + paths: set[str] = set() + for f in fields: + path = f"{prefix}{f.name}" if prefix else f.name + if f.type_info.is_dict: + paths.add(path) + elif f.model and not f.starts_cycle: + suffix = "[]" * f.type_info.list_depth if f.type_info.is_list else "" + paths |= collect_dict_paths(f.model.fields, f"{path}{suffix}.") + return frozenset(paths) + + +def _flatten_value( + prefix: str, value: object, dict_paths: frozenset[str] +) -> list[tuple[str, Any]]: """Recursively flatten a value into dot/bracket-notation rows.""" if isinstance(value, dict): + if _normalize_path(prefix) in dict_paths: + return [(prefix, value)] result: list[tuple[str, Any]] = [] for k, v in value.items(): - result.extend(_flatten_value(f"{prefix}.{k}", v)) + result.extend(_flatten_value(f"{prefix}.{k}", v, dict_paths)) return result if isinstance(value, list) and value and isinstance(value[0], (dict, list)): result = [] for i, item in enumerate(value): - result.extend(_flatten_value(f"{prefix}[{i}]", item)) + result.extend(_flatten_value(f"{prefix}[{i}]", item, dict_paths)) return result return [(prefix, value)] @@ -161,6 +207,7 @@ def flatten_example( raw: dict[str, Any], *, skip_keys: frozenset[str] = _DEFAULT_SKIP_KEYS, + dict_paths: frozenset[str] = _DEFAULT_DICT_PATHS, ) -> list[tuple[str, Any]]: """Flatten nested example dict to dot-notation key-value pairs. @@ -168,12 +215,16 @@ def flatten_example( ``"parent[0].child"``; lists of lists of dicts use double-index notation ``"parent[0][1].child"``. Keys in *skip_keys* are dropped at the top level only. Plain lists are kept as values. + + Dicts at paths in *dict_paths* are kept as leaf values instead of + being recursed into. Use ``collect_dict_paths`` to compute this set + from a FieldSpec tree. 
""" result: list[tuple[str, Any]] = [] for key, value in raw.items(): if key in skip_keys: continue - result.extend(_flatten_value(key, value)) + result.extend(_flatten_value(key, value, dict_paths)) return result @@ -257,6 +308,7 @@ def load_examples( *, pyproject_source: type | None = None, model_fields: dict[str, FieldInfo] | None = None, + dict_paths: frozenset[str] = _DEFAULT_DICT_PATHS, ) -> list[ExampleRecord]: """Load examples for a model, flattened and ordered by *field_names*. @@ -278,6 +330,9 @@ def load_examples( model_fields : dict[str, FieldInfo] or None Field info dict for Literal injection. If None, infers from validation_type if it's a BaseModel class. + dict_paths : frozenset[str] + Dot-paths of dict-typed fields to keep as leaf values. + Use ``collect_dict_paths`` to compute from a FieldSpec tree. """ source_type = pyproject_source if pyproject_source is not None else validation_type if not isinstance(source_type, type): @@ -308,7 +363,7 @@ def load_examples( e, ) continue - flat_rows = flatten_example(denulled) + flat_rows = flatten_example(denulled, dict_paths=dict_paths) ordered_rows = order_example_rows(flat_rows, field_names) records.append(ExampleRecord(rows=ordered_rows)) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py index c96005f61..fa56b4cea 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py @@ -13,7 +13,7 @@ import overture.schema.system.primitive as _system_primitive from overture.schema.system.primitive import GeometryType -from .example_loader import ExampleRecord, load_examples +from .example_loader import ExampleRecord, collect_dict_paths, load_examples from .link_computation import LinkContext from .markdown_renderer import ( render_enum, @@ -74,12 +74,14 @@ def 
_load_model_examples( if not pyproject_source: return None field_names = [f.name for f in spec.fields] + dict_paths = collect_dict_paths(spec.fields) examples = load_examples( validation_type, spec.name, field_names, pyproject_source=pyproject_source, model_fields=model_fields, + dict_paths=dict_paths, ) return examples or None diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py index 425c98dc0..090347d5c 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py @@ -1,6 +1,7 @@ """Markdown renderer for Pydantic model documentation.""" import functools +import json import re from collections.abc import Callable from dataclasses import dataclass @@ -178,11 +179,11 @@ def _format_example_value(value: object) -> str: return f"`{_truncate(value)}`" if isinstance(value, list): - items = ", ".join(repr(item) for item in value) + items = ", ".join(json.dumps(item) for item in value) return f"`{_truncate(f'[{items}]')}`" if isinstance(value, dict): - pairs = ", ".join(f"{k}: {v}" for k, v in value.items()) + pairs = ", ".join(f"{json.dumps(k)}: {json.dumps(v)}" for k, v in value.items()) return f"`{_truncate(f'{{{pairs}}}')}`" return f"`{value}`" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py index b3351e873..8e8c58171 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/reverse_references.py @@ -85,6 +85,8 @@ def _visit(node: TypeInfo) -> None: referrer_kind, ) + # ENUM, MODEL, pydantic (PRIMITIVE), and UNION are mutually + # exclusive by TypeKind. 
if ( node.kind in (TypeKind.ENUM, TypeKind.MODEL) and node.source_type is not None @@ -94,13 +96,11 @@ def _visit(node: TypeInfo) -> None: referrer, referrer_kind, ) - - if is_pydantic_type(node): + elif is_pydantic_type(node): add_reference( TypeIdentity.of(node.source_type), referrer, referrer_kind ) - - if node.union_members is not None: + elif node.union_members is not None: for member_cls in node.union_members: add_reference( TypeIdentity.of(member_cls), diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py index dd398642c..9d00cb4b2 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py @@ -87,6 +87,9 @@ def _collect_from_type_info(ti: TypeInfo) -> None: """ def _visit(node: TypeInfo) -> None: + # UNION, ENUM, and pydantic (PRIMITIVE) are mutually exclusive + # by TypeKind. NewType extraction is orthogonal -- a node can be + # a NewType-wrapped ENUM, for instance. if node.kind == TypeKind.UNION and node.union_members: # Walk each member's fields for supplementary types. 
# Members that are also top-level feature specs are skipped @@ -95,11 +98,15 @@ def _visit(node: TypeInfo) -> None: member_spec = extract_model(member_cls) expand_model_tree(member_spec) _collect_from_model(member_spec) - - if node.kind == TypeKind.ENUM and node.source_type is not None: + elif node.kind == TypeKind.ENUM and node.source_type is not None: enum_id = TypeIdentity.of(node.source_type) if enum_id not in all_specs: all_specs[enum_id] = extract_enum(node.source_type) + elif is_pydantic_type(node): + assert node.source_type is not None # guaranteed by is_pydantic_type + pid = TypeIdentity.of(node.source_type) + if pid not in all_specs: + all_specs[pid] = extract_pydantic_type(node.source_type) # Semantic NewTypes always get extracted, including intermediate # NewTypes in the wrapping chain (e.g., Id wraps NoWhitespaceString @@ -115,12 +122,6 @@ def _visit(node: TypeInfo) -> None: if newly_registered: _collect_inner_newtypes(node.newtype_ref) - if is_pydantic_type(node): - assert node.source_type is not None # guaranteed by is_pydantic_type - pid = TypeIdentity.of(node.source_type) - if pid not in all_specs: - all_specs[pid] = extract_pydantic_type(node.source_type) - walk_type_info(ti, _visit) def _collect_from_fields(fields: list[FieldSpec]) -> None: diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py index 572f0427a..6cd4d73d5 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py @@ -76,12 +76,12 @@ def extract_discriminator( return disc_field_name, mapping or None -_TypeIdentity = tuple[str, TypeKind, bool, int] -_FieldKey = tuple[str, _TypeIdentity] +_TypeShape = tuple[str, TypeKind, bool, int] +_FieldKey = tuple[str, _TypeShape] -def _type_identity(ti: TypeInfo) -> _TypeIdentity: - """Stable identity for 
dedup — excludes source_type which can vary across members.""" +def _type_shape(ti: TypeInfo) -> _TypeShape: + """Structural shape for dedup -- excludes source_type which varies across members.""" return (ti.base_type, ti.kind, ti.is_optional, ti.list_depth) @@ -117,7 +117,7 @@ def extract_union( for fs in member_spec.fields: if fs.name in shared_field_names: continue - key = (fs.name, _type_identity(fs.type_info)) + key = (fs.name, _type_shape(fs.type_info)) existing = seen.get(key) prior_sources = existing.variant_sources or () if existing else () seen[key] = AnnotatedField( diff --git a/packages/overture-schema-codegen/tests/test_example_loader.py b/packages/overture-schema-codegen/tests/test_example_loader.py index 36434d5af..79205a98e 100644 --- a/packages/overture-schema-codegen/tests/test_example_loader.py +++ b/packages/overture-schema-codegen/tests/test_example_loader.py @@ -13,6 +13,7 @@ ExampleRecord, _denull, _inject_literal_fields, + collect_dict_paths, flatten_example, load_examples, load_examples_from_toml, @@ -20,6 +21,8 @@ resolve_pyproject_path, validate_example, ) +from overture.schema.codegen.specs import FieldSpec, ModelSpec +from overture.schema.codegen.type_analyzer import TypeInfo, TypeKind from pydantic import BaseModel, ConfigDict, Field, Tag, ValidationError @@ -117,6 +120,54 @@ def test_multiple_list_items(self) -> None: ("sources[1].confidence", 0.8), ] + def test_dict_field_kept_as_leaf(self) -> None: + """Dict values at dict_paths are kept as leaf values.""" + raw = { + "name": "test", + "tags": {"color": "red", "size": "large"}, + } + result = flatten_example(raw, dict_paths=frozenset({"tags"})) + assert result == [ + ("name", "test"), + ("tags", {"color": "red", "size": "large"}), + ] + + def test_nested_dict_path_kept_as_leaf(self) -> None: + """Dict values at nested dict_paths are kept as leaf values.""" + raw = { + "names": { + "primary": "Tower", + "common": {"en": "Tower", "fr": "Tour"}, + }, + } + result = flatten_example(raw, 
dict_paths=frozenset({"names.common"})) + assert result == [ + ("names.primary", "Tower"), + ("names.common", {"en": "Tower", "fr": "Tour"}), + ] + + def test_empty_dict_paths_preserves_behavior(self) -> None: + """Empty dict_paths (default) recurses all dicts as before.""" + raw = {"tags": {"color": "red"}} + result = flatten_example(raw) + assert result == [("tags.color", "red")] + + def test_dict_inside_list_kept_as_leaf(self) -> None: + """Dict at indexed path matches schema path in dict_paths.""" + raw = { + "items": [ + {"name": "a", "tags": {"color": "red"}}, + {"name": "b", "tags": {"size": "large"}}, + ], + } + result = flatten_example(raw, dict_paths=frozenset({"items[].tags"})) + assert result == [ + ("items[0].name", "a"), + ("items[0].tags", {"color": "red"}), + ("items[1].name", "b"), + ("items[1].tags", {"size": "large"}), + ] + class TestOrderExampleRows: """Tests for order_example_rows function.""" @@ -467,6 +518,40 @@ class MockModel(BaseModel): for record in caplog.records ) + def test_dict_paths_keep_dicts_as_leaves(self, mock_project: MockProject) -> None: + """Dict fields listed in dict_paths stay as leaf values.""" + mock_project.write_pyproject( + dedent(""" + [project] + name = "test" + + [[examples.MockModel]] + name = "Tower" + + [examples.MockModel.tags] + color = "red" + size = "large" + """) + ) + + class MockModel(BaseModel): + __module__ = mock_project.mod_name + name: str + tags: dict[str, str] + + result = load_examples( + MockModel, + "MockModel", + ["name", "tags"], + dict_paths=frozenset({"tags"}), + ) + + assert len(result) == 1 + assert result[0].rows == [ + ("name", "Tower"), + ("tags", {"color": "red", "size": "large"}), + ] + def test_denulled_values_in_output(self, mock_project: MockProject) -> None: """Flattened output contains None not "null" strings.""" mock_project.write_pyproject( @@ -820,3 +905,102 @@ def test_real_segment_examples_validate(self) -> None: model_fields=TransportationSegment.model_fields, ) assert 
isinstance(validated, dict), f"Example {idx}: Expected dict result" + + +def _field( + name: str, + *, + kind: TypeKind = TypeKind.PRIMITIVE, + base_type: str = "str", + is_dict: bool = False, + list_depth: int = 0, + is_required: bool = True, + model: ModelSpec | None = None, + starts_cycle: bool = False, +) -> FieldSpec: + """Build a FieldSpec with sensible defaults for testing.""" + return FieldSpec( + name=name, + type_info=TypeInfo( + base_type=base_type, kind=kind, is_dict=is_dict, list_depth=list_depth + ), + description=None, + is_required=is_required, + model=model, + starts_cycle=starts_cycle, + ) + + +class TestCollectDictPaths: + """Tests for collect_dict_paths.""" + + def test_no_dict_fields(self) -> None: + """Model with only primitive fields returns empty set.""" + fields = [_field("name")] + assert collect_dict_paths(fields) == frozenset() + + def test_top_level_dict_field(self) -> None: + """Dict field at top level is collected.""" + fields = [ + _field("name"), + _field("tags", is_dict=True, is_required=False), + ] + assert collect_dict_paths(fields) == frozenset({"tags"}) + + def test_nested_dict_in_sub_model(self) -> None: + """Dict field inside a sub-model produces dotted path.""" + inner_fields = [ + _field("primary"), + _field("common", is_dict=True, is_required=False), + ] + inner_model = ModelSpec(name="Names", description=None, fields=inner_fields) + fields = [ + _field("names", kind=TypeKind.MODEL, base_type="Names", model=inner_model) + ] + assert collect_dict_paths(fields) == frozenset({"names.common"}) + + def test_list_of_model_with_dict(self) -> None: + """Dict inside list-of-model uses [] in path.""" + inner_fields = [_field("tags", is_dict=True, is_required=False)] + inner_model = ModelSpec(name="Item", description=None, fields=inner_fields) + fields = [ + _field( + "items", + kind=TypeKind.MODEL, + base_type="Item", + list_depth=1, + model=inner_model, + ), + ] + assert collect_dict_paths(fields) == frozenset({"items[].tags"}) + + 
def test_nested_list_depth(self) -> None: + """list[list[Model]] produces [][] in path.""" + inner_fields = [_field("tags", is_dict=True)] + inner_model = ModelSpec(name="Item", description=None, fields=inner_fields) + fields = [ + _field( + "items", + kind=TypeKind.MODEL, + base_type="Item", + list_depth=2, + model=inner_model, + ), + ] + assert collect_dict_paths(fields) == frozenset({"items[][].tags"}) + + def test_cycle_stops_recursion(self) -> None: + """Fields with starts_cycle=True are not recursed into.""" + inner_fields = [_field("data", is_dict=True, is_required=False)] + inner_model = ModelSpec(name="Node", description=None, fields=inner_fields) + fields = [ + _field( + "child", + kind=TypeKind.MODEL, + base_type="Node", + is_required=False, + model=inner_model, + starts_cycle=True, + ), + ] + assert collect_dict_paths(fields) == frozenset() diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index 11559cd14..6e4696567 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -996,7 +996,7 @@ def test_list_renders_comma_separated(self) -> None: """Lists render as backtick-wrapped comma-separated values.""" assert _format_example_value([1, 2, 3]) == "`[1, 2, 3]`" - assert _format_example_value(["a", "b"]) == "`['a', 'b']`" + assert _format_example_value(["a", "b"]) == '`["a", "b"]`' assert _format_example_value([]) == "`[]`" def test_long_list_truncated(self) -> None: @@ -1012,7 +1012,7 @@ def test_long_dict_truncated(self) -> None: """Dicts longer than truncation limit are truncated with ellipsis.""" long_dict = {f"key_{i}": f"value_{i}" for i in range(50)} result = _format_example_value(long_dict) - assert result.startswith("`{key_0:") + assert result.startswith('`{"key_0":') assert result.endswith("...`") inner = result[1:-1] assert len(inner) <= 100 From 
224a8f73927fb6fd7830acca1c2d49d97f2c52f3 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Thu, 5 Mar 2026 10:27:09 -0800 Subject: [PATCH 34/38] refactor(codegen): reorganize flat layout into sub-packages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Move modules into three sub-packages matching the architecture layers: - extraction/ (14 modules): type analysis, specs, extractors, constraints - layout/ (2 modules): module layout, type collection - markdown/ (6 modules + templates): pipeline, renderer, type formatting, links, paths, reverse references Three modules renamed to drop redundant prefixes: field_constraint_description → extraction/field_constraints model_constraint_description → extraction/model_constraints example_loader → extraction/examples Templates flattened from templates/markdown/ to markdown/templates/. --- packages/overture-schema-codegen/README.md | 4 +- .../overture-schema-codegen/docs/design.md | 44 +++++++++---------- .../docs/walkthrough.md | 20 ++++----- .../src/overture/schema/codegen/cli.py | 18 ++++---- .../schema/codegen/extraction/__init__.py | 0 .../{ => extraction}/case_conversion.py | 0 .../codegen/{ => extraction}/docstring.py | 0 .../{ => extraction}/enum_extraction.py | 0 .../examples.py} | 0 .../field_constraints.py} | 0 .../model_constraints.py} | 0 .../{ => extraction}/model_extraction.py | 0 .../{ => extraction}/newtype_extraction.py | 0 .../{ => extraction}/primitive_extraction.py | 0 .../{ => extraction}/pydantic_extraction.py | 0 .../schema/codegen/{ => extraction}/specs.py | 0 .../codegen/{ => extraction}/type_analyzer.py | 0 .../codegen/{ => extraction}/type_registry.py | 0 .../{ => extraction}/union_extraction.py | 0 .../schema/codegen/layout/__init__.py | 0 .../codegen/{ => layout}/module_layout.py | 0 .../codegen/{ => layout}/type_collection.py | 20 ++++++--- .../schema/codegen/markdown/__init__.py | 0 .../{ => markdown}/link_computation.py | 4 +- .../codegen/{ => 
markdown}/path_assignment.py | 11 +++-- .../pipeline.py} | 42 +++++++++--------- .../renderer.py} | 26 +++++------ .../{ => markdown}/reverse_references.py | 4 +- .../templates}/_used_by.md.jinja2 | 0 .../templates}/enum.md.jinja2 | 0 .../templates}/feature.md.jinja2 | 0 .../templates}/geometry.md.jinja2 | 0 .../templates}/newtype.md.jinja2 | 0 .../templates}/primitives.md.jinja2 | 0 .../templates}/pydantic_type.md.jinja2 | 0 .../type_format.py} | 6 +-- .../tests/codegen_test_support.py | 8 ++-- .../overture-schema-codegen/tests/conftest.py | 14 +++--- .../overture-schema-codegen/tests/test_cli.py | 2 +- .../tests/test_constraint_description.py | 8 ++-- .../tests/test_enum_extraction.py | 4 +- .../tests/test_example_loader.py | 6 +-- .../tests/test_golden_markdown.py | 19 +++++--- .../tests/test_integration_real_models.py | 14 +++--- .../tests/test_markdown_renderer.py | 33 +++++++------- .../tests/test_markdown_type_format.py | 12 +++-- .../tests/test_model_extractor.py | 7 ++- .../tests/test_module_layout.py | 2 +- .../tests/test_naming.py | 2 +- .../tests/test_newtype_extraction.py | 4 +- .../tests/test_primitive_extraction.py | 8 ++-- .../tests/test_pydantic_extraction.py | 4 +- .../tests/test_reverse_references.py | 17 ++++--- .../tests/test_specs.py | 6 +-- .../tests/test_type_analyzer.py | 2 +- .../tests/test_type_collection.py | 11 +++-- .../tests/test_type_placement.py | 22 +++++----- .../tests/test_type_registry.py | 4 +- .../tests/test_union_extraction.py | 4 +- 59 files changed, 225 insertions(+), 187 deletions(-) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/extraction/__init__.py rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/case_conversion.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/docstring.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/enum_extraction.py (100%) rename 
packages/overture-schema-codegen/src/overture/schema/codegen/{example_loader.py => extraction/examples.py} (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{field_constraint_description.py => extraction/field_constraints.py} (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{model_constraint_description.py => extraction/model_constraints.py} (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/model_extraction.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/newtype_extraction.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/primitive_extraction.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/pydantic_extraction.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/specs.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/type_analyzer.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/type_registry.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => extraction}/union_extraction.py (100%) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/layout/__init__.py rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => layout}/module_layout.py (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => layout}/type_collection.py (92%) create mode 100644 packages/overture-schema-codegen/src/overture/schema/codegen/markdown/__init__.py rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => markdown}/link_computation.py (95%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => markdown}/path_assignment.py (93%) rename 
packages/overture-schema-codegen/src/overture/schema/codegen/{markdown_pipeline.py => markdown/pipeline.py} (94%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{markdown_renderer.py => markdown/renderer.py} (98%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{ => markdown}/reverse_references.py (98%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/_used_by.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/enum.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/feature.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/geometry.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/newtype.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/primitives.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{templates/markdown => markdown/templates}/pydantic_type.md.jinja2 (100%) rename packages/overture-schema-codegen/src/overture/schema/codegen/{markdown_type_format.py => markdown/type_format.py} (97%) diff --git a/packages/overture-schema-codegen/README.md b/packages/overture-schema-codegen/README.md index ce7bf5c59..f09467f77 100644 --- a/packages/overture-schema-codegen/README.md +++ b/packages/overture-schema-codegen/README.md @@ -67,13 +67,13 @@ module structure. Link computation and reverse references enable cross-page navi Jinja2 templates for feature pages (with field tables, constraint sections, and examples), enum pages, NewType pages, and aggregate primitive/geometry reference pages. 
-`markdown_pipeline.py` orchestrates the full pipeline without I/O, returning +`markdown/pipeline.py` orchestrates the full pipeline without I/O, returning `list[RenderedPage]`. The CLI writes files to disk with Docusaurus frontmatter. ## Programmatic use ```python -from overture.schema.codegen.type_analyzer import analyze_type, TypeKind +from overture.schema.codegen.extraction.type_analyzer import analyze_type, TypeKind info = analyze_type(some_annotation) assert info.kind == TypeKind.PRIMITIVE diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md index aca0eddef..f541fb359 100644 --- a/packages/overture-schema-codegen/docs/design.md +++ b/packages/overture-schema-codegen/docs/design.md @@ -60,7 +60,7 @@ Extraction TypeInfo, FieldSpec, ModelSpec, EnumSpec, ... Discovery discover_models() from overture-schema-core ``` -`markdown_pipeline.py` orchestrates the pipeline without I/O: it expands feature trees, +`markdown/pipeline.py` orchestrates the pipeline without I/O: it expands feature trees, collects supplementary types, builds placement registries, computes reverse references, and calls renderers -- returning `RenderedPage` objects. The CLI (`cli.py`) is a thin Click wrapper that calls `generate_markdown_pages()` and writes files to disk. 
@@ -74,26 +74,26 @@ graph TD DM -->|"dict[ModelKey, type]"| EX subgraph Extraction - EX["type_analyzer / extractors"] + EX["extraction/type_analyzer / extractors"] EX -->|"ModelSpec, UnionSpec"| TREE["expand_model_tree()"] end TREE -->|"FeatureSpec[]"| OL subgraph "Output Layout" - OL["type_collection"] - OL -->|"SupplementarySpec{}"| PA["path_assignment"] - PA -->|"dict[str, Path]"| LC["link_computation"] - RR["reverse_references"] + OL["layout/type_collection"] + OL -->|"SupplementarySpec{}"| PA["markdown/path_assignment"] + PA -->|"dict[str, Path]"| LC["markdown/link_computation"] + RR["markdown/reverse_references"] end subgraph Rendering - R["markdown_renderer"] - TR["type_registry"] -.->|"type name resolution"| R + R["markdown/renderer"] + TR["extraction/type_registry"] -.->|"type name resolution"| R end subgraph Orchestration - MP["markdown_pipeline"] + MP["markdown/pipeline"] end OL --> MP @@ -139,12 +139,12 @@ NewType active at that depth. Extraction is split by entity kind: -- `model_extraction.py`: Pydantic model -> `ModelSpec` (fields in MRO-aware +- `extraction/model_extraction.py`: Pydantic model -> `ModelSpec` (fields in MRO-aware documentation order, alias-resolved names, model-level constraints) -- `enum_extraction.py`: Enum class -> `EnumSpec` -- `newtype_extraction.py`: NewType -> `NewTypeSpec` -- `union_extraction.py`: Discriminated union alias -> `UnionSpec` -- `primitive_extraction.py`: Numeric primitives -> `PrimitiveSpec` +- `extraction/enum_extraction.py`: Enum class -> `EnumSpec` +- `extraction/newtype_extraction.py`: NewType -> `NewTypeSpec` +- `extraction/union_extraction.py`: Discriminated union alias -> `UnionSpec` +- `extraction/primitive_extraction.py`: Numeric primitives -> `PrimitiveSpec` Each calls `analyze_type()` for field types. Tree expansion (`expand_model_tree()`) walks MODEL-kind fields to populate nested model references, with a shared cache and @@ -212,7 +212,7 @@ syntax. 
Extraction and the type registry carry no presentation logic. ### Type registry -`type_registry.py` maps type names to per-target string representations via +`extraction/type_registry.py` maps type names to per-target string representations via `TypeMapping`. `format_type_string()` wraps the resolved name with list/optional qualifiers. `is_semantic_newtype()` distinguishes NewTypes that deserve their own identity (like `FeatureVersion` wrapping `int32`) from pass-through aliases to @@ -223,12 +223,12 @@ registered primitives. Jinja2 templates for feature, enum, NewType, primitives, and geometry pages. `render_feature()` expands MODEL-kind fields inline with dot-notation (e.g., `sources[].dataset`), stopping at cycle boundaries. `format_type()` in -`markdown_type_format.py` converts `TypeInfo` into link-aware display strings using +`markdown/type_format.py` converts `TypeInfo` into link-aware display strings using `LinkContext`. ### Constraint prose -`field_constraint_description.py` and `model_constraint_description.py` convert +`extraction/field_constraints.py` and `extraction/model_constraints.py` convert constraint objects into human-readable descriptions. Field constraints produce inline text. Model constraints produce section-level descriptions and per-field notes, with consolidation for related conditional constraints (`require_if` / `forbid_if` grouped by @@ -249,13 +249,13 @@ rather than being split into dot-notation rows. The pipeline computes `dict_path ## Extension Points **Adding a new output target** (Arrow schemas next, PySpark expressions after): Add a -column to `TypeMapping` in `type_registry.py` for type-name resolution. Write a new -renderer module that consumes specs and the type registry. The extraction layer and +column to `TypeMapping` in `extraction/type_registry.py` for type-name resolution. Write +a new renderer module that consumes specs and the type registry. The extraction layer and output layout are target-independent. 
-**Adding a new type kind**: Add a variant to `TypeKind` in `type_analyzer.py`. Handle it -in the terminal classification of `analyze_type()`. Add an extraction function and spec -dataclass if needed. Update renderers to handle the new kind. +**Adding a new type kind**: Add a variant to `TypeKind` in `extraction/type_analyzer.py`. +Handle it in the terminal classification of `analyze_type()`. Add an extraction function +and spec dataclass if needed. Update renderers to handle the new kind. **Adding a new constraint type**: The iterative unwrapper collects it automatically (any `Annotated` metadata becomes a `ConstraintSource`). Add a case to diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index b067f1585..b51e3f0a4 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -71,7 +71,7 @@ unions. From that point forward both model features and union features satisfy t Two modules with no internal dependencies. Both serve multiple layers. -### case_conversion.py +### extraction/case_conversion.py Converts PascalCase to snake_case with two compiled regexes. `_ACRONYM_BOUNDARY` inserts an underscore between an uppercase run and a capitalized word start: `HTMLParser` @@ -87,7 +87,7 @@ the system passes through this function. 'hex_color.md' ``` -### docstring.py +### extraction/docstring.py Distinguishes author-written docstrings from auto-generated ones. Both `Enum` and `NewType` produce default docstrings that vary across Python versions. Rather than @@ -202,7 +202,7 @@ handle them directly. ## 4. Data structures -`specs.py` defines the vocabulary shared between extraction and rendering. Every spec is +`extraction/specs.py` defines the vocabulary shared between extraction and rendering. Every spec is a dataclass with no methods beyond field access and, in `UnionSpec`'s case, one cached property. 
@@ -240,7 +240,7 @@ individual ones. ### Classification functions -Three functions at the bottom of `specs.py` classify discovery results. `is_model_class` +Three functions at the bottom of `extraction/specs.py` classify discovery results. `is_model_class` is a `TypeGuard` that checks `isinstance(obj, type) and issubclass(obj, BaseModel)`. `is_union_alias` calls `analyze_type` and checks for `UNION` kind -- the only place outside the type analyzer that touches Python type annotations. `filter_model_classes` @@ -377,7 +377,7 @@ Two modules convert constraint objects into human-readable text. ### Field constraints -`field_constraint_description.py` pattern-matches constraint types. `Interval` renders +`extraction/field_constraints.py` pattern-matches constraint types. `Interval` renders as `lower <= x <= upper` using Unicode comparison operators. Single-bound constraints (`Ge`, `Gt`, `Le`, `Lt`) render as `>= value` or `< value`. Length constraints (`MinLen`, `MaxLen`) render as plain prose (e.g. "Minimum length: 1"). `GeometryTypeConstraint` lists @@ -396,7 +396,7 @@ docstring, class name, and pattern. Otherwise it delegates to ### Model constraints -`model_constraint_description.py` handles model-level constraints from decorators. +`extraction/model_constraints.py` handles model-level constraints from decorators. `analyze_model_constraints` returns two things in one pass: a list of section-level descriptions and a dict mapping field names to the constraint descriptions that reference them. @@ -504,7 +504,7 @@ provenance rather than direct field reference. ## 13. Markdown type formatting -`markdown_type_format.py` converts `TypeInfo` into display strings for markdown output. +`markdown/type_format.py` converts `TypeInfo` into display strings for markdown output. `format_type` handles the full range of field types. Single-value Literals render as `"value"` in backticks. 
Semantic NewTypes and enums/models get markdown links via @@ -530,11 +530,11 @@ function uses `source_type.__name__` rather than `base_type` for link resolution ## 14. Markdown rendering -`markdown_renderer.py` is the template driver. +`markdown/renderer.py` is the template driver. ### Templates -Six Jinja2 templates in `templates/markdown/`. `feature.md.jinja2` renders a field table +Six Jinja2 templates in `markdown/templates/`. `feature.md.jinja2` renders a field table with Name, Type, and Description columns, an optional Constraints section, an optional Examples section, and a "Used By" partial. `enum.md.jinja2` renders a bullet list of values. `newtype.md.jinja2` shows underlying type and constraints with provenance links. @@ -629,7 +629,7 @@ skip rather than failing the pipeline. ### The pipeline -`generate_markdown_pages` in `markdown_pipeline.py` is the "main" function. It takes +`generate_markdown_pages` in `markdown/pipeline.py` is the "main" function. It takes feature specs and a schema root, returns rendered pages without touching the filesystem. 
Eight steps: diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py b/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py index ed0a15486..0a24c7348 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/cli.py @@ -8,20 +8,20 @@ from overture.schema.core.discovery import discover_models -from .markdown_pipeline import generate_markdown_pages -from .model_extraction import extract_model -from .module_layout import ( +from .extraction.model_extraction import extract_model +from .extraction.specs import ( + FeatureSpec, + is_model_class, + is_union_alias, +) +from .extraction.union_extraction import extract_union +from .layout.module_layout import ( OUTPUT_ROOT, compute_schema_root, entry_point_class, entry_point_module, ) -from .specs import ( - FeatureSpec, - is_model_class, - is_union_alias, -) -from .union_extraction import extract_union +from .markdown.pipeline import generate_markdown_pages log = logging.getLogger(__name__) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/case_conversion.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/case_conversion.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/case_conversion.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/case_conversion.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/docstring.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/docstring.py rename to 
packages/overture-schema-codegen/src/overture/schema/codegen/extraction/docstring.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/enum_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/enum_extraction.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/enum_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/enum_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/example_loader.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/field_constraint_description.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/model_constraint_description.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py similarity index 100% rename from 
packages/overture-schema-codegen/src/overture/schema/codegen/model_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/newtype_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/newtype_extraction.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/newtype_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/newtype_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/primitive_extraction.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/primitive_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/primitive_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/pydantic_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/pydantic_extraction.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/pydantic_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/pydantic_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/specs.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py similarity index 100% 
rename from packages/overture-schema-codegen/src/overture/schema/codegen/type_analyzer.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_registry.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_registry.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/type_registry.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_registry.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/union_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py similarity index 100% rename from packages/overture-schema-codegen/src/overture/schema/codegen/module_layout.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py similarity index 92% rename from packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py index 
9d00cb4b2..7d7cf95f7 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py @@ -7,11 +7,11 @@ from collections.abc import Sequence from typing import Annotated, get_args, get_origin -from .enum_extraction import extract_enum -from .model_extraction import expand_model_tree, extract_model -from .newtype_extraction import extract_newtype -from .pydantic_extraction import extract_pydantic_type -from .specs import ( +from ..extraction.enum_extraction import extract_enum +from ..extraction.model_extraction import expand_model_tree, extract_model +from ..extraction.newtype_extraction import extract_newtype +from ..extraction.pydantic_extraction import extract_pydantic_type +from ..extraction.specs import ( FeatureSpec, FieldSpec, ModelSpec, @@ -19,8 +19,14 @@ TypeIdentity, is_pydantic_type, ) -from .type_analyzer import TypeInfo, TypeKind, analyze_type, is_newtype, walk_type_info -from .type_registry import is_semantic_newtype +from ..extraction.type_analyzer import ( + TypeInfo, + TypeKind, + analyze_type, + is_newtype, + walk_type_info, +) +from ..extraction.type_registry import is_semantic_newtype __all__ = ["collect_all_supplementary_types"] diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/__init__.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py similarity index 95% rename from packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py index bb9275b71..f05fa6fd7 100644 --- 
a/packages/overture-schema-codegen/src/overture/schema/codegen/link_computation.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from pathlib import PurePosixPath -from .case_conversion import slug_filename -from .specs import TypeIdentity +from ..extraction.case_conversion import slug_filename +from ..extraction.specs import TypeIdentity __all__ = ["LinkContext", "relative_link"] diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py similarity index 93% rename from packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py index 616c58112..2700d5a9e 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/path_assignment.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py @@ -7,9 +7,14 @@ from collections.abc import Sequence from pathlib import PurePosixPath -from .case_conversion import slug_filename -from .module_layout import compute_output_dir, output_dir_for_entry_point -from .specs import FeatureSpec, PydanticTypeSpec, SupplementarySpec, TypeIdentity +from ..extraction.case_conversion import slug_filename +from ..extraction.specs import ( + FeatureSpec, + PydanticTypeSpec, + SupplementarySpec, + TypeIdentity, +) +from ..layout.module_layout import compute_output_dir, output_dir_for_entry_point __all__ = [ "GEOMETRY_PAGE", diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py similarity index 94% rename from packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py rename to 
packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py index fa56b4cea..0bd143c56 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_pipeline.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py @@ -13,29 +13,13 @@ import overture.schema.system.primitive as _system_primitive from overture.schema.system.primitive import GeometryType -from .example_loader import ExampleRecord, collect_dict_paths, load_examples -from .link_computation import LinkContext -from .markdown_renderer import ( - render_enum, - render_feature, - render_geometry_from_values, - render_newtype, - render_primitives_from_specs, - render_pydantic_type, -) -from .model_extraction import expand_model_tree -from .path_assignment import ( - GEOMETRY_PAGE, - PRIMITIVES_PAGE, - build_placement_registry, - resolve_output_path, -) -from .primitive_extraction import ( +from ..extraction.examples import ExampleRecord, collect_dict_paths, load_examples +from ..extraction.model_extraction import expand_model_tree +from ..extraction.primitive_extraction import ( extract_primitives, partition_primitive_and_geometry_names, ) -from .reverse_references import UsedByEntry, compute_reverse_references -from .specs import ( +from ..extraction.specs import ( EnumSpec, FeatureSpec, ModelSpec, @@ -45,7 +29,23 @@ TypeIdentity, UnionSpec, ) -from .type_collection import collect_all_supplementary_types +from ..layout.type_collection import collect_all_supplementary_types +from .link_computation import LinkContext +from .path_assignment import ( + GEOMETRY_PAGE, + PRIMITIVES_PAGE, + build_placement_registry, + resolve_output_path, +) +from .renderer import ( + render_enum, + render_feature, + render_geometry_from_values, + render_newtype, + render_primitives_from_specs, + render_pydantic_type, +) +from .reverse_references import UsedByEntry, compute_reverse_references __all__ = ["RenderedPage", "generate_markdown_pages"] diff 
--git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py similarity index 98% rename from packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py index 090347d5c..6ef448eb5 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown_renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py @@ -12,17 +12,10 @@ from jinja2 import Environment, FileSystemLoader from typing_extensions import NotRequired -from .example_loader import ExampleRecord -from .field_constraint_description import constraint_display_text -from .link_computation import LinkContext -from .markdown_type_format import ( - format_type, - format_underlying_type, - resolve_type_link, -) -from .model_constraint_description import analyze_model_constraints -from .reverse_references import UsedByEntry -from .specs import ( +from ..extraction.examples import ExampleRecord +from ..extraction.field_constraints import constraint_display_text +from ..extraction.model_constraints import analyze_model_constraints +from ..extraction.specs import ( AnnotatedField, EnumSpec, FeatureSpec, @@ -34,9 +27,16 @@ TypeIdentity, UnionSpec, ) -from .type_analyzer import ( +from ..extraction.type_analyzer import ( ConstraintSource, ) +from .link_computation import LinkContext +from .reverse_references import UsedByEntry +from .type_format import ( + format_type, + format_underlying_type, + resolve_type_link, +) __all__ = [ "render_enum", @@ -50,7 +50,7 @@ _LinkFn = Callable[[TypeIdentity], str] -_TEMPLATES_DIR = Path(__file__).parent / "templates" / "markdown" +_TEMPLATES_DIR = Path(__file__).parent / "templates" _BARE_URL_RE = re.compile( r"(? 
Date: Mon, 9 Mar 2026 22:30:13 -0700 Subject: [PATCH 35/38] feat(codegen): rewrite example pipeline with model instances Replace dict-walking flatten machinery with Pydantic model-instance traversal. validate_example returns a BaseModel instance; flatten_model_instance walks it via isinstance checks to produce dot-notation key-value pairs, eliminating the need for external schema information (collect_dict_paths). augment_missing_fields adds cross-arm union fields as None. Remove "null" sentinel convention from TOML examples. Pydantic fills None defaults for omitted fields, making the _denull pipeline stage unnecessary. Fix BBox dict validation (missing return in __get_pydantic_core_schema__), BBox flattening via __slots__ property detection, datetime isoformat rendering, and non-string value truncation for Geometry objects. --- .../pyproject.toml | 7 - .../overture-schema-base-theme/pyproject.toml | 39 - .../pyproject.toml | 35 - .../overture-schema-codegen/docs/design.md | 19 +- .../docs/walkthrough.md | 42 +- .../schema/codegen/extraction/examples.py | 272 ++++--- .../schema/codegen/markdown/pipeline.py | 4 +- .../schema/codegen/markdown/renderer.py | 16 +- .../schema/codegen/markdown/type_format.py | 4 +- .../tests/test_example_loader.py | 704 ++++++++---------- .../pyproject.toml | 25 - .../pyproject.toml | 9 - .../overture/schema/system/primitive/bbox.py | 2 +- .../pyproject.toml | 23 - 14 files changed, 497 insertions(+), 704 deletions(-) diff --git a/packages/overture-schema-addresses-theme/pyproject.toml b/packages/overture-schema-addresses-theme/pyproject.toml index ad840f40d..e794b90d8 100644 --- a/packages/overture-schema-addresses-theme/pyproject.toml +++ b/packages/overture-schema-addresses-theme/pyproject.toml @@ -43,11 +43,8 @@ testpaths = ["tests"] id = "416ab01c-d836-4c4f-aedc-2f30941ce94d" geometry = "POINT (-176.5637854 -43.9471955)" country = "NZ" -postcode = "null" street = "Tikitiki Hill Road" number = "54" -unit = "null" -postal_city = "null" 
version = 1 theme = "addresses" type = "address" @@ -67,7 +64,3 @@ value = "Chatham Island" [[examples.Address.sources]] property = "" dataset = "OpenAddresses/LINZ" -record_id = "null" -update_time = "null" -confidence = "null" -between = "null" diff --git a/packages/overture-schema-base-theme/pyproject.toml b/packages/overture-schema-base-theme/pyproject.toml index dedd9e0db..aa9d3ba39 100644 --- a/packages/overture-schema-base-theme/pyproject.toml +++ b/packages/overture-schema-base-theme/pyproject.toml @@ -60,25 +60,16 @@ ymax = -75.64299774169922 property = "" dataset = "ETOPO/GLOBathy" record_id = "2024-12-09T00:00:00.000Z" -update_time = "null" -confidence = "null" -between = "null" [examples.Bathymetry.cartography] -prominence = "null" -min_zoom = "null" -max_zoom = "null" sort_key = 12 [[examples.Infrastructure]] id = "e9e3d506-89c0-3473-8cee-5e5ac6596d6c" geometry = "POINT (-179.9999994 -82.42408)" version = 0 -level = "null" subtype = "pedestrian" class = "information" -height = "null" -surface = "null" wikidata = "Q800558" theme = "base" type = "infrastructure" @@ -94,13 +85,9 @@ property = "" dataset = "OpenStreetMap" record_id = "n7674174803@2" update_time = "2023-04-07T17:37:48.000Z" -confidence = "null" -between = "null" [examples.Infrastructure.names] primary = "1306 km to South Pole" -common = "null" -rules = "null" [examples.Infrastructure.source_tags] description = "1036 km to South Pole." 
@@ -114,12 +101,9 @@ wikipedia = "en:South Pole Traverse" id = "70fc3596-a987-3fea-820c-c016c0a2f0da" geometry = "POINT (-178.7 -85.45)" version = 0 -level = "null" subtype = "physical" class = "cliff" -surface = "null" wikidata = "Q5282342" -elevation = "null" theme = "base" type = "land" @@ -134,13 +118,9 @@ property = "" dataset = "OpenStreetMap" record_id = "n11693475112@1" update_time = "2024-03-05T09:23:39.000Z" -confidence = "null" -between = "null" [examples.Land.names] primary = "Dismal Buttress" -common = "null" -rules = "null" [examples.Land.source_tags] natural = "cliff" @@ -164,13 +144,9 @@ ymax = 65.96218872070312 [[examples.LandCover.sources]] property = "" dataset = "ESA WorldCover" -record_id = "null" update_time = "2024-11-07T00:00:00.000Z" -confidence = "null" -between = "null" [examples.LandCover.cartography] -prominence = "null" min_zoom = 8 max_zoom = 15 sort_key = 3 @@ -179,12 +155,8 @@ sort_key = 3 id = "1e1f6095-5bd2-3fdb-a422-41351b848e9d" geometry = "POLYGON ((-176.5623454 -43.9567812, -176.5627644 -43.9561272, -176.5626898 -43.9557432, -176.5624297 -43.9553592, -176.562679 -43.9551603, -176.5629058 -43.9552064, -176.5631441 -43.9551769, -176.5632428 -43.9550676, -176.5633066 -43.9548702, -176.5634402 -43.9548071, -176.5639052 -43.9546682, -176.5642479 -43.9544118, -176.5647302 -43.9542142, -176.5651547 -43.954277, -176.5658293 -43.9545243, -176.5659454 -43.9543521, -176.566934 -43.9547987, -176.5669179 -43.955018, -176.5682465 -43.9553205, -176.5671004 -43.9579593, -176.5662034 -43.9600044, -176.5655366 -43.9597247, -176.5646109 -43.9595326, -176.564467 -43.9592563, -176.5639885 -43.9589226, -176.5637013 -43.9586925, -176.563223 -43.9586237, -176.5623454 -43.9567812))" version = 0 -level = "null" subtype = "golf" class = "golf_course" -surface = "null" -wikidata = "null" -elevation = "null" theme = "base" type = "land_use" @@ -199,13 +171,9 @@ property = "" dataset = "OpenStreetMap" record_id = "w56117029@3" update_time = 
"2010-04-24T22:35:13.000Z" -confidence = "null" -between = "null" [examples.LandUse.names] primary = "Chatham Islands Golf Club" -common = "null" -rules = "null" [examples.LandUse.source_tags] "LINZ:source_version" = "V16" @@ -217,12 +185,9 @@ source_ref = "http://www.linz.govt.nz/topography/topo-maps/" id = "6bbb5fe5-bf26-3efa-b120-0a7079b60840" geometry = "POINT (-177.031799 -84.934793)" version = 0 -level = "null" subtype = "physical" class = "cape" wikidata = "Q33140589" -is_salt = "null" -is_intermittent = "null" theme = "base" type = "water" @@ -237,13 +202,9 @@ property = "" dataset = "OpenStreetMap" record_id = "n11109190647@2" update_time = "2024-02-11T05:52:05.000Z" -confidence = "null" -between = "null" [examples.Water.names] primary = "Thanksgiving Point" -common = "null" -rules = "null" [examples.Water.source_tags] natural = "cape" diff --git a/packages/overture-schema-buildings-theme/pyproject.toml b/packages/overture-schema-buildings-theme/pyproject.toml index 8d169f4f4..07e418faa 100644 --- a/packages/overture-schema-buildings-theme/pyproject.toml +++ b/packages/overture-schema-buildings-theme/pyproject.toml @@ -42,25 +42,8 @@ packages = ["src/overture"] id = "148f35b1-7bc1-4180-9280-10d39b13883b" geometry = "POLYGON ((-176.6435004 -43.9938042, -176.6435738 -43.9937107, -176.6437726 -43.9937913, -176.6436992 -43.9938849, -176.6435004 -43.9938042))" version = 1 -level = "null" -subtype = "null" -class = "null" -height = "null" -names = "null" has_parts = false is_underground = false -num_floors = "null" -num_floors_underground = "null" -min_height = "null" -min_floor = "null" -facade_color = "null" -facade_material = "null" -roof_material = "null" -roof_shape = "null" -roof_direction = "null" -roof_orientation = "null" -roof_color = "null" -roof_height = "null" theme = "buildings" type = "building" @@ -75,29 +58,13 @@ property = "" dataset = "OpenStreetMap" record_id = "w519166507@1" update_time = "2017-08-27T21:39:50.000Z" -confidence = "null" 
-between = "null" [[examples.BuildingPart]] id = "19412d64-51ac-3d6a-ac2f-8a8c8b91bb60" geometry = "POLYGON ((-73.2462509 -39.8108937, -73.2462755 -39.8109047, -73.246291 -39.8109182, -73.2463022 -39.8109382, -73.2463039 -39.810959, -73.2462962 -39.81098, -73.2462796 -39.8109977, -73.2462674 -39.8110052, -73.2462281 -39.8110153, -73.2461998 -39.811013, -73.2461743 -39.8110034, -73.2461566 -39.8109898, -73.246144 -39.8109702, -73.2461418 -39.8109427, -73.2461511 -39.8109221, -73.2461669 -39.8109066, -73.2461908 -39.8108947, -73.2462184 -39.8108898, -73.2462509 -39.8108937))" version = 0 level = 3 -height = "null" -names = "null" is_underground = false -num_floors = "null" -num_floors_underground = "null" -min_height = "null" -min_floor = "null" -facade_color = "null" -facade_material = "null" -roof_material = "null" -roof_shape = "null" -roof_direction = "null" -roof_orientation = "null" -roof_color = "null" -roof_height = "null" building_id = "bd663bd4-1844-4d7d-a400-114de051cf49" theme = "buildings" type = "building_part" @@ -113,5 +80,3 @@ property = "" dataset = "OpenStreetMap" record_id = "w223076787@2" update_time = "2014-10-31T22:55:36.000Z" -confidence = "null" -between = "null" diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md index f541fb359..7a4237749 100644 --- a/packages/overture-schema-codegen/docs/design.md +++ b/packages/overture-schema-codegen/docs/design.md @@ -213,10 +213,10 @@ syntax. Extraction and the type registry carry no presentation logic. ### Type registry `extraction/type_registry.py` maps type names to per-target string representations via -`TypeMapping`. `format_type_string()` wraps the resolved name with list/optional -qualifiers. `is_semantic_newtype()` distinguishes NewTypes that deserve their own -identity (like `FeatureVersion` wrapping `int32`) from pass-through aliases to -registered primitives. +`TypeMapping`. 
`resolve_type_name()` looks up the registry and returns the display +string for a given target. `is_semantic_newtype()` distinguishes NewTypes that deserve +their own identity (like `FeatureVersion` wrapping `int32`) from pass-through aliases +to registered primitives. ### Markdown renderer @@ -240,11 +240,12 @@ Loads example data from theme `pyproject.toml` files, validates against Pydantic and flattens to dot-notation rows for display in feature pages. Also provides a starting point for generated test data. -`collect_dict_paths` walks the `FieldSpec` tree to identify dict-typed fields (like -`tags: dict[str, str]`), returning their dot-paths as a `frozenset`. `flatten_example` -checks this set before recursing into dicts -- paths in the set are kept as leaf values -rather than being split into dot-notation rows. The pipeline computes `dict_paths` from -`spec.fields` and threads it through `load_examples`. +`validate_example` returns a Pydantic model instance. `flatten_model_instance` walks the +instance recursively using `isinstance(value, BaseModel)` to distinguish model fields +(recurse with dot notation) from dict fields (keep as leaf values). This eliminates the +need for external schema information -- the model instance itself encodes the type +structure. `augment_missing_fields` appends `(name, None)` entries for union cross-arm +fields absent from the concrete variant instance. ## Extension Points diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index b51e3f0a4..dacf2b25b 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -594,36 +594,36 @@ schema. `resolve_pyproject_path` walks up from a model's module file to find `pyproject.toml`. `load_examples_from_toml` reads the `[examples.ModelName]` TOML section. 
-Validation requires three preprocessing steps that handle TOML's limitations and -flat-schema conventions. - -TOML has no null literal, so examples use the string `"null"` as a stand-in. `_denull` -replaces these recursively, walking nested dicts and lists. +Validation requires two preprocessing steps that handle flat-schema conventions. Literal fields (like `theme="buildings"`) are omitted from examples since they carry constant values. `_inject_literal_fields` adds them back before validation by scanning `model_fields` for single-value `Literal` annotations via `single_literal_value`. -Discriminated union examples from flat parquet schemas include null fields from +Discriminated union examples from flat Parquet schemas include null fields from non-selected variant arms. `_strip_null_unknown_fields` removes null-valued fields not in the common base's field set, so the selected arm's validator accepts the data without choking on fields that belong to sibling variants. -`collect_dict_paths` walks the `FieldSpec` tree to identify dict-typed fields (like -`tags: dict[str, str]`), returning their dot-paths as a `frozenset`. Schema-notation -paths use empty brackets (`items[].tags`) while runtime paths carry indices -(`items[0].tags`); `_normalize_path` strips indices before membership checks. +`validate_example` returns a Pydantic model instance. `flatten_model_instance` walks the +instance recursively using `isinstance(value, BaseModel)` to distinguish model fields +(recurse with dot notation) from dict fields (keep as leaf values). Lists of models +use bracket notation (`sources[0].dataset`), nested lists use double-index notation +(`hierarchies[0][1].name`). The model instance itself encodes the type structure, +eliminating the need for external schema information. + +For discriminated unions, the concrete variant instance lacks fields from other arms. 
+`augment_missing_fields` compares base field names against the union's merged field list +and appends `(name, None)` for absent fields, matching the flat Parquet schema where all +variant columns exist. -`flatten_example` converts nested dicts to dot-notation. Nested dicts become -`parent.child`, lists of dicts become `parent[0].child`. Dicts at paths in `dict_paths` -are kept as leaf values -- a `tags` field typed as `dict[str, str]` renders as a whole -map rather than being split into `tags.color`, `tags.size`. `order_example_rows` sorts by -field position in the documentation's field order using a stable sort, so sub-fields -maintain their original relative order. +`order_example_rows` sorts by field position in the documentation's field order using a +stable sort, so sub-fields maintain their original relative order. `load_examples` orchestrates the full flow: find the pyproject.toml, load the TOML -section, validate each example, flatten, and order. Invalid examples log a warning and -skip rather than failing the pipeline. +section, validate each example, flatten via `flatten_model_instance`, augment missing +fields, and order. Invalid examples log a warning and skip rather than failing the +pipeline. ## 16. Orchestration and CLI @@ -739,9 +739,9 @@ sources appear on the source NewType's page instead. The example loader finds `pyproject.toml` in the transportation theme package, reads `[examples.Segment]`, validates each example against the union alias (injecting literal -fields, stripping null fields from non-selected arms), computes `dict_paths` from -`spec.fields` to identify dict-typed fields, flattens to dot-notation (keeping dict-typed -fields as leaf values), and orders by field position. +fields, stripping null fields from non-selected arms), flattens the model instance to +dot-notation via `flatten_model_instance`, augments missing cross-arm fields, and orders +by field position. 
The Jinja2 template assembles the field table, optional constraints section, examples, and "Used By" partial into markdown. diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py index 18d71ea8b..811a836c3 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py @@ -1,7 +1,6 @@ -"""Load and process example data from theme pyproject.toml files.""" +"""Load, validate, and flatten example data for schema documentation.""" import logging -import re import sys from dataclasses import dataclass from pathlib import Path @@ -11,12 +10,17 @@ from pydantic.fields import FieldInfo from .model_extraction import resolve_field_alias -from .specs import FieldSpec from .type_analyzer import single_literal_value log = logging.getLogger(__name__) -__all__ = ["ExampleRecord", "collect_dict_paths", "load_examples", "validate_example"] +__all__ = [ + "ExampleRecord", + "augment_missing_fields", + "flatten_model_instance", + "load_examples", + "validate_example", +] # tomllib is stdlib from 3.11+; tomli is the backport for 3.10. try: @@ -58,30 +62,6 @@ def _inject_literal_fields( return result -def _denull_value(value: object) -> object: - """Convert a single value, replacing ``"null"`` strings with ``None``.""" - if value == "null": - return None - if isinstance(value, dict): - return _denull(value) - if isinstance(value, list): - return [_denull_value(item) for item in value] - return value - - -def _denull(data: dict[str, Any]) -> dict[str, Any]: - """Convert ``"null"`` sentinel strings to ``None``. - - TOML has no null literal, so example data uses the string ``"null"`` - as a stand-in. This recursively walks *data* (including nested dicts, - lists of dicts, and plain lists) and replaces every ``"null"`` value - with ``None``. 
- - Returns a new dict; the original is not mutated. - """ - return {key: _denull_value(value) for key, value in data.items()} - - def _known_field_keys(model_fields_dict: dict[str, FieldInfo]) -> frozenset[str]: """Alias-resolved field keys from a model_fields dict.""" return frozenset( @@ -110,24 +90,14 @@ def validate_example( raw: dict[str, Any], *, model_fields: dict[str, FieldInfo] | None = None, -) -> dict[str, Any]: +) -> BaseModel: """Validate example data against a model or union type. - Uses TypeAdapter for validation, supporting both concrete models - and discriminated union aliases. - - Preprocesses *raw* data by: - 1. Converting "null" strings to None - 2. Injecting missing Literal fields for validation (if model_fields provided) - 3. Stripping null-valued fields not in *model_fields* (handles - flat-schema examples from discriminated unions where fields from - non-selected arms appear as nulls) - - Returns the denulled dict (not the preprocessed one with injected - literals). Lets ValidationError propagate on validation failure. + Returns the validated model instance. Preprocesses *raw* data by: + 1. Injecting missing Literal fields for validation (if model_fields provided) + 2. 
Stripping null-valued fields not in *model_fields* (handles + flat-schema examples from discriminated unions) """ - denulled = _denull(raw) - if model_fields is None: if isinstance(validation_type, type) and issubclass(validation_type, BaseModel): model_fields = validation_type.model_fields @@ -135,96 +105,10 @@ def validate_example( model_fields = {} known_keys = _known_field_keys(model_fields) - preprocessed = _inject_literal_fields(model_fields, denulled) + preprocessed = _inject_literal_fields(model_fields, raw) preprocessed = _strip_null_unknown_fields(preprocessed, known_keys) - TypeAdapter(validation_type).validate_python(preprocessed) - return denulled - - -_DEFAULT_SKIP_KEYS: frozenset[str] = frozenset() -_DEFAULT_DICT_PATHS: frozenset[str] = frozenset() - -_INDEXED_BRACKET = re.compile(r"\[\d+\]") - - -def _normalize_path(path: str) -> str: - """Replace indexed brackets with empty brackets for dict_paths matching. - - ``collect_dict_paths`` produces schema-notation paths like - ``items[].tags``, while ``_flatten_value`` builds runtime paths like - ``items[0].tags``. Normalizing before membership testing makes them - comparable. - """ - return _INDEXED_BRACKET.sub("[]", path) - - -def collect_dict_paths(fields: list[FieldSpec], prefix: str = "") -> frozenset[str]: - """Collect dot-paths of dict-typed fields from a FieldSpec tree. - - Walks the ``FieldSpec.model`` tree (same structure the renderer walks - for inline expansion) and returns paths where ``type_info.is_dict`` - is True. These paths tell ``flatten_example`` which dicts are maps - (keep as leaf) vs. models (recurse into). - - Parameters - ---------- - fields : list[FieldSpec] - Fields to walk. - prefix : str - Dot-notation prefix accumulated from parent fields. 
- """ - paths: set[str] = set() - for f in fields: - path = f"{prefix}{f.name}" if prefix else f.name - if f.type_info.is_dict: - paths.add(path) - elif f.model and not f.starts_cycle: - suffix = "[]" * f.type_info.list_depth if f.type_info.is_list else "" - paths |= collect_dict_paths(f.model.fields, f"{path}{suffix}.") - return frozenset(paths) - - -def _flatten_value( - prefix: str, value: object, dict_paths: frozenset[str] -) -> list[tuple[str, Any]]: - """Recursively flatten a value into dot/bracket-notation rows.""" - if isinstance(value, dict): - if _normalize_path(prefix) in dict_paths: - return [(prefix, value)] - result: list[tuple[str, Any]] = [] - for k, v in value.items(): - result.extend(_flatten_value(f"{prefix}.{k}", v, dict_paths)) - return result - if isinstance(value, list) and value and isinstance(value[0], (dict, list)): - result = [] - for i, item in enumerate(value): - result.extend(_flatten_value(f"{prefix}[{i}]", item, dict_paths)) - return result - return [(prefix, value)] - - -def flatten_example( - raw: dict[str, Any], - *, - skip_keys: frozenset[str] = _DEFAULT_SKIP_KEYS, - dict_paths: frozenset[str] = _DEFAULT_DICT_PATHS, -) -> list[tuple[str, Any]]: - """Flatten nested example dict to dot-notation key-value pairs. - - Nested dicts become ``"parent.child"``; lists of dicts become - ``"parent[0].child"``; lists of lists of dicts use double-index - notation ``"parent[0][1].child"``. Keys in *skip_keys* are dropped - at the top level only. Plain lists are kept as values. - - Dicts at paths in *dict_paths* are kept as leaf values instead of - being recursed into. Use ``collect_dict_paths`` to compute this set - from a FieldSpec tree. 
- """ - result: list[tuple[str, Any]] = [] - for key, value in raw.items(): - if key in skip_keys: - continue - result.extend(_flatten_value(key, value, dict_paths)) + result: BaseModel = TypeAdapter(validation_type).validate_python(preprocessed) + assert isinstance(result, BaseModel) return result @@ -264,6 +148,121 @@ def sort_key(row: tuple[str, Any]) -> int: return sorted(flat_rows, key=sort_key) +def _structured_fields(value: object) -> list[tuple[str, Any]] | None: + """Extract named fields from ``__slots__``-based types like BBox. + + Returns a list of ``(name, value)`` pairs for types that expose + public properties backed by private slots (``_name`` -> ``name``). + Returns ``None`` for types without this pattern. + """ + cls = type(value) + slots = getattr(cls, "__slots__", ()) + if not slots: + return None + fields: list[tuple[str, Any]] = [] + for slot in slots: + attr = slot.lstrip("_") + if attr != slot and isinstance(getattr(cls, attr, None), property): + fields.append((attr, getattr(value, attr))) + return fields if len(fields) >= 2 else None + + +def _needs_recursion(items: list[Any]) -> bool: + """Check whether list items contain models or nested lists.""" + return bool(items) and isinstance(items[0], (BaseModel, list)) + + +def _flatten_list_items(key: str, items: list[Any]) -> list[tuple[str, Any]]: + """Flatten list items, recursing into BaseModel and nested list items. + + Returns the list as a single leaf value when no items need recursion. + Pydantic model fields produce homogeneous lists, so the first item's + type determines the flattening strategy. 
+ """ + if not _needs_recursion(items): + return [(key, items)] + rows: list[tuple[str, Any]] = [] + for i, item in enumerate(items): + if isinstance(item, BaseModel): + rows.extend(flatten_model_instance(item, f"{key}[{i}].")) + elif isinstance(item, list): + rows.extend(_flatten_list_items(f"{key}[{i}]", item)) + else: + rows.append((f"{key}[{i}]", item)) + return rows + + +def flatten_model_instance( + instance: BaseModel, + prefix: str = "", +) -> list[tuple[str, Any]]: + """Flatten a Pydantic model instance to dot-notation key-value pairs. + + Walks model fields recursively. BaseModel values recurse with dot + notation, lists of BaseModel recurse with bracket notation, and + everything else (dicts, primitives, None) is a leaf value. + + Parameters + ---------- + instance + The Pydantic model instance to flatten. + prefix + Dot-notation prefix accumulated from parent fields. + + Returns + ------- + list[tuple[str, Any]] + Flattened key-value pairs in field declaration order. + """ + rows: list[tuple[str, Any]] = [] + for field_name, field_info in type(instance).model_fields.items(): + key = resolve_field_alias(field_name, field_info) + value = getattr(instance, field_name) + full_key = f"{prefix}{key}" if prefix else key + + if isinstance(value, BaseModel): + rows.extend(flatten_model_instance(value, f"{full_key}.")) + elif isinstance(value, list): + rows.extend(_flatten_list_items(full_key, value)) + elif (sub_fields := _structured_fields(value)) is not None: + for name, v in sub_fields: + rows.append((f"{full_key}.{name}", v)) + else: + rows.append((full_key, value)) + return rows + + +def augment_missing_fields( + rows: list[tuple[str, Any]], + field_names: list[str], +) -> list[tuple[str, Any]]: + """Add (name, None) entries for fields absent from *rows*. + + Compares base field names (via ``extract_base_field``) against + *field_names*. Fields in *field_names* not represented in *rows* + are appended as ``(name, None)``. 
Handles dot-notation and bracket- + notation keys correctly. + + Parameters + ---------- + rows + Flattened key-value pairs from a concrete model instance. + field_names + Merged field name list from the union spec. + + Returns + ------- + list[tuple[str, Any]] + Original rows with (name, None) entries appended for absent fields. + """ + present = {extract_base_field(key) for key, _ in rows} + augmented = list(rows) + for name in field_names: + if name not in present: + augmented.append((name, None)) + return augmented + + def load_examples_from_toml( pyproject_path: Path, model_name: str, @@ -308,7 +307,6 @@ def load_examples( *, pyproject_source: type | None = None, model_fields: dict[str, FieldInfo] | None = None, - dict_paths: frozenset[str] = _DEFAULT_DICT_PATHS, ) -> list[ExampleRecord]: """Load examples for a model, flattened and ordered by *field_names*. @@ -330,9 +328,6 @@ def load_examples( model_fields : dict[str, FieldInfo] or None Field info dict for Literal injection. If None, infers from validation_type if it's a BaseModel class. - dict_paths : frozenset[str] - Dot-paths of dict-typed fields to keep as leaf values. - Use ``collect_dict_paths`` to compute from a FieldSpec tree. 
""" source_type = pyproject_source if pyproject_source is not None else validation_type if not isinstance(source_type, type): @@ -354,7 +349,7 @@ def load_examples( records = [] for raw in raw_examples: try: - denulled = validate_example(validation_type, raw, model_fields=model_fields) + instance = validate_example(validation_type, raw, model_fields=model_fields) except ValidationError as e: log.warning( "Skipping invalid example for %s in %s: %s", @@ -363,7 +358,8 @@ def load_examples( e, ) continue - flat_rows = flatten_example(denulled, dict_paths=dict_paths) + flat_rows = flatten_model_instance(instance) + flat_rows = augment_missing_fields(flat_rows, field_names) ordered_rows = order_example_rows(flat_rows, field_names) records.append(ExampleRecord(rows=ordered_rows)) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py index 0bd143c56..dbd21f0f4 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py @@ -13,7 +13,7 @@ import overture.schema.system.primitive as _system_primitive from overture.schema.system.primitive import GeometryType -from ..extraction.examples import ExampleRecord, collect_dict_paths, load_examples +from ..extraction.examples import ExampleRecord, load_examples from ..extraction.model_extraction import expand_model_tree from ..extraction.primitive_extraction import ( extract_primitives, @@ -74,14 +74,12 @@ def _load_model_examples( if not pyproject_source: return None field_names = [f.name for f in spec.fields] - dict_paths = collect_dict_paths(spec.fields) examples = load_examples( validation_type, spec.name, field_names, pyproject_source=pyproject_source, model_fields=model_fields, - dict_paths=dict_paths, ) return examples or None diff --git 
a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py index 6ef448eb5..0dac5c258 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py @@ -1,5 +1,6 @@ """Markdown renderer for Pydantic model documentation.""" +import datetime import functools import json import re @@ -173,20 +174,29 @@ def _format_example_value(value: object) -> str: if isinstance(value, bool): return "`true`" if value else "`false`" + if isinstance(value, datetime.datetime): + return f"`{value.isoformat()}`" + + if isinstance(value, datetime.date): + return f"`{value.isoformat()}`" + if isinstance(value, str): if value == "": return "" return f"`{_truncate(value)}`" if isinstance(value, list): - items = ", ".join(json.dumps(item) for item in value) + items = ", ".join(json.dumps(item, default=str) for item in value) return f"`{_truncate(f'[{items}]')}`" if isinstance(value, dict): - pairs = ", ".join(f"{json.dumps(k)}: {json.dumps(v)}" for k, v in value.items()) + pairs = ", ".join( + f"{json.dumps(k, default=str)}: {json.dumps(v, default=str)}" + for k, v in value.items() + ) return f"`{_truncate(f'{{{pairs}}}')}`" - return f"`{value}`" + return f"`{_truncate(str(value))}`" def _field_template_context( diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py index 0cc047e6e..4b7132145 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py @@ -107,11 +107,11 @@ def _format_union_members( ctx: LinkContext | None, separator: str = r" \| ", ) -> str: - """Format union members as individually 
linked/backticked names. + r"""Format union members as individually linked/backticked names. Each member is resolved independently so members with pages get linked while others render as plain code spans. *separator* is inserted between - members (default is ``\\|`` for table-cell safety). + members (default is ``\|`` for table-cell safety). """ return separator.join(resolve_type_link(TypeIdentity.of(m), ctx) for m in members) diff --git a/packages/overture-schema-codegen/tests/test_example_loader.py b/packages/overture-schema-codegen/tests/test_example_loader.py index 19a562676..780efc089 100644 --- a/packages/overture-schema-codegen/tests/test_example_loader.py +++ b/packages/overture-schema-codegen/tests/test_example_loader.py @@ -1,4 +1,4 @@ -"""Tests for example_loader module.""" +"""Tests for examples module.""" import logging import sys @@ -11,164 +11,18 @@ import pytest from overture.schema.codegen.extraction.examples import ( ExampleRecord, - _denull, _inject_literal_fields, - collect_dict_paths, - flatten_example, + augment_missing_fields, + flatten_model_instance, load_examples, load_examples_from_toml, order_example_rows, resolve_pyproject_path, validate_example, ) -from overture.schema.codegen.extraction.specs import FieldSpec, ModelSpec -from overture.schema.codegen.extraction.type_analyzer import TypeInfo, TypeKind from pydantic import BaseModel, ConfigDict, Field, Tag, ValidationError -class TestFlattenExample: - """Tests for flatten_example function.""" - - def test_simple_fields(self) -> None: - """Flatten simple key-value pairs.""" - raw = {"id": "123", "version": 1, "name": "test"} - result = flatten_example(raw) - assert result == [("id", "123"), ("version", 1), ("name", "test")] - - def test_nested_dict(self) -> None: - """Flatten nested dict to dot notation.""" - raw = {"names": {"primary": "foo", "common": {"en": "bar"}}} - result = flatten_example(raw) - assert result == [ - ("names.primary", "foo"), - ("names.common.en", "bar"), - ] - - 
def test_list_of_dicts(self) -> None: - """Flatten list of dicts with array notation.""" - raw = {"sources": [{"dataset": "OSM", "record_id": "w123"}]} - result = flatten_example(raw) - assert result == [ - ("sources[0].dataset", "OSM"), - ("sources[0].record_id", "w123"), - ] - - def test_bbox_flattened_at_top_level(self) -> None: - """Bbox fields are flattened like any other nested dict.""" - raw = { - "id": "123", - "bbox": {"xmin": -176.6, "xmax": -176.64}, - "version": 1, - } - result = flatten_example(raw) - assert result == [ - ("id", "123"), - ("bbox.xmin", -176.6), - ("bbox.xmax", -176.64), - ("version", 1), - ] - - def test_plain_list_kept_as_value(self) -> None: - """Plain lists (non-dict items) are kept as values.""" - raw = {"phones": ["+1234", "+5678"]} - result = flatten_example(raw) - assert result == [("phones", ["+1234", "+5678"])] - - def test_empty_dict(self) -> None: - """Empty dict produces empty list.""" - raw: dict[str, object] = {} - result = flatten_example(raw) - assert result == [] - - def test_empty_list(self) -> None: - """Empty list is kept as value.""" - raw: dict[str, object] = {"tags": []} - result = flatten_example(raw) - assert result == [("tags", [])] - - def test_list_of_list_of_dicts(self) -> None: - """Flatten list[list[dict]] with double-index notation.""" - raw = { - "hierarchies": [ - [ - {"division_id": "aaa", "name": "Country"}, - {"division_id": "bbb", "name": "Region"}, - ], - ] - } - result = flatten_example(raw) - assert result == [ - ("hierarchies[0][0].division_id", "aaa"), - ("hierarchies[0][0].name", "Country"), - ("hierarchies[0][1].division_id", "bbb"), - ("hierarchies[0][1].name", "Region"), - ] - - def test_multiple_list_items(self) -> None: - """Handle multiple items in list of dicts.""" - raw = { - "sources": [ - {"dataset": "OSM", "confidence": 0.9}, - {"dataset": "MSFT", "confidence": 0.8}, - ] - } - result = flatten_example(raw) - assert result == [ - ("sources[0].dataset", "OSM"), - 
("sources[0].confidence", 0.9), - ("sources[1].dataset", "MSFT"), - ("sources[1].confidence", 0.8), - ] - - def test_dict_field_kept_as_leaf(self) -> None: - """Dict values at dict_paths are kept as leaf values.""" - raw = { - "name": "test", - "tags": {"color": "red", "size": "large"}, - } - result = flatten_example(raw, dict_paths=frozenset({"tags"})) - assert result == [ - ("name", "test"), - ("tags", {"color": "red", "size": "large"}), - ] - - def test_nested_dict_path_kept_as_leaf(self) -> None: - """Dict values at nested dict_paths are kept as leaf values.""" - raw = { - "names": { - "primary": "Tower", - "common": {"en": "Tower", "fr": "Tour"}, - }, - } - result = flatten_example(raw, dict_paths=frozenset({"names.common"})) - assert result == [ - ("names.primary", "Tower"), - ("names.common", {"en": "Tower", "fr": "Tour"}), - ] - - def test_empty_dict_paths_preserves_behavior(self) -> None: - """Empty dict_paths (default) recurses all dicts as before.""" - raw = {"tags": {"color": "red"}} - result = flatten_example(raw) - assert result == [("tags.color", "red")] - - def test_dict_inside_list_kept_as_leaf(self) -> None: - """Dict at indexed path matches schema path in dict_paths.""" - raw = { - "items": [ - {"name": "a", "tags": {"color": "red"}}, - {"name": "b", "tags": {"size": "large"}}, - ], - } - result = flatten_example(raw, dict_paths=frozenset({"items[].tags"})) - assert result == [ - ("items[0].name", "a"), - ("items[0].tags", {"color": "red"}), - ("items[1].name", "b"), - ("items[1].tags", {"size": "large"}), - ] - - class TestOrderExampleRows: """Tests for order_example_rows function.""" @@ -420,9 +274,11 @@ def test_end_to_end(self, mock_project: MockProject) -> None: [[examples.Building]] version = 1 - names = { primary = "Tower" } id = "123" + [examples.Building.names] + primary = "Tower" + [examples.Building.bbox] xmin = 1.0 xmax = 2.0 @@ -433,12 +289,27 @@ def test_end_to_end(self, mock_project: MockProject) -> None: """) ) + class 
Names(BaseModel): + primary: str + secondary: str | None = None + + class Bbox(BaseModel): + xmin: float + xmax: float + ymin: float | None = None + ymax: float | None = None + + class Source(BaseModel): + dataset: str + record_id: str + class MockModel(BaseModel): __module__ = mock_project.mod_name id: str version: int - names: dict[str, object] - sources: list[dict[str, object]] + bbox: Bbox | None = None + names: Names | None = None + sources: list[Source] = [] field_names = ["id", "bbox", "names", "sources", "version"] result = load_examples(MockModel, "Building", field_names) @@ -451,7 +322,10 @@ class MockModel(BaseModel): ("id", "123"), ("bbox.xmin", 1.0), ("bbox.xmax", 2.0), + ("bbox.ymin", None), + ("bbox.ymax", None), ("names.primary", "Tower"), + ("names.secondary", None), ("sources[0].dataset", "OSM"), ("sources[0].record_id", "w456"), ("version", 1), @@ -518,8 +392,8 @@ class MockModel(BaseModel): for record in caplog.records ) - def test_dict_paths_keep_dicts_as_leaves(self, mock_project: MockProject) -> None: - """Dict fields listed in dict_paths stay as leaf values.""" + def test_dict_field_kept_as_leaf(self, mock_project: MockProject) -> None: + """Dict fields are kept as leaf values without dict_paths.""" mock_project.write_pyproject( dedent(""" [project] @@ -539,12 +413,7 @@ class MockModel(BaseModel): name: str tags: dict[str, str] - result = load_examples( - MockModel, - "MockModel", - ["name", "tags"], - dict_paths=frozenset({"tags"}), - ) + result = load_examples(MockModel, "MockModel", ["name", "tags"]) assert len(result) == 1 assert result[0].rows == [ @@ -552,78 +421,6 @@ class MockModel(BaseModel): ("tags", {"color": "red", "size": "large"}), ] - def test_denulled_values_in_output(self, mock_project: MockProject) -> None: - """Flattened output contains None not "null" strings.""" - mock_project.write_pyproject( - dedent(""" - [project] - name = "test" - - [[examples.MockModel]] - name = "test" - value = "null" - """) - ) - - class 
MockModel(BaseModel): - __module__ = mock_project.mod_name - name: str - value: int | None - - result = load_examples(MockModel, "MockModel", ["name", "value"]) - - assert len(result) == 1 - assert result[0].rows == [("name", "test"), ("value", None)] - - -class TestDenull: - """Tests for _denull function.""" - - def test_converts_null_string_to_none(self) -> None: - """Top-level "null" strings become None.""" - assert _denull({"a": "null"}) == {"a": None} - - def test_nested_dict(self) -> None: - """Recurse into nested dicts.""" - data = {"a": {"b": "null"}} - assert _denull(data) == {"a": {"b": None}} - - def test_list_of_dicts(self) -> None: - """Recurse into dicts inside lists.""" - data = {"items": [{"x": "null"}]} - assert _denull(data) == {"items": [{"x": None}]} - - def test_mixed_types_unchanged(self) -> None: - """Non-"null" strings, ints, bools, and plain lists pass through.""" - data = { - "name": "hello", - "count": 42, - "flag": True, - "tags": ["a", "b"], - "score": 3.14, - } - assert _denull(data) == data - - def test_no_mutation(self) -> None: - """Original dict is not modified.""" - original = {"a": "null", "b": {"c": "null"}} - _denull(original) - assert original == {"a": "null", "b": {"c": "null"}} - - def test_empty_dict(self) -> None: - """Empty dict returns empty dict.""" - assert _denull({}) == {} - - def test_deeply_nested(self) -> None: - """Handle multiple levels of nesting.""" - data = {"a": {"b": {"c": "null"}}} - assert _denull(data) == {"a": {"b": {"c": None}}} - - def test_null_strings_in_plain_list(self) -> None: - """Convert "null" strings inside plain lists.""" - data = {"tags": ["a", "null", "b"]} - assert _denull(data) == {"tags": ["a", None, "b"]} - class TestInjectLiteralFields: """Tests for _inject_literal_fields function.""" @@ -733,8 +530,8 @@ class MockModel(BaseModel): class TestValidateExample: """Tests for validate_example function.""" - def test_valid_data_passes(self) -> None: - """Valid data is validated and denulled 
dict returned.""" + def test_valid_data_returns_instance(self) -> None: + """Valid data returns a model instance.""" class MockModel(BaseModel): name: str @@ -742,7 +539,9 @@ class MockModel(BaseModel): raw = {"name": "test", "count": 42} result = validate_example(MockModel, raw) - assert result == {"name": "test", "count": 42} + assert isinstance(result, MockModel) + assert result.name == "test" + assert result.count == 42 def test_invalid_data_raises_validation_error(self) -> None: """Invalid data raises ValidationError.""" @@ -754,17 +553,6 @@ class MockModel(BaseModel): with pytest.raises(ValidationError): validate_example(MockModel, raw) - def test_denulled_dict_returned(self) -> None: - """Denulled dict is returned, not raw or preprocessed.""" - - class MockModel(BaseModel): - name: str - value: int | None - - raw = {"name": "test", "value": "null"} - result = validate_example(MockModel, raw) - assert result == {"name": "test", "value": None} - def test_literals_injected_before_validation(self) -> None: """Missing Literal fields are injected before validation.""" @@ -774,59 +562,50 @@ class MockModel(BaseModel): raw = {"name": "Tower"} result = validate_example(MockModel, raw) - # Returned dict is denulled, NOT preprocessed (no injected literals) - assert result == {"name": "Tower"} + assert isinstance(result, MockModel) + assert result.theme == "buildings" + assert result.name == "Tower" -class TestValidateExampleWithUnion: - """Tests for validate_example with discriminated unions via TypeAdapter.""" +class _Dog(BaseModel): + kind: Literal["dog"] + bark: str - def test_validates_union_via_type_adapter(self) -> None: - """TypeAdapter validates against a discriminated union.""" - class Dog(BaseModel): - kind: Literal["dog"] - bark: str +class _Cat(BaseModel): + kind: Literal["cat"] + purr: bool - class Cat(BaseModel): - kind: Literal["cat"] - purr: bool - PetUnion = Annotated[ - Annotated[Dog, Tag("dog")] | Annotated[Cat, Tag("cat")], - 
Field(discriminator="kind"), - ] +_PetUnion = Annotated[ + Annotated[_Dog, Tag("dog")] | Annotated[_Cat, Tag("cat")], + Field(discriminator="kind"), +] + + +class TestValidateExampleWithUnion: + """Tests for validate_example with discriminated unions via TypeAdapter.""" + def test_validates_union_via_type_adapter(self) -> None: + """TypeAdapter validates against a discriminated union.""" raw = {"kind": "dog", "bark": "woof"} - result = validate_example(PetUnion, raw, model_fields=Dog.model_fields) - assert result == {"kind": "dog", "bark": "woof"} + result = validate_example(_PetUnion, raw, model_fields=_Dog.model_fields) + assert isinstance(result, _Dog) + assert result.kind == "dog" + assert result.bark == "woof" def test_invalid_union_example_raises(self) -> None: """Invalid data against union raises ValidationError.""" - - class Dog(BaseModel): - kind: Literal["dog"] - bark: str - - class Cat(BaseModel): - kind: Literal["cat"] - purr: bool - - PetUnion = Annotated[ - Annotated[Dog, Tag("dog")] | Annotated[Cat, Tag("cat")], - Field(discriminator="kind"), - ] - raw = {"kind": "dog", "bark": 42} # bark should be str with pytest.raises(ValidationError): - validate_example(PetUnion, raw, model_fields=Dog.model_fields) + validate_example(_PetUnion, raw, model_fields=_Dog.model_fields) - def test_null_cross_arm_fields_accepted(self) -> None: - """Null fields from other union arms are accepted in flat-schema examples. + def test_null_cross_arm_fields_stripped_for_validation(self) -> None: + """Null fields from other union arms are stripped before validation. Parquet files have columns for all union arms. A road segment row - includes ``rail_flags=null`` because the column exists in the table. - Validation should accept these cross-arm nulls. + includes rail_flags=null because the column exists. Preprocessing + strips these so extra='forbid' models accept the data. 
""" class _Base(BaseModel): @@ -848,15 +627,11 @@ class Cat(_Base): ] # Flat schema: Dog example includes Cat's "purr" field as null - raw = {"kind": "dog", "name": "Rex", "bark": "woof", "purr": "null"} + raw = {"kind": "dog", "name": "Rex", "bark": "woof", "purr": None} result = validate_example(PetUnion, raw, model_fields=_Base.model_fields) - # Returned dict preserves the original denulled data - assert result == { - "kind": "dog", - "name": "Rex", - "bark": "woof", - "purr": None, - } + assert isinstance(result, Dog) + assert result.name == "Rex" + assert result.bark == "woof" class TestIntegration: @@ -868,19 +643,17 @@ def test_real_building_examples_validate(self) -> None: from overture.schema.buildings.building import Building # noqa: PLC0415 - # Find the pyproject.toml for the Building model pyproject_path = resolve_pyproject_path(Building) assert pyproject_path is not None, "Could not find pyproject.toml for Building" - # Load raw examples from TOML raw_examples = load_examples_from_toml(pyproject_path, "Building") assert len(raw_examples) > 0, "No Building examples found in pyproject.toml" - # Validate each example for idx, raw_example in enumerate(raw_examples): - # Should not raise ValidationError validated = validate_example(Building, raw_example) - assert isinstance(validated, dict), f"Example {idx}: Expected dict result" + assert isinstance(validated, BaseModel), ( + f"Example {idx}: Expected BaseModel" + ) def test_real_segment_examples_validate(self) -> None: """Validate real Segment examples (discriminated union with cross-arm fields).""" @@ -904,103 +677,256 @@ def test_real_segment_examples_validate(self) -> None: raw_example, model_fields=TransportationSegment.model_fields, ) - assert isinstance(validated, dict), f"Example {idx}: Expected dict result" - - -def _field( - name: str, - *, - kind: TypeKind = TypeKind.PRIMITIVE, - base_type: str = "str", - is_dict: bool = False, - list_depth: int = 0, - is_required: bool = True, - model: ModelSpec 
| None = None, - starts_cycle: bool = False, -) -> FieldSpec: - """Build a FieldSpec with sensible defaults for testing.""" - return FieldSpec( - name=name, - type_info=TypeInfo( - base_type=base_type, kind=kind, is_dict=is_dict, list_depth=list_depth - ), - description=None, - is_required=is_required, - model=model, - starts_cycle=starts_cycle, - ) - - -class TestCollectDictPaths: - """Tests for collect_dict_paths.""" - - def test_no_dict_fields(self) -> None: - """Model with only primitive fields returns empty set.""" - fields = [_field("name")] - assert collect_dict_paths(fields) == frozenset() - - def test_top_level_dict_field(self) -> None: - """Dict field at top level is collected.""" - fields = [ - _field("name"), - _field("tags", is_dict=True, is_required=False), + assert isinstance(validated, BaseModel), ( + f"Example {idx}: Expected BaseModel" + ) + + +class TestAugmentMissingFields: + """Tests for augment_missing_fields function.""" + + def test_no_missing_fields(self) -> None: + """All fields present, nothing augmented.""" + rows = [("id", "123"), ("name", "test")] + result = augment_missing_fields(rows, ["id", "name"]) + assert result == [("id", "123"), ("name", "test")] + + def test_missing_top_level_field(self) -> None: + """Missing field added as (name, None).""" + rows = [("id", "123")] + result = augment_missing_fields(rows, ["id", "name", "level"]) + assert result == [("id", "123"), ("name", None), ("level", None)] + + def test_dotted_field_counts_as_present(self) -> None: + """A dotted key like 'names.primary' counts 'names' as present.""" + rows = [("id", "123"), ("names.primary", "foo")] + result = augment_missing_fields(rows, ["id", "names"]) + assert result == [("id", "123"), ("names.primary", "foo")] + + def test_indexed_field_counts_as_present(self) -> None: + """A bracketed key like 'sources[0].dataset' counts 'sources' as present.""" + rows = [("id", "123"), ("sources[0].dataset", "OSM")] + result = augment_missing_fields(rows, ["id", 
"sources"]) + assert result == [("id", "123"), ("sources[0].dataset", "OSM")] + + def test_union_cross_arm_fields_added(self) -> None: + """Fields from other union arms are added as None.""" + rows = [ + ("kind", "dog"), + ("name", "Rex"), + ("bark", "woof"), + ] + field_names = ["kind", "name", "bark", "purr"] + result = augment_missing_fields(rows, field_names) + assert result == [ + ("kind", "dog"), + ("name", "Rex"), + ("bark", "woof"), + ("purr", None), + ] + + +class TestFlattenModelInstance: + """Tests for flatten_model_instance function.""" + + def test_simple_fields(self) -> None: + """Flatten simple model fields.""" + + class Simple(BaseModel): + id: str + version: int + + instance = Simple(id="123", version=1) + result = flatten_model_instance(instance) + assert result == [("id", "123"), ("version", 1)] + + def test_nested_model(self) -> None: + """Nested BaseModel fields use dot notation.""" + + class Inner(BaseModel): + primary: str + secondary: str | None = None + + class Outer(BaseModel): + name: str + names: Inner + + instance = Outer(name="test", names=Inner(primary="foo")) + result = flatten_model_instance(instance) + assert result == [ + ("name", "test"), + ("names.primary", "foo"), + ("names.secondary", None), ] - assert collect_dict_paths(fields) == frozenset({"tags"}) - def test_nested_dict_in_sub_model(self) -> None: - """Dict field inside a sub-model produces dotted path.""" - inner_fields = [ - _field("primary"), - _field("common", is_dict=True, is_required=False), + def test_list_of_models(self) -> None: + """List of BaseModel uses bracket notation.""" + + class Source(BaseModel): + dataset: str + record_id: str + + class Feature(BaseModel): + id: str + sources: list[Source] + + instance = Feature( + id="123", + sources=[ + Source(dataset="OSM", record_id="w123"), + Source(dataset="MSFT", record_id="w456"), + ], + ) + result = flatten_model_instance(instance) + assert result == [ + ("id", "123"), + ("sources[0].dataset", "OSM"), + 
("sources[0].record_id", "w123"), + ("sources[1].dataset", "MSFT"), + ("sources[1].record_id", "w456"), ] - inner_model = ModelSpec(name="Names", description=None, fields=inner_fields) - fields = [ - _field("names", kind=TypeKind.MODEL, base_type="Names", model=inner_model) + + def test_dict_field_kept_as_leaf(self) -> None: + """Dict-typed fields are leaf values, not recursed.""" + + class Tagged(BaseModel): + name: str + tags: dict[str, str] + + instance = Tagged(name="test", tags={"color": "red", "size": "large"}) + result = flatten_model_instance(instance) + assert result == [ + ("name", "test"), + ("tags", {"color": "red", "size": "large"}), ] - assert collect_dict_paths(fields) == frozenset({"names.common"}) - - def test_list_of_model_with_dict(self) -> None: - """Dict inside list-of-model uses [] in path.""" - inner_fields = [_field("tags", is_dict=True, is_required=False)] - inner_model = ModelSpec(name="Item", description=None, fields=inner_fields) - fields = [ - _field( - "items", - kind=TypeKind.MODEL, - base_type="Item", - list_depth=1, - model=inner_model, - ), + + def test_none_defaulted_fields_appear(self) -> None: + """Fields with None defaults still appear in output.""" + + class WithDefaults(BaseModel): + name: str + level: int | None = None + height: float | None = None + + instance = WithDefaults(name="test") + result = flatten_model_instance(instance) + assert result == [ + ("name", "test"), + ("level", None), + ("height", None), ] - assert collect_dict_paths(fields) == frozenset({"items[].tags"}) - - def test_nested_list_depth(self) -> None: - """list[list[Model]] produces [][] in path.""" - inner_fields = [_field("tags", is_dict=True)] - inner_model = ModelSpec(name="Item", description=None, fields=inner_fields) - fields = [ - _field( - "items", - kind=TypeKind.MODEL, - base_type="Item", - list_depth=2, - model=inner_model, - ), + + def test_plain_list_kept_as_leaf(self) -> None: + """Plain list of primitives is a single leaf value.""" + + 
class WithList(BaseModel): + phones: list[str] + + instance = WithList(phones=["+1234", "+5678"]) + result = flatten_model_instance(instance) + assert result == [("phones", ["+1234", "+5678"])] + + def test_empty_list_kept_as_leaf(self) -> None: + """Empty list is a leaf value.""" + + class WithList(BaseModel): + tags: list[str] = [] + + instance = WithList() + result = flatten_model_instance(instance) + assert result == [("tags", [])] + + def test_nested_list_of_lists_of_models(self) -> None: + """list[list[Model]] uses double-index notation.""" + + class Node(BaseModel): + division_id: str + name: str + + class Feature(BaseModel): + hierarchies: list[list[Node]] + + instance = Feature( + hierarchies=[ + [ + Node(division_id="aaa", name="Country"), + Node(division_id="bbb", name="Region"), + ], + ] + ) + result = flatten_model_instance(instance) + assert result == [ + ("hierarchies[0][0].division_id", "aaa"), + ("hierarchies[0][0].name", "Country"), + ("hierarchies[0][1].division_id", "bbb"), + ("hierarchies[0][1].name", "Region"), ] - assert collect_dict_paths(fields) == frozenset({"items[][].tags"}) - - def test_cycle_stops_recursion(self) -> None: - """Fields with starts_cycle=True are not recursed into.""" - inner_fields = [_field("data", is_dict=True, is_required=False)] - inner_model = ModelSpec(name="Node", description=None, fields=inner_fields) - fields = [ - _field( - "child", - kind=TypeKind.MODEL, - base_type="Node", - is_required=False, - model=inner_model, - starts_cycle=True, - ), + + def test_none_model_field_is_leaf(self) -> None: + """A model-typed field with None value is a leaf, not recursed.""" + + class Inner(BaseModel): + value: str + + class Outer(BaseModel): + name: str + inner: Inner | None = None + + instance = Outer(name="test") + result = flatten_model_instance(instance) + assert result == [("name", "test"), ("inner", None)] + + def test_field_alias(self) -> None: + """Field with validation_alias uses the alias as key.""" + + class 
Aliased(BaseModel): + class_: Literal["building"] = Field(validation_alias="class") + name: str + + instance = Aliased.model_validate({"class": "building", "name": "Tower"}) + result = flatten_model_instance(instance) + assert result == [("class", "building"), ("name", "Tower")] + + def test_slots_based_field_flattened(self) -> None: + """Non-BaseModel types with __slots__ and properties are flattened.""" + from overture.schema.system.primitive import BBox # noqa: PLC0415 + + class WithBBox(BaseModel): + id: str + bbox: BBox | None = None + + instance = WithBBox(id="123", bbox=BBox(xmin=1.0, ymin=2.0, xmax=3.0, ymax=4.0)) + result = flatten_model_instance(instance) + assert result == [ + ("id", "123"), + ("bbox.xmin", 1.0), + ("bbox.ymin", 2.0), + ("bbox.xmax", 3.0), + ("bbox.ymax", 4.0), ] - assert collect_dict_paths(fields) == frozenset() + + def test_none_slots_based_field_is_leaf(self) -> None: + """A slots-based field with None value is a leaf.""" + from overture.schema.system.primitive import BBox # noqa: PLC0415 + + class WithBBox(BaseModel): + id: str + bbox: BBox | None = None + + instance = WithBBox(id="123") + result = flatten_model_instance(instance) + assert result == [("id", "123"), ("bbox", None)] + + def test_single_slot_wrapper_is_leaf(self) -> None: + """Single-slot types (wrappers like Geometry) are leaf values.""" + from overture.schema.system.primitive import Geometry # noqa: PLC0415 + from shapely.geometry import Point # noqa: PLC0415 + + class WithGeom(BaseModel): + id: str + geometry: Geometry + + geom = Geometry(Point(1, 2)) + instance = WithGeom(id="123", geometry=geom) + result = flatten_model_instance(instance) + assert result == [("id", "123"), ("geometry", geom)] diff --git a/packages/overture-schema-divisions-theme/pyproject.toml b/packages/overture-schema-divisions-theme/pyproject.toml index 21fe72551..0a1cd9e61 100644 --- a/packages/overture-schema-divisions-theme/pyproject.toml +++ 
b/packages/overture-schema-divisions-theme/pyproject.toml @@ -45,9 +45,7 @@ country = "TO" version = 1 subtype = "locality" class = "village" -wikidata = "null" region = "TO-04" -perspectives = "null" hierarchies = [ [ {division_id = "fef8748b-0c91-46ad-9f2d-976d8d2de3e9", subtype = "country", name = "Tonga"}, @@ -57,10 +55,7 @@ hierarchies = [ ], ] parent_division_id = "8730f0cc-d436-4f11-a7d3-49085813ef44" -norms = "null" population = 534 -capital_division_ids = "null" -capital_of_divisions = "null" theme = "divisions" type = "division" @@ -75,26 +70,16 @@ property = "" dataset = "OpenStreetMap" record_id = "n3173231082@4" update_time = "2014-12-18T09:17:03Z" -confidence = "null" -between = "null" [examples.Division.cartography] prominence = 29 -min_zoom = "null" -max_zoom = "null" -sort_key = "null" [examples.Division.names] primary = "Sia'atoutai" -common = "null" [[examples.Division.names.rules]] variant = "alternate" -language = "null" -perspectives = "null" value = "Nafualu" -between = "null" -side = "null" [examples.Division.local_type] en = "village" @@ -124,13 +109,9 @@ property = "" dataset = "OpenStreetMap" record_id = "r7247527@3" update_time = "2020-12-30T18:41:56Z" -confidence = "null" -between = "null" [examples.DivisionArea.names] primary = "ʻEua" -common = "null" -rules = "null" [[examples.DivisionBoundary]] id = "2bdf68e4-860d-3d8c-a472-ccf439a5302a" @@ -145,9 +126,7 @@ division_ids = [ "ae266459-63a4-4508-8295-0101e27d039b", "d4a6873d-885a-4f2a-bc0f-37e9d9e874e4" ] -region = "null" is_disputed = false -perspectives = "null" theme = "divisions" type = "division_boundary" @@ -162,13 +141,9 @@ property = "" dataset = "OpenStreetMap" record_id = "r6063055@9" update_time = "2023-07-20T00:28:40Z" -confidence = "null" -between = "null" [[examples.DivisionBoundary.sources]] property = "" dataset = "OpenStreetMap" record_id = "r6063063@12" update_time = "2023-07-20T00:28:40Z" -confidence = "null" -between = "null" diff --git 
a/packages/overture-schema-places-theme/pyproject.toml b/packages/overture-schema-places-theme/pyproject.toml index 1d851b356..b374835be 100644 --- a/packages/overture-schema-places-theme/pyproject.toml +++ b/packages/overture-schema-places-theme/pyproject.toml @@ -48,7 +48,6 @@ websites = [ socials = [ "https://www.facebook.com/107663894904826" ] -emails = "null" phones = [ "+81977009000" ] @@ -68,28 +67,20 @@ dataset = "meta" record_id = "107663894904826" update_time = "2025-06-30T07:00:00.000Z" confidence = 0.7337175792507205 -between = "null" [examples.Place.names] primary = "スーパーホテル別府駅前" -common = "null" -rules = "null" [examples.Place.categories] primary = "hotel" -alternate = "null" [examples.Place.brand] -wikidata = "null" [examples.Place.brand.names] primary = "SUPER HOTEL" -common = "null" -rules = "null" [[examples.Place.addresses]] freeform = "秋田県横手市駅前町13−8" locality = "横手市" postcode = "013-0036" -region = "null" country = "JP" diff --git a/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py b/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py index ac0eef391..c078290c5 100644 --- a/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py +++ b/packages/overture-schema-system/src/overture/schema/system/primitive/bbox.py @@ -237,7 +237,7 @@ def validator( elif isinstance(value, tuple | list): return cls.from_geo_json(value) elif isinstance(value, dict): - BBox(**value) + return BBox(**value) else: raise TypeError( f"expected `BBox` or `tuple` or `list`; got `{type(value).__name__}` with value {repr(value)}" diff --git a/packages/overture-schema-transportation-theme/pyproject.toml b/packages/overture-schema-transportation-theme/pyproject.toml index 3302845b2..08811df39 100644 --- a/packages/overture-schema-transportation-theme/pyproject.toml +++ b/packages/overture-schema-transportation-theme/pyproject.toml @@ -54,10 +54,6 @@ ymax = -43.96794128417969 [[examples.Connector.sources]] 
property = "" dataset = "OpenStreetMap" -record_id = "null" -update_time = "null" -confidence = "null" -between = "null" [[examples.Segment]] id = "1bc62f3b-08b5-42b8-89fe-36f685f60455" @@ -65,17 +61,6 @@ geometry = "LINESTRING (-176.5636191 -43.954404, -176.5643637 -43.9538145, -176. version = 1 subtype = "road" class = "residential" -routes = "null" -subclass_rules = "null" -access_restrictions = "null" -level_rules = "null" -destinations = "null" -prohibited_transitions = "null" -road_flags = "null" -speed_limits = "null" -width_rules = "null" -subclass = "null" -rail_flags = "null" theme = "transportation" type = "segment" @@ -90,20 +75,13 @@ property = "" dataset = "OpenStreetMap" record_id = "w53435546@6" update_time = "2021-05-03T06:37:03Z" -confidence = "null" -between = "null" [examples.Segment.names] primary = "Meteorological Lane" -common = "null" [[examples.Segment.names.rules]] variant = "common" -language = "null" -perspectives = "null" value = "Meteorological Lane" -between = "null" -side = "null" [[examples.Segment.connectors]] connector_id = "15b2c131-9137-4add-88c6-2acd3fa61355" @@ -119,4 +97,3 @@ at = 1.0 [[examples.Segment.road_surface]] value = "gravel" -between = "null" From 53deb1619de5dcfd4d0798115a29bfddab5a9ca2 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Mon, 9 Mar 2026 22:47:06 -0700 Subject: [PATCH 36/38] style: single backticks in docstrings Replace rST-style double backticks with single backticks across docstrings to match project convention. Preserve double backticks where the wrapped text itself contains backtick characters (literal markdown syntax examples). Fix D301 in type_format.py with a raw docstring for backslash content. 
--- .../schema/codegen/extraction/examples.py | 20 +++++++------- .../codegen/extraction/model_extraction.py | 6 ++--- .../schema/codegen/extraction/specs.py | 12 ++++----- .../codegen/extraction/type_analyzer.py | 18 ++++++------- .../schema/codegen/layout/module_layout.py | 6 ++--- .../codegen/markdown/link_computation.py | 2 +- .../codegen/markdown/path_assignment.py | 4 +-- .../schema/codegen/markdown/renderer.py | 26 +++++++++---------- .../schema/codegen/markdown/type_format.py | 18 ++++++------- .../tests/test_example_loader.py | 2 +- .../tests/test_type_analyzer.py | 4 +-- .../src/overture/schema/system/feature.py | 10 +++---- 12 files changed, 64 insertions(+), 64 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py index 811a836c3..2241bbe59 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py @@ -41,10 +41,10 @@ def _inject_literal_fields( ) -> dict[str, Any]: """Inject single-value Literal field defaults missing from *data*. - Inspects *model_fields_dict* for fields with single-value ``Literal`` + Inspects *model_fields_dict* for fields with single-value `Literal` annotations. For each field missing from *data*, injects the literal - value using the field's ``validation_alias`` (if set), falling back - to ``alias``, then to the field name. + value using the field's `validation_alias` (if set), falling back + to `alias`, then to the field name. Returns a new dict; the original is not mutated. """ @@ -149,11 +149,11 @@ def sort_key(row: tuple[str, Any]) -> int: def _structured_fields(value: object) -> list[tuple[str, Any]] | None: - """Extract named fields from ``__slots__``-based types like BBox. + """Extract named fields from `__slots__`-based types like BBox. 
- Returns a list of ``(name, value)`` pairs for types that expose - public properties backed by private slots (``_name`` -> ``name``). - Returns ``None`` for types without this pattern. + Returns a list of `(name, value)` pairs for types that expose + public properties backed by private slots (`_name` -> `name`). + Returns `None` for types without this pattern. """ cls = type(value) slots = getattr(cls, "__slots__", ()) @@ -238,9 +238,9 @@ def augment_missing_fields( ) -> list[tuple[str, Any]]: """Add (name, None) entries for fields absent from *rows*. - Compares base field names (via ``extract_base_field``) against + Compares base field names (via `extract_base_field`) against *field_names*. Fields in *field_names* not represented in *rows* - are appended as ``(name, None)``. Handles dot-notation and bracket- + are appended as `(name, None)`. Handles dot-notation and bracket- notation keys correctly. Parameters @@ -267,7 +267,7 @@ def load_examples_from_toml( pyproject_path: Path, model_name: str, ) -> list[dict[str, Any]]: - """Load ``[examples.]`` from a pyproject.toml file.""" + """Load `[examples.]` from a pyproject.toml file.""" with pyproject_path.open("rb") as f: data = tomllib.load(f) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py index 81c360538..76807e123 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_extraction.py @@ -24,7 +24,7 @@ def resolve_field_alias(field_name: str, field_info: FieldInfo) -> str: """Return the data-dict key for a Pydantic field. - Prefers ``validation_alias``, falls back to ``alias``, then the + Prefers `validation_alias`, falls back to `alias`, then the Python field name. 
Only string aliases are supported; AliasPath and AliasChoices are ignored. """ @@ -153,10 +153,10 @@ def expand_model_tree( ) -> FeatureSpec: """Populate model references on MODEL-kind fields, recursively. - Walks *spec*'s fields and sets ``field.model`` for fields whose type + Walks *spec*'s fields and sets `field.model` for fields whose type is a Pydantic model. Uses *cache* to reuse already-extracted ModelSpecs and detect shared references. Marks fields whose model creates a cycle - in the ancestor chain with ``starts_cycle=True``. + in the ancestor chain with `starts_cycle=True`. Mutates *spec* in place and returns it. """ diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py index 801e69c23..132c966b8 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py @@ -39,7 +39,7 @@ class TypeIdentity: Pairs a unique Python object (class, NewType callable, or union annotation) with its display name. Equality and hashing delegate - to ``obj`` identity so registry lookups work regardless of how + to `obj` identity so registry lookups work regardless of how the display name was derived. """ @@ -65,12 +65,12 @@ def module(self) -> str: class _SourceTypeIdentityMixin: - """Mixin providing ``identity`` from ``source_type`` and ``name``. + """Mixin providing `identity` from `source_type` and `name`. Shared by EnumSpec, ModelSpec, NewTypeSpec, and PydanticTypeSpec -- - each has a ``source_type`` (the Python class/callable) and a ``name``. - UnionSpec uses ``source_annotation`` instead, so it defines its - own ``identity``. + each has a `source_type` (the Python class/callable) and a `name`. + UnionSpec uses `source_annotation` instead, so it defines its + own `identity`. 
""" source_type: object | None @@ -225,7 +225,7 @@ def docs_url(self) -> str: def is_pydantic_sourced(source_type: type | None) -> bool: - """Check whether *source_type* originates from the ``pydantic`` package.""" + """Check whether *source_type* originates from the `pydantic` package.""" return getattr(source_type, "__module__", "").startswith("pydantic") diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py index fd4c9a06b..a0cd5314f 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/type_analyzer.py @@ -79,8 +79,8 @@ def walk_type_info(ti: TypeInfo, visitor: Callable[[TypeInfo], None]) -> None: """Call *visitor* on *ti*, then recurse into dict key/value types. Captures the shared recursive descent pattern used by type collection - and reverse reference computation. Union members are ``type`` objects - (not ``TypeInfo``), so callers handle them directly. + and reverse reference computation. Union members are `type` objects + (not `TypeInfo`), so callers handle them directly. """ visitor(ti) if ti.dict_key_type is not None: @@ -108,13 +108,13 @@ class _UnwrapState: """Accumulated state from iterative type unwrapping. Tracks NewType names and refs during unwrapping: - - ``outermost_newtype_name`` / ``outermost_newtype_ref``: the first - NewType encountered, exposed as ``TypeInfo.newtype_name`` / ``newtype_ref``. - - ``last_newtype_name``: the most recently entered NewType name, used - as the resolved ``base_type`` for the terminal type. - - ``last_newtype_ref``: the most recently entered NewType callable, + - `outermost_newtype_name` / `outermost_newtype_ref`: the first + NewType encountered, exposed as `TypeInfo.newtype_name` / `newtype_ref`. 
+ - `last_newtype_name`: the most recently entered NewType name, used + as the resolved `base_type` for the terminal type. + - `last_newtype_ref`: the most recently entered NewType callable, used as constraint provenance (which NewType contributed each constraint). - - ``newtype_outer_list_depth``: list layers accumulated before entering + - `newtype_outer_list_depth`: list layers accumulated before entering the outermost NewType boundary. """ @@ -329,7 +329,7 @@ def single_literal_value(annotation: object) -> object | None: Delegates to analyze_type for all unwrapping, then checks whether the result is a single-value Literal. Multi-value Literals return None — callers needing all values should use - ``analyze_type`` and read ``literal_values`` directly. + `analyze_type` and read `literal_values` directly. """ try: ti = analyze_type(annotation) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py index f7b8f0a44..bb6b92379 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/module_layout.py @@ -25,7 +25,7 @@ def _split_entry_point(entry_point_path: str) -> tuple[str, str]: - """Split ``"module.path:ClassName"`` into its two parts. + """Split `"module.path:ClassName"` into its two parts. >>> _split_entry_point("overture.schema.buildings:Building") ('overture.schema.buildings', 'Building') @@ -98,7 +98,7 @@ def is_package_module( ) -> bool: """Check whether a module is a package (directory) or a file module. - Packages have ``__path__``; file modules do not (PEP 302). + Packages have `__path__`; file modules do not (PEP 302). 
""" registry: Mapping[str, object] = ( module_registry if module_registry is not None else sys.modules @@ -134,7 +134,7 @@ def compute_output_dir( """Compute output directory for a module, mirroring package structure. File modules drop their last component (the .py filename). - Packages keep all components. Returns ``PurePosixPath(".")`` for + Packages keep all components. Returns `PurePosixPath(".")` for the root directory. """ relpath = module_relpath(module, schema_root) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py index f05fa6fd7..bf09950c4 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/link_computation.py @@ -39,7 +39,7 @@ def _is_normalized(path: PurePosixPath) -> bool: def relative_link(source: PurePosixPath, target: PurePosixPath) -> str: """Compute a relative path from source file to target file. - Both paths must be normalized (no ``..`` components) and relative + Both paths must be normalized (no `..` components) and relative to the same output root. """ if not _is_normalized(source): diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py index 2700d5a9e..a05693310 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py @@ -95,10 +95,10 @@ def _aggregate_page_entries( def _nest_under_types( output_dir: PurePosixPath, feature_dirs: set[PurePosixPath] ) -> PurePosixPath: - """Insert ``types/`` after the feature directory portion. + """Insert `types/` after the feature directory portion. 
If *output_dir* equals or is a subdirectory of a feature directory, - returns a path with ``types/`` inserted after the feature directory. + returns a path with `types/` inserted after the feature directory. Otherwise returns *output_dir* unchanged. """ for fd in sorted(feature_dirs, key=lambda p: len(p.parts), reverse=True): diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py index 0dac5c258..13e0333b0 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py @@ -65,10 +65,10 @@ def _linkify_bare_urls(text: str) -> str: """Wrap bare URLs in Markdown link syntax. - Turns ``www.example.com`` into ``[www.example.com](https://www.example.com)`` - and ``https://example.com`` into ``[https://example.com](https://example.com)``. - URLs already inside ``[text](url)`` or backtick code spans are left - untouched. Trailing sentence punctuation (``.``, ``,``, etc.) is excluded + Turns `www.example.com` into `[www.example.com](https://www.example.com)` + and `https://example.com` into `[https://example.com](https://example.com)`. + URLs already inside `[text](url)` or backtick code spans are left + untouched. Trailing sentence punctuation (`.`, `,`, etc.) is excluded from the link. Two-pass approach: extract code spans first, linkify the remaining @@ -118,7 +118,7 @@ def _get_jinja_env() -> Environment: class _FieldRow(TypedDict): """Template context for a single field table row. - ``pre_formatted`` indicates the ``name`` already contains backticks + `pre_formatted` indicates the `name` already contains backticks and variant tags, so the template should render it verbatim. """ @@ -135,7 +135,7 @@ def _unwrap_paragraphs(text: str) -> str: r"""Unwrap hard-wrapped lines within paragraphs, preserving paragraph breaks. 
Splits on blank lines (paragraph boundaries), replaces single newlines - within each paragraph with spaces, then rejoins with ``\n\n``. + within each paragraph with spaces, then rejoins with `\n\n`. Matches markdown's treatment of newlines within paragraphs. """ paragraphs = _PARAGRAPH_BREAK_RE.split(text) @@ -146,8 +146,8 @@ def _sanitize_for_table_cell(text: str) -> str: """Sanitize text for embedding in a markdown table cell. Unwraps within-paragraph newlines to spaces, then converts paragraph - breaks to ``

``. Escapes pipe characters for table safety. - Uses ``
`` (not ``
``) for MDX/Docusaurus compatibility. + breaks to `

`. Escapes pipe characters for table safety. + Uses `
` (not `
`) for MDX/Docusaurus compatibility. """ text = text.strip() text = _unwrap_paragraphs(text) @@ -156,7 +156,7 @@ def _sanitize_for_table_cell(text: str) -> str: def _truncate(text: str) -> str: - """Truncate text to ``_EXAMPLE_TRUNCATION_LIMIT`` chars, adding ellipsis.""" + """Truncate text to `_EXAMPLE_TRUNCATION_LIMIT` chars, adding ellipsis.""" if len(text) > _EXAMPLE_TRUNCATION_LIMIT: return text[: _EXAMPLE_TRUNCATION_LIMIT - 3] + "..." return text @@ -255,7 +255,7 @@ def _annotate_field_constraints( def _expandable_list_suffix(field_spec: FieldSpec) -> str: - """Return ``"[]"`` per nesting level for list-of-model fields expanded inline.""" + """Return `"[]"` per nesting level for list-of-model fields expanded inline.""" if ( field_spec.type_info.is_list and field_spec.model @@ -340,7 +340,7 @@ def _short_variant_name(class_name: str, union_name: str) -> str: def _variant_tag(annotated: AnnotatedField, union_name: str) -> str | None: - """Return an italic variant tag like ``*(Road, Water)*``, or None for shared fields.""" + """Return an italic variant tag like `*(Road, Water)*`, or None for shared fields.""" if annotated.variant_sources is None: return None short_names = [ @@ -535,7 +535,7 @@ def render_pydantic_type( def _format_bound(value: int | float) -> str: """Format a numeric bound for display. - Uses ``2^63`` notation for int64-scale values to avoid unreadable + Uses `2^63` notation for int64-scale values to avoid unreadable numbers; otherwise formats with thousands separators for ints. """ if value == _INT64_MIN: @@ -550,7 +550,7 @@ def _format_bound(value: int | float) -> str: def _format_interval(bounds: Interval) -> str: """Format an Interval as a range string, or empty if unconstrained. - Two inclusive bounds render as ``lower to upper``. All other + Two inclusive bounds render as `lower to upper`. All other combinations use explicit comparison operators so the inclusivity/exclusivity is unambiguous. 
""" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py index 4b7132145..5689cb77f 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py @@ -18,7 +18,7 @@ def _code_link(name: str, href: str) -> str: - """Format a markdown link with inline-code text: [``name``](href).""" + """Format a markdown link with inline-code text: [`name`](href).""" return f"[`{name}`]({href})" @@ -38,10 +38,10 @@ def resolve_type_link(identity: TypeIdentity, ctx: LinkContext | None = None) -> def _wrap_list_n(inner: str, depth: int) -> str: - """Wrap an inner type string in ``list<...>`` markdown syntax *depth* times. + """Wrap an inner type string in `list<...>` markdown syntax *depth* times. Builds a single broken-backtick wrapper rather than nesting iteratively. - Iterative nesting creates adjacent backticks (`````) that CommonMark + Iterative nesting creates adjacent backticks that CommonMark interprets as multi-backtick code span delimiters. """ return f"`{'list<' * depth}`{inner}`{'>' * depth}`" @@ -69,7 +69,7 @@ def _try_primitive_link( Registered primitives (int32, Geometry) and Pydantic types (HttpUrl) can have pages in the registry. Uses the type registry display name - (e.g. ``geometry`` not ``Geometry``) for the link text. + (e.g. `geometry` not `Geometry`) for the link text. """ if ti.kind != TypeKind.PRIMITIVE or not ctx: return None @@ -85,15 +85,15 @@ def _try_primitive_link( def _markdown_type_name(ti: TypeInfo) -> str: """Return the markdown display name for a type. - Uses the semantic NewType name when present (e.g. ``LanguageTag``), - otherwise falls back to the resolved markdown type (e.g. ``string``). + Uses the semantic NewType name when present (e.g. 
`LanguageTag`), + otherwise falls back to the resolved markdown type (e.g. `string`). """ name = ti.newtype_name if is_semantic_newtype(ti) else None return name or resolve_type_name(ti, "markdown") def format_dict_type(ti: TypeInfo) -> str: - """Format a dict TypeInfo as bare ``map`` using resolved markdown names.""" + """Format a dict TypeInfo as bare `map` using resolved markdown names.""" if ti.dict_key_type is None or ti.dict_value_type is None: msg = f"format_dict_type requires dict key/value types, got {ti}" raise ValueError(msg) @@ -111,7 +111,7 @@ def _format_union_members( Each member is resolved independently so members with pages get linked while others render as plain code spans. *separator* is inserted between - members (default is ``\|`` for table-cell safety). + members (default is `\|` for table-cell safety). """ return separator.join(resolve_type_link(TypeIdentity.of(m), ctx) for m in members) @@ -184,7 +184,7 @@ def _linked_or_backticked(ti: TypeInfo, ctx: LinkContext | None) -> tuple[str, b need broken-backtick formatting (interleaving backtick runs with linked text). - When ``has_link`` is True, ``formatted_string`` is a markdown link + When `has_link` is True, `formatted_string` is a markdown link ready for broken-backtick container syntax. When False, it is a raw name that the caller embeds inside backticks. """ diff --git a/packages/overture-schema-codegen/tests/test_example_loader.py b/packages/overture-schema-codegen/tests/test_example_loader.py index 780efc089..1f94bc06d 100644 --- a/packages/overture-schema-codegen/tests/test_example_loader.py +++ b/packages/overture-schema-codegen/tests/test_example_loader.py @@ -196,7 +196,7 @@ def mock_project(tmp_path: Path) -> Iterator[MockProject]: Yields a MockProject with root, pyproject path, and mod_name. Writes a minimal pyproject.toml by default; tests can overwrite via - ``project.write_pyproject()``. + `project.write_pyproject()`. 
""" root = tmp_path / "project" root.mkdir() diff --git a/packages/overture-schema-codegen/tests/test_type_analyzer.py b/packages/overture-schema-codegen/tests/test_type_analyzer.py index d48d12211..bbf8373fd 100644 --- a/packages/overture-schema-codegen/tests/test_type_analyzer.py +++ b/packages/overture-schema-codegen/tests/test_type_analyzer.py @@ -51,9 +51,9 @@ def test_builtin_returns_primitive_type_info(self, annotation: type) -> None: class TestAnalyzeTypeSentinel: """Tests for Sentinel type filtering in unions. - Pydantic uses ``typing_extensions.Sentinel`` instances (like ````) + Pydantic uses `typing_extensions.Sentinel` instances (like ``) in union types for optional fields. The type analyzer filters these out - alongside ``None`` when processing unions. + alongside `None` when processing unions. """ @pytest.fixture() diff --git a/packages/overture-schema-system/src/overture/schema/system/feature.py b/packages/overture-schema-system/src/overture/schema/system/feature.py index fd067df54..82715014f 100644 --- a/packages/overture-schema-system/src/overture/schema/system/feature.py +++ b/packages/overture-schema-system/src/overture/schema/system/feature.py @@ -35,10 +35,10 @@ def resolve_discriminator_field_name(discriminator: object) -> str | None: Handles the three forms a discriminator can take: - A plain string (used directly as the field name). - - A ``pydantic.Discriminator`` whose ``.discriminator`` attribute is a string. - - A ``pydantic.Discriminator`` whose ``.discriminator`` is a callable - produced by ``Feature.field_discriminator``, which stores the field name - as ``_field_name`` on the callable. + - A `pydantic.Discriminator` whose `.discriminator` attribute is a string. + - A `pydantic.Discriminator` whose `.discriminator` is a callable + produced by `Feature.field_discriminator`, which stores the field name + as `_field_name` on the callable. Returns None if *discriminator* is None or its field name cannot be determined. 
@@ -234,7 +234,7 @@ def field_discriminator( ------- Discriminator Discriminator that enables discriminated unions that include features. - The inner callable carries a ``_field_name`` attribute set to *field*, + The inner callable carries a `_field_name` attribute set to *field*, allowing introspection code to recover the discriminator field name without hardcoding it. From 05d97c7472e33dfb88204e50b85e04da720b3903 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Mon, 9 Mar 2026 23:54:31 -0700 Subject: [PATCH 37/38] =?UTF-8?q?rename=20primitive=20=E2=86=92=20numeric?= =?UTF-8?q?=20in=20extraction=20layer?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extraction-layer spec describes numeric types exclusively (bounds, float_bits). Frees 'primitive' for the broader system-level taxonomy. Renames: - PrimitiveSpec → NumericSpec - primitive_extraction.py → numeric_extraction.py - extract_primitives → extract_numerics - partition_primitive_and_geometry_names → partition_numeric_and_geometry_types partition_numeric_and_geometry_types moved from numeric_extraction to pipeline — it discovers both numeric and geometry types, so it didn't belong in a module scoped to numeric extraction. Renderer function names and output constants unchanged — those describe rendered output, not the extraction concept. 
--- packages/overture-schema-codegen/README.md | 2 +- .../overture-schema-codegen/docs/design.md | 2 +- .../docs/walkthrough.md | 10 ++-- .../codegen/extraction/field_constraints.py | 2 +- ...ve_extraction.py => numeric_extraction.py} | 47 +++++-------------- .../schema/codegen/extraction/specs.py | 8 ++-- .../codegen/markdown/path_assignment.py | 8 ++-- .../schema/codegen/markdown/pipeline.py | 40 ++++++++++++---- .../schema/codegen/markdown/renderer.py | 6 +-- .../overture-schema-codegen/tests/conftest.py | 12 ++--- .../tests/test_markdown_renderer.py | 4 +- ...traction.py => test_numeric_extraction.py} | 34 +++++++------- .../tests/test_type_placement.py | 14 +++--- 13 files changed, 96 insertions(+), 93 deletions(-) rename packages/overture-schema-codegen/src/overture/schema/codegen/extraction/{primitive_extraction.py => numeric_extraction.py} (57%) rename packages/overture-schema-codegen/tests/{test_primitive_extraction.py => test_numeric_extraction.py} (77%) diff --git a/packages/overture-schema-codegen/README.md b/packages/overture-schema-codegen/README.md index f09467f77..92a4d8fbe 100644 --- a/packages/overture-schema-codegen/README.md +++ b/packages/overture-schema-codegen/README.md @@ -55,7 +55,7 @@ the same pipeline. function -- a single iterative loop that peels NewType, Annotated, Union, and container wrappers, accumulating constraints tagged with the NewType that contributed them. Domain-specific extractors (`model_extraction`, `union_extraction`, `enum_extraction`, -`newtype_extraction`, `primitive_extraction`) call `analyze_type()` for field types and +`newtype_extraction`, `numeric_extraction`) call `analyze_type()` for field types and produce spec dataclasses. **Output Layout** determines what artifacts to generate and where they go. 
Supplementary diff --git a/packages/overture-schema-codegen/docs/design.md b/packages/overture-schema-codegen/docs/design.md index 7a4237749..f29b3ce87 100644 --- a/packages/overture-schema-codegen/docs/design.md +++ b/packages/overture-schema-codegen/docs/design.md @@ -144,7 +144,7 @@ Extraction is split by entity kind: - `extraction/enum_extraction.py`: Enum class -> `EnumSpec` - `extraction/newtype_extraction.py`: NewType -> `NewTypeSpec` - `extraction/union_extraction.py`: Discriminated union alias -> `UnionSpec` -- `extraction/primitive_extraction.py`: Numeric primitives -> `PrimitiveSpec` +- `extraction/numeric_extraction.py`: Numeric types -> `NumericSpec` Each calls `analyze_type()` for field types. Tree expansion (`expand_model_tree()`) walks MODEL-kind fields to populate nested model references, with a shared cache and diff --git a/packages/overture-schema-codegen/docs/walkthrough.md b/packages/overture-schema-codegen/docs/walkthrough.md index dacf2b25b..f4d1562c2 100644 --- a/packages/overture-schema-codegen/docs/walkthrough.md +++ b/packages/overture-schema-codegen/docs/walkthrough.md @@ -230,11 +230,11 @@ dispatch, and example loading all operate on `FeatureSpec` without knowing which concrete type they hold. **EnumSpec** and **EnumMemberSpec** serve enums. **NewTypeSpec** serves NewTypes. -**PrimitiveSpec** serves numeric primitives with an `Interval` for bounds and optional +**NumericSpec** serves numeric primitives with an `Interval` for bounds and optional `float_bits`. **SupplementarySpec** is the union type alias `EnumSpec | NewTypeSpec | ModelSpec` -- -the set of non-feature types that need their own output pages. `PrimitiveSpec` and +the set of non-feature types that need their own output pages. `NumericSpec` and geometry types are excluded because they render on aggregate pages rather than individual ones. @@ -363,10 +363,10 @@ member for single-value `Literal` fields on the discriminator. 
### Primitive extraction -`partition_primitive_and_geometry_names` reads a module's `__all__` exports. NewType +`partition_numeric_and_geometry_types` reads a module's `__all__` exports. NewType exports are numeric primitives; non-constraint class exports are geometry types. -`extract_primitives` builds `PrimitiveSpec` objects. For each primitive name it resolves +`extract_numerics` builds `NumericSpec` objects. For each primitive name it resolves the object from the module, calls `extract_newtype` for the type analysis, then extracts numeric bounds from constraints. `extract_numeric_bounds` scans constraint objects for `ge`/`gt`/`le`/`lt` attributes and packs them into an `Interval`. @@ -713,7 +713,7 @@ two NewType layers and an `Annotated` layer, producing a `TypeInfo` with `FieldSpec.model` references. The shared cache ensures sub-models referenced by multiple features (like `Sources`) extract once. Union-kind fields skip inline expansion. -**Layout.** `partition_primitive_and_geometry_names` reads the system module's exports. +**Layout.** `partition_numeric_and_geometry_types` reads the system module's exports. `collect_all_supplementary_types` walks Segment's expanded fields and discovers referenced enums (like `Subtype`), semantic NewTypes (like `Id`, `Sources`), and sub-models. The walk follows `FieldSpec.model` references down the tree, and for diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py index 5981528d1..5cdc3dcd2 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/field_constraints.py @@ -28,7 +28,7 @@ # annotated_types constraint attribute (Ge, Gt, Le, Lt, Interval) to its # mathematical symbol for prose rendering. 
# -# primitive_extraction.py has its own _BOUND_ATTRS for numeric extraction. The +# numeric_extraction.py has its own _BOUND_ATTRS for numeric extraction. The # duplication is deliberate: these modules use the same attribute names for # unrelated purposes (display formatting vs. numeric bound extraction), and # coupling them for four string literals adds a dependency without value. diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/primitive_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/numeric_extraction.py similarity index 57% rename from packages/overture-schema-codegen/src/overture/schema/codegen/extraction/primitive_extraction.py rename to packages/overture-schema-codegen/src/overture/schema/codegen/extraction/numeric_extraction.py index ceb8ff7cd..ae899a4e6 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/primitive_extraction.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/numeric_extraction.py @@ -1,23 +1,22 @@ -"""Primitive extraction and partitioning.""" +"""Numeric type extraction.""" from annotated_types import Interval from .docstring import first_docstring_line from .newtype_extraction import extract_newtype -from .specs import PrimitiveSpec, TypeIdentity -from .type_analyzer import TypeInfo, is_newtype +from .specs import NumericSpec, TypeIdentity +from .type_analyzer import TypeInfo __all__ = [ "extract_numeric_bounds", - "extract_primitives", - "partition_primitive_and_geometry_names", + "extract_numerics", ] # Bound attribute names on annotated_types constraint objects (Ge, Gt, Le, # Lt, Interval) used for numeric bound extraction. # -# field_constraint_description.py has its own _BOUND_OPS for display formatting. +# field_constraints.py has its own _BOUND_OPS for display formatting. 
# The duplication is deliberate: these modules use the same attribute names # for unrelated purposes (numeric extraction vs. prose rendering), and # coupling them for four string literals adds a dependency without value. @@ -41,18 +40,18 @@ def extract_numeric_bounds(type_info: TypeInfo) -> Interval: return Interval(**found) -def extract_primitives( - primitive_ids: list[TypeIdentity], -) -> list[PrimitiveSpec]: - """Extract specifications for numeric primitive types.""" - specs: list[PrimitiveSpec] = [] - for tid in primitive_ids: +def extract_numerics( + numeric_ids: list[TypeIdentity], +) -> list[NumericSpec]: + """Extract specifications for numeric types.""" + specs: list[NumericSpec] = [] + for tid in numeric_ids: newtype_spec = extract_newtype(tid.obj) bounds = extract_numeric_bounds(newtype_spec.type_info) description = first_docstring_line(getattr(tid.obj, "__doc__", None)) float_bits = _extract_float_bits(tid.name) specs.append( - PrimitiveSpec( + NumericSpec( name=tid.name, description=description, bounds=bounds, @@ -71,25 +70,3 @@ def extract_primitives( def _extract_float_bits(name: str) -> int | None: """Extract bit width from a float type name like 'float32'.""" return _FLOAT_BITS.get(name) - - -def partition_primitive_and_geometry_names( - primitive_module: object, -) -> tuple[list[TypeIdentity], list[TypeIdentity]]: - """Discover primitive and geometry types from a module's exports. - - NewType exports are numeric primitives. - Non-constraint class/enum exports are geometry types. 
- """ - module_all: list[str] = getattr(primitive_module, "__all__", []) - primitives: list[TypeIdentity] = [] - geometries: list[TypeIdentity] = [] - - for name in module_all: - obj = getattr(primitive_module, name) - if is_newtype(obj): - primitives.append(TypeIdentity(obj, name)) - elif isinstance(obj, type) and not name.endswith("Constraint"): - geometries.append(TypeIdentity(obj, name)) - - return primitives, geometries diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py index 132c966b8..33a291d6e 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py @@ -21,7 +21,7 @@ "FieldSpec", "ModelSpec", "NewTypeSpec", - "PrimitiveSpec", + "NumericSpec", "PydanticTypeSpec", "SupplementarySpec", "TypeIdentity", @@ -189,8 +189,8 @@ class NewTypeSpec(_SourceTypeIdentityMixin): @dataclass -class PrimitiveSpec: - """Extracted specification for a numeric primitive type.""" +class NumericSpec: + """Extracted specification for a numeric type.""" name: str description: str | None @@ -219,7 +219,7 @@ def docs_url(self) -> str: SupplementarySpec = EnumSpec | NewTypeSpec | ModelSpec | PydanticTypeSpec """Non-feature types referenced by feature models. -Excludes PrimitiveSpec and geometry types, which are extracted +Excludes NumericSpec and geometry types, which are extracted separately via dedicated functions. 
""" diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py index a05693310..f0d224ee4 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/path_assignment.py @@ -31,7 +31,7 @@ def build_placement_registry( feature_specs: Sequence[FeatureSpec], all_specs: dict[TypeIdentity, SupplementarySpec], - primitive_names: list[TypeIdentity], + numeric_names: list[TypeIdentity], geometry_names: list[TypeIdentity], schema_root: str, ) -> dict[TypeIdentity, PurePosixPath]: @@ -41,7 +41,7 @@ def build_placement_registry( the source Python module path relative to schema_root. """ registry: dict[TypeIdentity, PurePosixPath] = _aggregate_page_entries( - primitive_names, geometry_names + numeric_names, geometry_names ) feature_dirs: set[PurePosixPath] = set() @@ -81,12 +81,12 @@ def resolve_output_path( def _aggregate_page_entries( - primitive_names: list[TypeIdentity], + numeric_names: list[TypeIdentity], geometry_names: list[TypeIdentity], ) -> dict[TypeIdentity, PurePosixPath]: """Pre-populate registry entries for types documented on aggregate pages.""" entries: dict[TypeIdentity, PurePosixPath] = dict.fromkeys( - primitive_names, PRIMITIVES_PAGE + numeric_names, PRIMITIVES_PAGE ) entries.update(dict.fromkeys(geometry_names, GEOMETRY_PAGE)) return entries diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py index dbd21f0f4..f7c676c06 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/pipeline.py @@ -15,10 +15,7 @@ from ..extraction.examples import ExampleRecord, load_examples from 
..extraction.model_extraction import expand_model_tree -from ..extraction.primitive_extraction import ( - extract_primitives, - partition_primitive_and_geometry_names, -) +from ..extraction.numeric_extraction import extract_numerics from ..extraction.specs import ( EnumSpec, FeatureSpec, @@ -29,6 +26,7 @@ TypeIdentity, UnionSpec, ) +from ..extraction.type_analyzer import is_newtype from ..layout.type_collection import collect_all_supplementary_types from .link_computation import LinkContext from .path_assignment import ( @@ -47,7 +45,11 @@ ) from .reverse_references import UsedByEntry, compute_reverse_references -__all__ = ["RenderedPage", "generate_markdown_pages"] +__all__ = [ + "RenderedPage", + "generate_markdown_pages", + "partition_numeric_and_geometry_types", +] @dataclass(frozen=True, slots=True) @@ -109,6 +111,28 @@ def _render_supplement( return RenderedPage(content=content, path=output_path) +def partition_numeric_and_geometry_types( + types_module: object, +) -> tuple[list[TypeIdentity], list[TypeIdentity]]: + """Discover numeric and geometry types from a module's exports. + + NewType exports are numeric types. + Non-constraint class/enum exports are geometry types. 
+ """ + module_all: list[str] = getattr(types_module, "__all__", []) + numerics: list[TypeIdentity] = [] + geometries: list[TypeIdentity] = [] + + for name in module_all: + obj = getattr(types_module, name) + if is_newtype(obj): + numerics.append(TypeIdentity(obj, name)) + elif isinstance(obj, type) and not name.endswith("Constraint"): + geometries.append(TypeIdentity(obj, name)) + + return numerics, geometries + + def generate_markdown_pages( feature_specs: Sequence[FeatureSpec], schema_root: str, @@ -123,12 +147,12 @@ def generate_markdown_pages( for spec in feature_specs: expand_model_tree(spec, cache) - primitive_names, geometry_names = partition_primitive_and_geometry_names( + numeric_names, geometry_names = partition_numeric_and_geometry_types( _system_primitive ) all_specs = collect_all_supplementary_types(feature_specs) registry = build_placement_registry( - feature_specs, all_specs, primitive_names, geometry_names, schema_root + feature_specs, all_specs, numeric_names, geometry_names, schema_root ) reverse_refs = compute_reverse_references(feature_specs, all_specs) @@ -148,7 +172,7 @@ def generate_markdown_pages( pages.append( RenderedPage( - content=render_primitives_from_specs(extract_primitives(primitive_names)), + content=render_primitives_from_specs(extract_numerics(numeric_names)), path=PRIMITIVES_PAGE, ) ) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py index 13e0333b0..28e8fd458 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py @@ -23,7 +23,7 @@ FieldSpec, ModelSpec, NewTypeSpec, - PrimitiveSpec, + NumericSpec, PydanticTypeSpec, TypeIdentity, UnionSpec, @@ -587,8 +587,8 @@ def _bit_width_key(name: str) -> tuple[str, int]: return (prefix, int(digits) if digits else 0) -def 
render_primitives_from_specs(specs: list[PrimitiveSpec]) -> str: - """Render the primitives.md page from pre-extracted PrimitiveSpecs.""" +def render_primitives_from_specs(specs: list[NumericSpec]) -> str: + """Render the primitives.md page from pre-extracted NumericSpecs.""" template = _get_jinja_env().get_template("primitives.md.jinja2") signed_ints: list[dict[str, str | None]] = [] diff --git a/packages/overture-schema-codegen/tests/conftest.py b/packages/overture-schema-codegen/tests/conftest.py index 775fc628c..8dce88bf5 100644 --- a/packages/overture-schema-codegen/tests/conftest.py +++ b/packages/overture-schema-codegen/tests/conftest.py @@ -5,11 +5,11 @@ from click.testing import CliRunner from codegen_test_support import find_model_class from overture.schema.codegen.extraction.model_extraction import extract_model -from overture.schema.codegen.extraction.primitive_extraction import ( - extract_primitives, - partition_primitive_and_geometry_names, -) +from overture.schema.codegen.extraction.numeric_extraction import extract_numerics from overture.schema.codegen.extraction.specs import ModelSpec +from overture.schema.codegen.markdown.pipeline import ( + partition_numeric_and_geometry_types, +) from overture.schema.codegen.markdown.renderer import ( render_geometry_from_values, render_primitives_from_specs, @@ -72,8 +72,8 @@ def division_class(all_discovered_models: dict) -> type[BaseModel]: @pytest.fixture(scope="module") def primitives_markdown() -> str: """Render the primitives.md page from the system primitive module.""" - primitive_names, _ = partition_primitive_and_geometry_names(_system_primitive) - return render_primitives_from_specs(extract_primitives(primitive_names)) + numeric_names, _ = partition_numeric_and_geometry_types(_system_primitive) + return render_primitives_from_specs(extract_numerics(numeric_names)) @pytest.fixture(scope="module") diff --git a/packages/overture-schema-codegen/tests/test_markdown_renderer.py 
b/packages/overture-schema-codegen/tests/test_markdown_renderer.py index e22154196..5ebddcab9 100644 --- a/packages/overture-schema-codegen/tests/test_markdown_renderer.py +++ b/packages/overture-schema-codegen/tests/test_markdown_renderer.py @@ -34,7 +34,7 @@ EnumMemberSpec, EnumSpec, FieldSpec, - PrimitiveSpec, + NumericSpec, TypeIdentity, ) from overture.schema.codegen.extraction.type_analyzer import ConstraintSource @@ -1167,7 +1167,7 @@ def test_float_precision(self, primitives_markdown: str) -> None: def test_pipe_in_description_escaped(self) -> None: """Pipe characters in primitive descriptions are escaped.""" specs = [ - PrimitiveSpec( + NumericSpec( name="int8", description="Range: -128 | 127", bounds=Interval(ge=-128, le=127), diff --git a/packages/overture-schema-codegen/tests/test_primitive_extraction.py b/packages/overture-schema-codegen/tests/test_numeric_extraction.py similarity index 77% rename from packages/overture-schema-codegen/tests/test_primitive_extraction.py rename to packages/overture-schema-codegen/tests/test_numeric_extraction.py index 5fb2de9ed..ee604ba75 100644 --- a/packages/overture-schema-codegen/tests/test_primitive_extraction.py +++ b/packages/overture-schema-codegen/tests/test_numeric_extraction.py @@ -1,47 +1,49 @@ -"""Tests for primitive extraction and numeric bounds.""" +"""Tests for numeric extraction and numeric bounds.""" from typing import Annotated, NewType import overture.schema.system.primitive as _system_primitive from overture.schema.codegen.extraction.newtype_extraction import extract_newtype -from overture.schema.codegen.extraction.primitive_extraction import ( +from overture.schema.codegen.extraction.numeric_extraction import ( extract_numeric_bounds, - extract_primitives, - partition_primitive_and_geometry_names, + extract_numerics, ) from overture.schema.codegen.extraction.specs import TypeIdentity from overture.schema.codegen.extraction.type_analyzer import analyze_type +from 
overture.schema.codegen.markdown.pipeline import ( + partition_numeric_and_geometry_types, +) from overture.schema.system.primitive import float32, int32, int64, uint8 from pydantic import Field -class TestPartitionPrimitiveAndGeometryNames: - """Tests for partition_primitive_and_geometry_names function.""" +class TestPartitionNumericAndGeometryTypes: + """Tests for partition_numeric_and_geometry_types function.""" def test_returns_type_identities(self) -> None: - prims, geoms = partition_primitive_and_geometry_names(_system_primitive) - assert all(isinstance(p, TypeIdentity) for p in prims) + nums, geoms = partition_numeric_and_geometry_types(_system_primitive) + assert all(isinstance(p, TypeIdentity) for p in nums) assert all(isinstance(g, TypeIdentity) for g in geoms) def test_identity_obj_is_actual_callable(self) -> None: - prims, _ = partition_primitive_and_geometry_names(_system_primitive) - int32_id = next(p for p in prims if p.name == "int32") + nums, _ = partition_numeric_and_geometry_types(_system_primitive) + int32_id = next(p for p in nums if p.name == "int32") assert int32_id.obj is _system_primitive.int32 -class TestExtractPrimitives: - """Tests for extract_primitives function.""" +class TestExtractNumerics: + """Tests for extract_numerics function.""" def test_accepts_type_identities(self) -> None: - prims, _ = partition_primitive_and_geometry_names(_system_primitive) - specs = extract_primitives(prims) + nums, _ = partition_numeric_and_geometry_types(_system_primitive) + specs = extract_numerics(nums) assert len(specs) > 0 names = [s.name for s in specs] assert "int32" in names def test_extracts_bounds(self) -> None: - prims, _ = partition_primitive_and_geometry_names(_system_primitive) - specs = extract_primitives(prims) + nums, _ = partition_numeric_and_geometry_types(_system_primitive) + specs = extract_numerics(nums) int32_spec = next(s for s in specs if s.name == "int32") assert int32_spec.bounds.ge == -(2**31) assert int32_spec.bounds.le == 
2**31 - 1 diff --git a/packages/overture-schema-codegen/tests/test_type_placement.py b/packages/overture-schema-codegen/tests/test_type_placement.py index 62ef7449a..7e21a6a46 100644 --- a/packages/overture-schema-codegen/tests/test_type_placement.py +++ b/packages/overture-schema-codegen/tests/test_type_placement.py @@ -12,9 +12,6 @@ make_union_spec, ) from overture.schema.codegen.extraction.model_extraction import expand_model_tree -from overture.schema.codegen.extraction.primitive_extraction import ( - partition_primitive_and_geometry_names, -) from overture.schema.codegen.extraction.specs import ( AnnotatedField, FeatureSpec, @@ -32,9 +29,12 @@ PRIMITIVES_PAGE, build_placement_registry, ) +from overture.schema.codegen.markdown.pipeline import ( + partition_numeric_and_geometry_types, +) from pydantic import BaseModel -_PRIMITIVE_NAMES, _GEOMETRY_NAMES = partition_primitive_and_geometry_names( +_NUMERIC_NAMES, _GEOMETRY_NAMES = partition_numeric_and_geometry_types( _system_primitive ) @@ -50,7 +50,7 @@ def _build_registry( expand_model_tree(spec, cache) all_specs = collect_all_supplementary_types(feature_specs) registry = build_placement_registry( - feature_specs, all_specs, _PRIMITIVE_NAMES, _GEOMETRY_NAMES, _SCHEMA_ROOT + feature_specs, all_specs, _NUMERIC_NAMES, _GEOMETRY_NAMES, _SCHEMA_ROOT ) return registry, all_specs @@ -219,7 +219,7 @@ def test_pydantic_type_placed_under_module_dir(self) -> None: registry = build_placement_registry( feature_specs=[], all_specs={HTTP_URL_SPEC.identity: HTTP_URL_SPEC}, - primitive_names=[], + numeric_names=[], geometry_names=[], schema_root="overture.schema", ) @@ -235,7 +235,7 @@ def test_multiple_pydantic_types_same_module(self) -> None: registry = build_placement_registry( feature_specs=[], all_specs=specs, - primitive_names=[], + numeric_names=[], geometry_names=[], schema_root="overture.schema", ) From a5f64db3028e6a8d6e78f2426d58fe31cd42b470 Mon Sep 17 00:00:00 2001 From: Seth Fitzsimmons Date: Mon, 9 Mar 2026 
23:55:49 -0700 Subject: [PATCH 38/38] replace runtime asserts with proper checks, simplify MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Assert statements used for runtime validation disappear under python -O. Replace with TypeError/ValueError raises in validate_example, TypeIdentity.of, _SourceTypeIdentityMixin, _format_constraint, _linked_type_identity, type_collection. Guard _find_common_base against empty members list. Collapse duplicate datetime.datetime/date branches. Rename _format_field_list → _backtick_join (returns string, not list). --- .../overture/schema/codegen/extraction/examples.py | 5 +++-- .../schema/codegen/extraction/model_constraints.py | 12 +++++------- .../src/overture/schema/codegen/extraction/specs.py | 9 ++++++--- .../schema/codegen/extraction/union_extraction.py | 2 ++ .../schema/codegen/layout/type_collection.py | 5 ++++- .../src/overture/schema/codegen/markdown/renderer.py | 7 +++---- .../overture/schema/codegen/markdown/type_format.py | 8 +++++--- 7 files changed, 28 insertions(+), 20 deletions(-) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py index 2241bbe59..e5f949831 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/examples.py @@ -107,8 +107,9 @@ def validate_example( known_keys = _known_field_keys(model_fields) preprocessed = _inject_literal_fields(model_fields, raw) preprocessed = _strip_null_unknown_fields(preprocessed, known_keys) - result: BaseModel = TypeAdapter(validation_type).validate_python(preprocessed) - assert isinstance(result, BaseModel) + result: object = TypeAdapter(validation_type).validate_python(preprocessed) + if not isinstance(result, BaseModel): + raise TypeError(f"Expected BaseModel instance, got 
{type(result).__name__}") return result diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py index 76f2934fc..8290ee3ea 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/model_constraints.py @@ -34,7 +34,7 @@ class _ConstraintEntry: field_names: frozenset[str] -def _format_field_list(names: tuple[str, ...]) -> str: +def _backtick_join(names: tuple[str, ...]) -> str: """Format field names as backtick-quoted, comma-separated list.""" return ", ".join(f"`{n}`" for n in names) @@ -73,7 +73,7 @@ def _describe_condition(condition: object) -> str: def _describe_conditional(constraint: _ConditionalConstraint) -> str: """Describe a require_if or forbid_if constraint.""" - fields = _format_field_list(constraint.field_names) + fields = _backtick_join(constraint.field_names) verb = _conditional_verb(constraint) cond = _describe_condition(constraint.condition) return f"{fields} {_plural_verb(constraint.field_names)} {verb} when {cond}" @@ -110,7 +110,7 @@ def _describe_consolidated( ) -> str: """Describe a group of consolidated conditional constraints.""" first = constraints[0] - fields = _format_field_list(first.field_names) + fields = _backtick_join(first.field_names) verb = _conditional_verb(first) cond_field = _as_field_eq(first).field_name values = ", ".join(f"`{_as_field_eq(c).value}`" for c in constraints) @@ -151,11 +151,9 @@ def _describe_one(constraint: ModelConstraint) -> str | None: if isinstance(constraint, NoExtraFieldsConstraint): return None if isinstance(constraint, RequireAnyOfConstraint): - return ( - f"At least one of {_format_field_list(constraint.field_names)} must be set" - ) + return f"At least one of {_backtick_join(constraint.field_names)} must be set" if 
isinstance(constraint, RadioGroupConstraint): - return f"Exactly one of {_format_field_list(constraint.field_names)} must be `true`" + return f"Exactly one of {_backtick_join(constraint.field_names)} must be `true`" if isinstance(constraint, MinFieldsSetConstraint): return f"At least {constraint.count} fields must be set" if isinstance(constraint, (RequireIfConstraint, ForbidIfConstraint)): diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py index 33a291d6e..acba1577d 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/specs.py @@ -49,8 +49,10 @@ class TypeIdentity: @classmethod def of(cls, obj: object) -> TypeIdentity: """Derive a TypeIdentity from a named object (class, NewType, etc.).""" - assert obj is not None - return cls(obj, obj.__name__) # type: ignore[attr-defined] + name = getattr(obj, "__name__", None) + if name is None: + raise TypeError(f"Cannot derive TypeIdentity from {obj!r}: no __name__") + return cls(obj, name) def __eq__(self, other: object) -> bool: return isinstance(other, TypeIdentity) and self.obj is other.obj @@ -78,7 +80,8 @@ class _SourceTypeIdentityMixin: @property def identity(self) -> TypeIdentity: - assert self.source_type is not None + if self.source_type is None: + raise ValueError(f"Cannot derive identity for {self.name}: no source_type") return TypeIdentity(self.source_type, self.name) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py index 6cd4d73d5..c555fdba0 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py +++ 
b/packages/overture-schema-codegen/src/overture/schema/codegen/extraction/union_extraction.py @@ -18,6 +18,8 @@ def _find_common_base(members: list[type[BaseModel]]) -> type[BaseModel]: """Find the most-derived common BaseModel ancestor of all members.""" + if not members: + raise ValueError("Cannot find common base of empty members list") filtered_mros = [ [c for c in cls.__mro__ if is_model_class(c) and c is not BaseModel] for cls in members diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py index 7d7cf95f7..b9072da64 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/layout/type_collection.py @@ -109,7 +109,10 @@ def _visit(node: TypeInfo) -> None: if enum_id not in all_specs: all_specs[enum_id] = extract_enum(node.source_type) elif is_pydantic_type(node): - assert node.source_type is not None # guaranteed by is_pydantic_type + if node.source_type is None: + raise TypeError( + "is_pydantic_type returned True but source_type is None" + ) pid = TypeIdentity.of(node.source_type) if pid not in all_specs: all_specs[pid] = extract_pydantic_type(node.source_type) diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py index 28e8fd458..0e829d1f4 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/renderer.py @@ -174,9 +174,6 @@ def _format_example_value(value: object) -> str: if isinstance(value, bool): return "`true`" if value else "`false`" - if isinstance(value, datetime.datetime): - return f"`{value.isoformat()}`" - if isinstance(value, datetime.date): return 
f"`{value.isoformat()}`" @@ -456,7 +453,9 @@ def _format_constraint( if cs.source_ref is None or cs.source_ref is newtype_ref: return _NewTypeConstraintRow(display=display) - assert cs.source_name is not None # source_ref and source_name are set together + # source_ref and source_name are always set together + if cs.source_name is None: + return _NewTypeConstraintRow(display=display) source_identity = TypeIdentity(cs.source_ref, cs.source_name) source_link = ctx.resolve_link(source_identity) if ctx else None return _NewTypeConstraintRow( diff --git a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py index 5689cb77f..b6bd7a6ec 100644 --- a/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py +++ b/packages/overture-schema-codegen/src/overture/schema/codegen/markdown/type_format.py @@ -54,8 +54,11 @@ def _plain_list_type(base: str, depth: int) -> str: def _linked_type_identity(ti: TypeInfo) -> TypeIdentity | None: """Return the TypeIdentity to use for a markdown link, or None for non-linked types.""" - if is_semantic_newtype(ti) and ti.newtype_ref is not None: - assert ti.newtype_name is not None # guaranteed by is_semantic_newtype + if ( + is_semantic_newtype(ti) + and ti.newtype_ref is not None + and ti.newtype_name is not None + ): return TypeIdentity(ti.newtype_ref, ti.newtype_name) if ti.kind in (TypeKind.ENUM, TypeKind.MODEL) and ti.source_type is not None: return TypeIdentity(ti.source_type, ti.base_type) @@ -148,7 +151,6 @@ def format_type( # qualifier instead (e.g., Sources wrapping list[SourceItem] renders as # Sources (list)), since the list-ness is an implementation detail of the type. 
if ti.newtype_outer_list_depth > 0: - assert ti.is_list # outer list layers are a subset of total list layers display = _wrap_list_n(display, ti.newtype_outer_list_depth) elif ti.is_list and ti.newtype_name is not None: # list is inside the NewType qualifiers.append("list")