From 3091ac5ac82f4ac26fb3c29d1e4be5b2e767ac3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Tue, 9 Dec 2025 10:16:25 -0800 Subject: [PATCH 01/35] Factored processes -> Dev (#131) * pylint (#98) * add compatibility for factored states * concrete examples and alternating process * tweaks to vocab sizes * update naming * lock * full merge, renaming * test factored representation * finalise gen-process PR * update after merge * static analysis * static analysis tweaks * arg name * better test coverage * factor input args * ruff * better linting * bind i * elipsis to protocol * simplify protocol * format * Minor fixes * Minor fixes * jnp.ndarray -> jax.Array * Fix JIT compilation issue Previous code extracted values from JAX arrays and convert to Python ints at runtime. This will fail when the function is JIT-compiled because JAX arrays become tracers during compilation, and int() on a tracer raises an error. The vocab_sizes parameter must be provided to __init__ for this method to work with JIT. * Refactor generative process config tests to use a helper method for creating factored process configurations. Added parameterized tests for valid and invalid configurations, improving test coverage and maintainability. 
* Add docstrings * Add match strings to value errors in tests * add better factor handling and allow regression to individual factors * pass device * static analysis * better output format * to_factor in validation * update returns and concatenations * tuple handling * fix typehint * improve test coverage --------- Co-authored-by: ealt Co-authored-by: Eric Alt --- simplexity/activations/activation_analyses.py | 8 +- simplexity/activations/activation_tracker.py | 25 +- simplexity/analysis/layerwise_analysis.py | 11 +- simplexity/analysis/linear_regression.py | 42 +- simplexity/generative_processes/builder.py | 486 +- .../factored_generative_process.py | 269 ++ .../generalized_hidden_markov_model.py | 18 +- simplexity/generative_processes/generator.py | 12 +- .../structures/__init__.py | 37 + .../structures/conditional_transitions.py | 232 + .../structures/fully_conditional.py | 196 + .../structures/independent.py | 74 + .../structures/protocol.py | 59 + .../structures/sequential_conditional.py | 134 + .../generative_processes/torch_generator.py | 23 +- simplexity/utils/analysis_utils.py | 61 +- simplexity/utils/factoring_utils.py | 167 + tests/activations/test_activation_analysis.py | 317 ++ tests/analysis/test_layerwise_analysis.py | 30 +- tests/analysis/test_linear_regression.py | 138 + .../configs/activation_tracker/default.yaml | 2 +- .../unified_chain_3mess3_2tomq.yaml | 108 + .../unified_chain_example.yaml | 57 + .../unified_independent_example.yaml | 53 + .../unified_symmetric_example.yaml | 100 + ...ansition_and_emission_coupled_example.yaml | 93 + .../unified_transition_coupled_example.yaml | 93 + tests/generative_processes/test_builder.py | 383 ++ .../test_factored_generative_process.py | 629 +++ .../test_factored_structures.py | 354 ++ tests/generative_processes/test_generator.py | 14 +- .../test_torch_generator.py | 12 +- .../test_transition_matrices.py | 2 +- .../test_generative_process_config.py | 174 + .../test_predictive_model_config.py | 9 + 
tests/utils/test_analysis_utils.py | 6 + tests/utils/test_factoring_utils.py | 111 + uv.lock | 4248 ++++++++--------- 38 files changed, 6621 insertions(+), 2166 deletions(-) create mode 100644 simplexity/generative_processes/factored_generative_process.py create mode 100644 simplexity/generative_processes/structures/__init__.py create mode 100644 simplexity/generative_processes/structures/conditional_transitions.py create mode 100644 simplexity/generative_processes/structures/fully_conditional.py create mode 100644 simplexity/generative_processes/structures/independent.py create mode 100644 simplexity/generative_processes/structures/protocol.py create mode 100644 simplexity/generative_processes/structures/sequential_conditional.py create mode 100644 simplexity/utils/factoring_utils.py create mode 100644 tests/end_to_end/configs/generative_process/unified_chain_3mess3_2tomq.yaml create mode 100644 tests/end_to_end/configs/generative_process/unified_chain_example.yaml create mode 100644 tests/end_to_end/configs/generative_process/unified_independent_example.yaml create mode 100644 tests/end_to_end/configs/generative_process/unified_symmetric_example.yaml create mode 100644 tests/end_to_end/configs/generative_process/unified_transition_and_emission_coupled_example.yaml create mode 100644 tests/end_to_end/configs/generative_process/unified_transition_coupled_example.yaml create mode 100644 tests/generative_processes/test_factored_generative_process.py create mode 100644 tests/generative_processes/test_factored_structures.py create mode 100644 tests/utils/test_factoring_utils.py diff --git a/simplexity/activations/activation_analyses.py b/simplexity/activations/activation_analyses.py index 8e843e13..37de3e7b 100644 --- a/simplexity/activations/activation_analyses.py +++ b/simplexity/activations/activation_analyses.py @@ -35,7 +35,7 @@ def analyze( self, activations: Mapping[str, jax.Array], weights: jax.Array, - belief_states: jax.Array | None = None, + belief_states: 
jax.Array | tuple[jax.Array, ...] | None = None, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: """Analyze activations and return scalar metrics and projections.""" ... @@ -76,13 +76,14 @@ def __init__( concat_layers: bool = False, use_probs_as_weights: bool = True, fit_intercept: bool = True, + to_factors: bool = False, ) -> None: super().__init__( analysis_type="linear_regression", last_token_only=last_token_only, concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, - analysis_kwargs={"fit_intercept": fit_intercept}, + analysis_kwargs={"fit_intercept": fit_intercept, "to_factors": to_factors}, ) @@ -97,8 +98,9 @@ def __init__( use_probs_as_weights: bool = True, rcond_values: Sequence[float] | None = None, fit_intercept: bool = True, + to_factors: bool = False, ) -> None: - analysis_kwargs: dict[str, Any] = {"fit_intercept": fit_intercept} + analysis_kwargs: dict[str, Any] = {"fit_intercept": fit_intercept, "to_factors": to_factors} if rcond_values is not None: analysis_kwargs["rcond_values"] = tuple(rcond_values) super().__init__( diff --git a/simplexity/activations/activation_tracker.py b/simplexity/activations/activation_tracker.py index 03e02253..0908e4f9 100644 --- a/simplexity/activations/activation_tracker.py +++ b/simplexity/activations/activation_tracker.py @@ -20,7 +20,7 @@ class PreparedActivations: """Prepared activations with belief states and sample weights.""" activations: Mapping[str, jax.Array] - belief_states: jax.Array | None + belief_states: jax.Array | tuple[jax.Array, ...] 
| None weights: jax.Array @@ -48,16 +48,26 @@ def _to_jax_array(value: Any) -> jax.Array: return jnp.asarray(value) +def _convert_tuple_to_jax_array(value: tuple[Any, ...]) -> tuple[jax.Array, ...]: + """Convert a tuple of supported tensor types to JAX arrays.""" + return tuple(_to_jax_array(v) for v in value) + + def prepare_activations( inputs: jax.Array | torch.Tensor | np.ndarray, - beliefs: jax.Array | torch.Tensor | np.ndarray, + beliefs: jax.Array + | torch.Tensor + | np.ndarray + | tuple[jax.Array, ...] + | tuple[torch.Tensor, ...] + | tuple[np.ndarray, ...], probs: jax.Array | torch.Tensor | np.ndarray, activations: Mapping[str, jax.Array | torch.Tensor | np.ndarray], prepare_options: PrepareOptions, ) -> PreparedActivations: """Preprocess activations by deduplicating sequences, selecting tokens/layers, and computing weights.""" inputs = _to_jax_array(inputs) - beliefs = _to_jax_array(beliefs) + beliefs = _convert_tuple_to_jax_array(beliefs) if isinstance(beliefs, tuple) else _to_jax_array(beliefs) probs = _to_jax_array(probs) activations = {name: _to_jax_array(layer) for name, layer in activations.items()} @@ -74,7 +84,7 @@ def prepare_activations( weights = ( dataset.probs if prepare_options.use_probs_as_weights - else _get_uniform_weights(belief_states.shape[0], belief_states.dtype) + else _get_uniform_weights(dataset.probs.shape[0], dataset.probs.dtype) ) if prepare_options.concat_layers: @@ -98,7 +108,12 @@ def __init__(self, analyses: Mapping[str, ActivationAnalysis]): def analyze( self, inputs: jax.Array | torch.Tensor | np.ndarray, - beliefs: jax.Array | torch.Tensor | np.ndarray, + beliefs: jax.Array + | torch.Tensor + | np.ndarray + | tuple[jax.Array, ...] + | tuple[torch.Tensor, ...] 
+ | tuple[np.ndarray, ...], probs: jax.Array | torch.Tensor | np.ndarray, activations: Mapping[str, jax.Array | torch.Tensor | np.ndarray], ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: diff --git a/simplexity/analysis/layerwise_analysis.py b/simplexity/analysis/layerwise_analysis.py index 94140298..637531cf 100644 --- a/simplexity/analysis/layerwise_analysis.py +++ b/simplexity/analysis/layerwise_analysis.py @@ -34,21 +34,23 @@ class AnalysisRegistration: def _validate_linear_regression_kwargs(kwargs: Mapping[str, Any] | None) -> dict[str, Any]: provided = dict(kwargs or {}) - allowed = {"fit_intercept"} + allowed = {"fit_intercept", "to_factors"} unexpected = set(provided) - allowed if unexpected: raise ValueError(f"Unexpected linear_regression kwargs: {sorted(unexpected)}") fit_intercept = bool(provided.get("fit_intercept", True)) - return {"fit_intercept": fit_intercept} + to_factors = bool(provided.get("to_factors", False)) + return {"fit_intercept": fit_intercept, "to_factors": to_factors} def _validate_linear_regression_svd_kwargs(kwargs: Mapping[str, Any] | None) -> dict[str, Any]: provided = dict(kwargs or {}) - allowed = {"fit_intercept", "rcond_values"} + allowed = {"fit_intercept", "rcond_values", "to_factors"} unexpected = set(provided) - allowed if unexpected: raise ValueError(f"Unexpected linear_regression_svd kwargs: {sorted(unexpected)}") fit_intercept = bool(provided.get("fit_intercept", True)) + to_factors = bool(provided.get("to_factors", False)) rcond_values = provided.get("rcond_values") if rcond_values is not None: if not isinstance(rcond_values, (list, tuple)): @@ -58,6 +60,7 @@ def _validate_linear_regression_svd_kwargs(kwargs: Mapping[str, Any] | None) -> rcond_values = tuple(float(v) for v in rcond_values) return { "fit_intercept": fit_intercept, + "to_factors": to_factors, "rcond_values": rcond_values, } @@ -152,7 +155,7 @@ def analyze( self, activations: Mapping[str, jax.Array], weights: jax.Array, - belief_states: jax.Array 
| None = None, + belief_states: jax.Array | tuple[jax.Array, ...] | None = None, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: """Analyze activations and return namespaced scalar metrics and projections.""" if self._requires_belief_states and belief_states is None: diff --git a/simplexity/analysis/linear_regression.py b/simplexity/analysis/linear_regression.py index 54a8315d..1e3bcf3e 100644 --- a/simplexity/analysis/linear_regression.py +++ b/simplexity/analysis/linear_regression.py @@ -139,22 +139,56 @@ def linear_regression_svd( def layer_linear_regression( layer_activations: jax.Array, weights: jax.Array, - belief_states: jax.Array | None, + belief_states: jax.Array | tuple[jax.Array, ...] | None, + to_factors: bool = False, **kwargs: Any, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: """Layer-wise regression helper that wraps :func:`linear_regression`.""" if belief_states is None: raise ValueError("linear_regression requires belief_states") - return linear_regression(layer_activations, belief_states, weights, **kwargs) + + if to_factors: + scalars, projections = {}, {} + if not isinstance(belief_states, tuple): + raise ValueError("belief_states must be a tuple when to_factors is True") + for factor_idx, factor in enumerate(belief_states): + if not isinstance(factor, jax.Array): + raise ValueError("Each factor in belief_states must be a jax.Array") + factor_scalars, factor_projections = linear_regression(layer_activations, factor, weights, **kwargs) + for key, value in factor_scalars.items(): + scalars[f"factor_{factor_idx}/{key}"] = value + for key, value in factor_projections.items(): + projections[f"factor_{factor_idx}/{key}"] = value + return scalars, projections + else: + belief_states = jnp.concatenate(belief_states, axis=-1) if isinstance(belief_states, tuple) else belief_states + return linear_regression(layer_activations, belief_states, weights, **kwargs) def layer_linear_regression_svd( layer_activations: jax.Array, weights: 
jax.Array, - belief_states: jax.Array | None, + belief_states: jax.Array | tuple[jax.Array, ...] | None, + to_factors: bool = False, **kwargs: Any, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: """Layer-wise regression helper that wraps :func:`linear_regression_svd`.""" if belief_states is None: raise ValueError("linear_regression_svd requires belief_states") - return linear_regression_svd(layer_activations, belief_states, weights, **kwargs) + + if to_factors: + scalars, projections = {}, {} + if not isinstance(belief_states, tuple): + raise ValueError("belief_states must be a tuple when to_factors is True") + for factor_idx, factor in enumerate(belief_states): + if not isinstance(factor, jax.Array): + raise ValueError("Each factor in belief_states must be a jax.Array") + factor_scalars, factor_projections = linear_regression_svd(layer_activations, factor, weights, **kwargs) + for key, value in factor_scalars.items(): + scalars[f"factor_{factor_idx}/{key}"] = value + for key, value in factor_projections.items(): + projections[f"factor_{factor_idx}/{key}"] = value + return scalars, projections + else: + belief_states = jnp.concatenate(belief_states, axis=-1) if isinstance(belief_states, tuple) else belief_states + return linear_regression_svd(layer_activations, belief_states, weights, **kwargs) diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 8731e783..9f0fc929 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -11,13 +11,20 @@ import inspect from collections.abc import Callable, Mapping, Sequence -from typing import Any +from typing import Any, Literal import jax import jax.numpy as jnp +from simplexity.generative_processes.factored_generative_process import ComponentType, FactoredGenerativeProcess from simplexity.generative_processes.generalized_hidden_markov_model import GeneralizedHiddenMarkovModel from 
simplexity.generative_processes.hidden_markov_model import HiddenMarkovModel +from simplexity.generative_processes.structures import ( + ConditionalTransitions, + FullyConditional, + IndependentStructure, + SequentialConditional, +) from simplexity.generative_processes.transition_matrices import ( GHMM_MATRIX_FUNCTIONS, HMM_MATRIX_FUNCTIONS, @@ -146,3 +153,480 @@ def build_nonergodic_hidden_markov_model( initial_state = jnp.zeros((num_states,), dtype=composite_transition_matrix.dtype) initial_state = initial_state.at[num_states - 1].set(1) return HiddenMarkovModel(composite_transition_matrix, initial_state, device=device) + + +def build_factored_process( + structure_type: Literal["independent", "chain", "symmetric", "transition_coupled"], + component_types: Sequence[ComponentType], + transition_matrices: Sequence[jax.Array], + normalizing_eigenvectors: Sequence[jax.Array], + initial_states: Sequence[jax.Array], + **structure_kwargs, +) -> FactoredGenerativeProcess: + """Factory function for building factored processes with different conditional structures. 
+ + Args: + structure_type: Which conditional structure to instantiate + component_types: Type of each factor ("hmm" or "ghmm") + transition_matrices: Per-factor transition tensors (shape [K_i, V_i, S_i, S_i]) + normalizing_eigenvectors: Per-factor eigenvectors (shape [K_i, S_i]) + initial_states: Initial state per factor (shape [S_i]) + **structure_kwargs: Structure-specific keyword arguments: + - For "independent": (none) + - For "chain": control_maps + - For "symmetric": control_maps + - For "transition_coupled": control_maps_transition, + emission_variant_indices, emission_control_maps (optional) + + Returns: + FactoredGenerativeProcess configured with the requested conditional structure + + Raises: + ValueError: If structure_type is invalid or required kwargs are missing + """ + vocab_sizes = jnp.array([int(T.shape[1]) for T in transition_matrices]) + + if structure_type == "independent": + structure = IndependentStructure() + elif structure_type == "chain": + if "control_maps" not in structure_kwargs: + raise ValueError("Missing required argument 'control_maps' for chain structure") + structure = SequentialConditional(control_maps=tuple(structure_kwargs["control_maps"]), vocab_sizes=vocab_sizes) + elif structure_type == "symmetric": + if "control_maps" not in structure_kwargs: + raise ValueError("Missing required argument 'control_maps' for symmetric structure") + structure = FullyConditional(control_maps=tuple(structure_kwargs["control_maps"]), vocab_sizes=vocab_sizes) + elif structure_type == "transition_coupled": + if "control_maps_transition" not in structure_kwargs: + raise ValueError("Missing required argument 'control_maps_transition' for transition_coupled structure") + if "emission_variant_indices" not in structure_kwargs: + raise ValueError("Missing required argument 'emission_variant_indices' for transition_coupled structure") + structure = ConditionalTransitions( + control_maps_transition=tuple(structure_kwargs["control_maps_transition"]), + 
emission_variant_indices=structure_kwargs["emission_variant_indices"], + vocab_sizes=vocab_sizes, + emission_control_maps=tuple(structure_kwargs["emission_control_maps"]) + if "emission_control_maps" in structure_kwargs and structure_kwargs["emission_control_maps"] is not None + else None, + ) + else: + raise ValueError(f"Unknown structure_type '{structure_type}'") + + return FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +def build_factored_process_from_spec( + structure_type: Literal["independent", "chain", "symmetric", "transition_coupled"], + spec: Sequence[dict[str, Any]], + **structure_params, +) -> FactoredGenerativeProcess: + """Unified builder for factored processes from specification. + + Args: + structure_type: Which conditional structure to use + spec: Component specifications. Format depends on structure_type: + - For "independent": List of component dicts + - For "chain": List of component dicts with control_maps + - For "symmetric": List of component dicts + - For "transition_coupled": List of component dicts + **structure_params: Additional structure-specific parameters: + - For "independent": (none) + - For "chain": (none, uses spec's control_map fields) + - For "symmetric": control_maps (list) + - For "transition_coupled": control_maps_transition, emission_variant_indices, + emission_control_maps (optional) + + Returns: + FactoredGenerativeProcess with specified structure + + Example: + ```python + # Independent + process = build_factored_process_from_spec( + structure_type="independent", + spec=[ + {"component_type": "hmm", "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}]}, + {"component_type": "hmm", "variants": [{"process_name": "mess3", "x": 0.5, "a": 0.6}]}, + ], + ) + + # Symmetric + process = build_factored_process_from_spec( + structure_type="symmetric", + 
spec=[...], + control_maps=[[0, 1, 0, 1], [1, 0, 1, 0]], + ) + ``` + """ + if structure_type == "independent": + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec(spec) + return build_factored_process( + structure_type="independent", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + ) + elif structure_type == "chain": + component_types, transition_matrices, normalizing_eigenvectors, initial_states, control_maps = ( + build_chain_from_spec(spec) + ) + return build_factored_process( + structure_type="chain", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + control_maps=control_maps, + ) + elif structure_type == "symmetric": + if "control_maps" not in structure_params: + raise ValueError("symmetric structure requires 'control_maps' parameter") + ( + component_types, + transition_matrices, + normalizing_eigenvectors, + initial_states, + control_maps_arrays, + ) = build_symmetric_from_spec(spec, structure_params["control_maps"]) + return build_factored_process( + structure_type="symmetric", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + control_maps=control_maps_arrays, + ) + elif structure_type == "transition_coupled": + if "control_maps_transition" not in structure_params: + raise ValueError("transition_coupled structure requires 'control_maps_transition' parameter") + if "emission_variant_indices" not in structure_params: + raise ValueError("transition_coupled structure requires 'emission_variant_indices' parameter") + ( + component_types, + transition_matrices, + normalizing_eigenvectors, + initial_states, + control_maps_arrays, + emission_variant_indices_array, + 
emission_control_maps_arrays, + ) = build_transition_coupled_from_spec( + spec, + structure_params["control_maps_transition"], + structure_params["emission_variant_indices"], + structure_params.get("emission_control_maps"), + ) + return build_factored_process( + structure_type="transition_coupled", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + control_maps_transition=control_maps_arrays, + emission_variant_indices=emission_variant_indices_array, + emission_control_maps=emission_control_maps_arrays, + ) + else: + raise ValueError(f"Unknown structure_type '{structure_type}'") + + +def build_matrices_from_spec( + spec: Sequence[dict[str, Any]], +) -> tuple[ + list[ComponentType], + list[jax.Array], + list[jax.Array], + list[jax.Array], +]: + """Build transition matrices, eigenvectors, and initial states from spec. + + This is a generic helper that works for all conditional structures. 
Each element of spec + should be a dict with: + - component_type: "hmm" | "ghmm" + - variants: list of dicts, each with "process_name" and process-specific kwargs + + Args: + spec: List of factor specifications + + Returns: + Tuple of (component_types, transition_matrices, normalizing_eigenvectors, initial_states) + + Example: + ```python + spec = [ + { + "component_type": "hmm", + "variants": [ + {"process_name": "mess3", "x": 0.15, "a": 0.6}, + {"process_name": "mess3", "x": 0.5, "a": 0.6}, + ] + }, + { + "component_type": "ghmm", + "variants": [ + {"process_name": "tom_quantum", "alpha": 1.0, "beta": 1.0}, + ] + }, + ] + component_types, T_mats, norms, states = build_matrices_from_spec(spec) + ``` + """ + if not spec: + raise ValueError("spec must contain at least one factor") + + component_types: list[ComponentType] = [] + transition_matrices: list[jax.Array] = [] + normalizing_eigenvectors: list[jax.Array] = [] + initial_states: list[jax.Array] = [] + + for idx, factor_spec in enumerate(spec): + ctype: ComponentType = factor_spec.get("component_type", "ghmm") + variants: Sequence[dict[str, Any]] = factor_spec.get("variants", []) + + if not variants: + raise ValueError(f"spec[{idx}].variants must be non-empty") + + # Build all variants for this factor + built = [ + build_hidden_markov_model(**v) if ctype == "hmm" else build_generalized_hidden_markov_model(**v) + for v in variants + ] + + # Validate dimensions + vocab_sizes = [b.vocab_size for b in built] + num_states = [b.num_states if hasattr(b, "num_states") else b.transition_matrices.shape[1] for b in built] + + if len(set(vocab_sizes)) != 1: + raise ValueError(f"All variants in spec[{idx}] must have same vocab size; got {vocab_sizes}") + if len(set(num_states)) != 1: + raise ValueError(f"All variants in spec[{idx}] must have same state dim; got {num_states}") + + S = num_states[0] + + # Stack transition matrices: [K, V, S, S] + T_stack = jnp.stack([b.transition_matrices for b in built], axis=0) + 
transition_matrices.append(T_stack) + + # Stack normalizing eigenvectors (or create dummy for HMM) + if ctype == "ghmm": + norms = jnp.stack([b.normalizing_eigenvector for b in built], axis=0) # [K, S] + else: # dummy (unused) vector for HMM + norms = jnp.ones((len(built), S)) + normalizing_eigenvectors.append(norms) + + # Initial state: use variant 0's initial state + initial_states.append(built[0].initial_state) + + component_types.append(ctype) + + return component_types, transition_matrices, normalizing_eigenvectors, initial_states + + +def build_chain_from_spec( + chain: Sequence[dict[str, Any]], +) -> tuple[ + list[ComponentType], + list[jax.Array], + list[jax.Array], + list[jax.Array], + list[jax.Array | None], +]: + """Build all parameters for chain structure from chain specification. + + Each element of chain should be a dict with: + - component_type: "hmm" | "ghmm" + - variants: list of variant specs + - control_map (optional for index 0, required for i>0): list[int] mapping + parent token -> variant index + + Args: + chain: List of factor specifications with control maps + + Returns: + Tuple of (component_types, transition_matrices, normalizing_eigenvectors, + initial_states, control_maps) + + Example: + ```python + chain = [ + { + "component_type": "hmm", + "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}], + # No control_map for root + }, + { + "component_type": "hmm", + "variants": [ + {"process_name": "mess3", "x": 0.15, "a": 0.6}, + {"process_name": "mess3", "x": 0.5, "a": 0.6}, + ], + "control_map": [0, 1, 0], # Maps 3 parent tokens -> 2 variants + }, + ] + ``` + """ + if not chain: + raise ValueError("chain must contain at least one node") + + # Build base matrices + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec(chain) + + # Extract control maps + control_maps: list[jax.Array | None] = [] + expected_prev_vocab = None + + for idx, node in enumerate(chain): + if idx == 0: + 
control_maps.append(None) + else: + cm = node.get("control_map", None) + if cm is None: + raise ValueError(f"chain[{idx}].control_map is required for i>0") + + cm_arr = jnp.asarray(cm, dtype=jnp.int32) + + if expected_prev_vocab is not None and int(cm_arr.shape[0]) != int(expected_prev_vocab): + raise ValueError( + f"chain[{idx}].control_map length {cm_arr.shape[0]} must equal parent vocab {expected_prev_vocab}" + ) + + control_maps.append(cm_arr) + + # Track vocab size for next iteration + expected_prev_vocab = int(transition_matrices[idx].shape[1]) + + return component_types, transition_matrices, normalizing_eigenvectors, initial_states, control_maps + + +def build_symmetric_from_spec( + components: Sequence[dict[str, Any]], + control_maps: Sequence[list[int]], +) -> tuple[ + list[ComponentType], + list[jax.Array], + list[jax.Array], + list[jax.Array], + list[jax.Array], +]: + """Build all parameters for symmetric structure from specification. + + Args: + components: List of factor specifications (same format as build_matrices_from_spec) + control_maps: Control maps for each factor. control_maps[i] should have + shape [prod(V_j for j!=i)] mapping other-factor tokens to variant index. + + Returns: + Tuple of (component_types, transition_matrices, normalizing_eigenvectors, + initial_states, control_maps_arrays) + + Example: + ```python + components = [ + { + "component_type": "hmm", + "variants": [ + {"process_name": "mess3", "x": 0.15, "a": 0.6}, + {"process_name": "mess3", "x": 0.5, "a": 0.6}, + ], + }, + # ... 
more components + ] + control_maps = [ + [0, 1, 0, 1], # Factor 0: 4 other-token combos -> variants + [1, 0, 1, 0], # Factor 1: 4 other-token combos -> variants + ] + ``` + """ + # Build base matrices + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components + ) + + # Convert control maps to JAX arrays + control_maps_arrays = [jnp.asarray(cm, dtype=jnp.int32) for cm in control_maps] + + # Validate control map lengths + vocab_sizes = [int(T.shape[1]) for T in transition_matrices] + F = len(vocab_sizes) + + for i, cm in enumerate(control_maps_arrays): + # Expected length: product of all vocab sizes except i + expected = 1 + for j in range(F): + if j != i: + expected *= vocab_sizes[j] + + if int(cm.shape[0]) != expected: + raise ValueError(f"control_maps[{i}] length {cm.shape[0]} must equal prod(V_j for j!=[{i}]) = {expected}") + + return component_types, transition_matrices, normalizing_eigenvectors, initial_states, control_maps_arrays + + +def build_transition_coupled_from_spec( + components: Sequence[dict[str, Any]], + control_maps_transition: Sequence[list[int]], + emission_variant_indices: Sequence[int], + emission_control_maps: Sequence[list[int] | None] | None = None, +) -> tuple[ + list[ComponentType], + list[jax.Array], + list[jax.Array], + list[jax.Array], + list[jax.Array], + jax.Array, + list[jax.Array | None] | None, +]: + """Build all parameters for transition-coupled structure from specification. 
+ + Args: + components: List of factor specifications + control_maps_transition: Transition control maps (same format as symmetric) + emission_variant_indices: Fixed emission variant per factor + emission_control_maps: Optional chain-style emission control maps + + Returns: + Tuple of (component_types, transition_matrices, normalizing_eigenvectors, + initial_states, control_maps_transition_arrays, + emission_variant_indices_array, emission_control_maps_arrays) + + Example: + ```python + components = [...] + control_maps_transition = [[0, 1, 0, 1], [1, 0, 1, 0]] + emission_variant_indices = [0, 0] # Use variant 0 for emissions + emission_control_maps = None # Independent emissions + ``` + """ + # Build base matrices + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components + ) + + # Convert transition control maps + control_maps_arrays = [jnp.asarray(cm, dtype=jnp.int32) for cm in control_maps_transition] + + # Convert emission variant indices + emission_variant_indices_array = jnp.asarray(emission_variant_indices, dtype=jnp.int32) + + # Convert emission control maps if provided + emission_control_maps_arrays = None + if emission_control_maps is not None: + emission_control_maps_arrays = [ + jnp.asarray(cm, dtype=jnp.int32) if cm is not None else None for cm in emission_control_maps + ] + + return ( + component_types, + transition_matrices, + normalizing_eigenvectors, + initial_states, + control_maps_arrays, + emission_variant_indices_array, + emission_control_maps_arrays, + ) diff --git a/simplexity/generative_processes/factored_generative_process.py b/simplexity/generative_processes/factored_generative_process.py new file mode 100644 index 00000000..2a842b51 --- /dev/null +++ b/simplexity/generative_processes/factored_generative_process.py @@ -0,0 +1,269 @@ +"""Unified factored generative process with pluggable conditional structures.""" + +from __future__ import annotations + +from collections.abc 
import Sequence +from typing import Literal + +import chex +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.generative_process import GenerativeProcess +from simplexity.generative_processes.structures import ConditionalContext, ConditionalStructure +from simplexity.logger import SIMPLEXITY_LOGGER +from simplexity.utils.factoring_utils import TokenEncoder, transition_with_obs +from simplexity.utils.jnp_utils import resolve_jax_device + +ComponentType = Literal["hmm", "ghmm"] +FactoredState = tuple[jax.Array, ...] + + +def _move_arrays_to_device( + arrays: Sequence[jax.Array], + device: jax.Device, # type: ignore[valid-type] + name: str, +) -> tuple[jax.Array, ...]: + """Move arrays to specified device with warning if needed. + + Args: + arrays: Sequence of arrays to move + device: Target device + name: Name for warning messages (e.g., "Transition matrices") + + Returns: + Tuple of arrays on target device + """ + result = [] + for i, arr in enumerate(arrays): + if arr.device != device: + SIMPLEXITY_LOGGER.warning( + "%s[%d] on device %s but model is on device %s. Moving to model device.", + name, + i, + arr.device, + device, + ) + arr = jax.device_put(arr, device) + result.append(arr) + return tuple(result) + + +class FactoredGenerativeProcess(GenerativeProcess[FactoredState]): + """Unified factored generative process with pluggable conditional structures. + + This class provides a single implementation of factored generative processes + that supports different conditional dependency patterns via the ConditionalStructure protocol. 
+ + Attributes: + component_types: Type of each factor ("hmm" or "ghmm") + transition_matrices: Per-factor transition tensors (shape [K_i, V_i, S_i, S_i]) + normalizing_eigenvectors: Per-factor eigenvectors (shape [K_i, S_i]) + initial_states: Initial state per factor (shape [S_i]) + num_variants: Number of parameter variants per factor + structure: Conditional structure determining factor interactions + encoder: Token encoder for composite observations + """ + + # Static structure + component_types: tuple[ComponentType, ...] + num_variants: tuple[int, ...] + device: jax.Device # type: ignore[valid-type] + + # Per-factor parameters + transition_matrices: tuple[jax.Array, ...] + normalizing_eigenvectors: tuple[jax.Array, ...] + initial_states: tuple[jax.Array, ...] + + # Conditional structure and encoding + structure: ConditionalStructure + encoder: TokenEncoder + + def __init__( + self, + *, + component_types: Sequence[ComponentType], + transition_matrices: Sequence[jax.Array], + normalizing_eigenvectors: Sequence[jax.Array], + initial_states: Sequence[jax.Array], + structure: ConditionalStructure, + device: str | None = None, + ) -> None: + """Initialize factored generative process. + + Args: + component_types: Type of each factor ("hmm" or "ghmm") + transition_matrices: Per-factor transition tensors. + transition_matrices[i] has shape [K_i, V_i, S_i, S_i] + normalizing_eigenvectors: Per-factor eigenvectors for GHMM. 
+ normalizing_eigenvectors[i] has shape [K_i, S_i] + initial_states: Initial state per factor (shape [S_i]) + structure: Conditional structure defining factor interactions + device: Device to place arrays on (e.g., "cpu", "gpu") + """ + if len(component_types) == 0: + raise ValueError("Must provide at least one component") + + self.device = resolve_jax_device(device) + self.component_types = tuple(component_types) + + # Move all arrays to device + self.transition_matrices = _move_arrays_to_device(transition_matrices, self.device, "Transition matrices") + self.normalizing_eigenvectors = _move_arrays_to_device( + normalizing_eigenvectors, self.device, "Normalizing eigenvectors" + ) + self.initial_states = _move_arrays_to_device(initial_states, self.device, "Initial states") + + self.structure = structure + + # Validate shapes and compute derived sizes + vocab_sizes = [] + num_variants = [] + for i, transition_matrix in enumerate(self.transition_matrices): + if transition_matrix.ndim != 4: + raise ValueError( + f"transition_matrices[{i}] must have shape [K, V, S, S], got {transition_matrix.shape}" + ) + num_var, vocab_size, state_dim1, state_dim2 = transition_matrix.shape + if state_dim1 != state_dim2: + raise ValueError(f"transition_matrices[{i}] square mismatch: {state_dim1} vs {state_dim2}") + vocab_sizes.append(vocab_size) + num_variants.append(num_var) + self.num_variants = tuple(int(k) for k in num_variants) + self.encoder = TokenEncoder(jnp.array(vocab_sizes)) + + def _make_context(self, state: FactoredState) -> ConditionalContext: + """Create conditional context for structure methods.""" + return ConditionalContext( + states=state, + component_types=self.component_types, + transition_matrices=self.transition_matrices, + normalizing_eigenvectors=self.normalizing_eigenvectors, + vocab_sizes=self.encoder.vocab_sizes, + num_variants=self.num_variants, + ) + + # ------------------------ GenerativeProcess API ------------------------- + @property + def 
vocab_size(self) -> int: + """Total vocabulary size of composite observations.""" + return self.encoder.composite_vocab_size + + @property + def initial_state(self) -> FactoredState: + """Initial state across all factors.""" + return tuple(self.initial_states) + + @eqx.filter_jit + def observation_probability_distribution(self, state: FactoredState) -> jax.Array: + """Compute P(composite_token | state) under the conditional structure. + + Args: + state: Tuple of state vectors (one per factor) + + Returns: + Distribution over composite tokens, shape [prod(V_i)] + """ + context = self._make_context(state) + return self.structure.compute_joint_distribution(context) + + @eqx.filter_jit + def log_observation_probability_distribution(self, log_belief_state: FactoredState) -> jax.Array: + """Compute log P(composite_token | state). + + Args: + log_belief_state: Tuple of log-state vectors + + Returns: + Log-distribution over composite tokens, shape [prod(V_i)] + """ + state = tuple(jnp.exp(s) for s in log_belief_state) + probs = self.observation_probability_distribution(state) + return jnp.log(probs) + + @eqx.filter_jit + def emit_observation(self, state: FactoredState, key: jax.Array) -> jax.Array: + """Sample composite observation from current state. + + Args: + state: Tuple of state vectors + key: JAX random key + + Returns: + Composite observation (scalar token) + """ + probs = self.observation_probability_distribution(state) + token_flat = jax.random.categorical(key, jnp.log(probs)) + return token_flat + + @eqx.filter_jit + def transition_states(self, state: FactoredState, obs: chex.Array) -> FactoredState: + """Update states given composite observation. 
+ + Args: + state: Tuple of current state vectors + obs: Composite observation (scalar token) + + Returns: + Tuple of updated state vectors + """ + # Decode composite observation to per-factor tokens + obs_tuple = self.encoder.token_to_tuple(obs) + + # Select variants based on conditional structure + context = self._make_context(state) + variants = self.structure.select_variants(obs_tuple, context) + + # Update each factor's state + new_states: list[jax.Array] = [] + for i, (s_i, t_i, k_i) in enumerate(zip(state, obs_tuple, variants, strict=True)): + transition_matrix_k = self.transition_matrices[i][k_i] + norm_k = self.normalizing_eigenvectors[i][k_i] if self.component_types[i] == "ghmm" else None + new_state_i = transition_with_obs(self.component_types[i], s_i, transition_matrix_k, t_i, norm_k) + new_states.append(new_state_i) + + return tuple(new_states) + + @eqx.filter_jit + def probability(self, observations: jax.Array) -> jax.Array: + """Compute P(observations) by scanning through sequence. + + Args: + observations: Array of composite observations + + Returns: + Scalar probability + """ + + def step(carry: FactoredState, obs: jax.Array): + state = carry + dist = self.observation_probability_distribution(state) + p = dist[obs] + new_state = self.transition_states(state, obs) + return new_state, p + + _, ps = jax.lax.scan(step, self.initial_state, observations) + return jnp.prod(ps) + + @eqx.filter_jit + def log_probability(self, observations: jax.Array) -> jax.Array: + """Compute log P(observations) by scanning through sequence. 
+ + Args: + observations: Array of composite observations + + Returns: + Scalar log-probability + """ + + def step(carry: FactoredState, obs: jax.Array): + state = carry + # Compute distribution directly without converting to log and back + dist = self.observation_probability_distribution(state) + lp = jnp.log(dist[obs]) + new_state = self.transition_states(state, obs) + return new_state, lp + + _, lps = jax.lax.scan(step, self.initial_state, observations) + return jnp.sum(lps) diff --git a/simplexity/generative_processes/generalized_hidden_markov_model.py b/simplexity/generative_processes/generalized_hidden_markov_model.py index 53b70340..c3c60807 100644 --- a/simplexity/generative_processes/generalized_hidden_markov_model.py +++ b/simplexity/generative_processes/generalized_hidden_markov_model.py @@ -87,10 +87,26 @@ def __init__( self.log_normalizing_constant = jax.nn.logsumexp(self.log_initial_state + self.log_normalizing_eigenvector) def validate_transition_matrices(self, transition_matrices: jax.Array): - """Validate the transition matrices.""" + """Validate the transition matrices. + + For GHMM, transition matrices must be non-negative and the net transition + matrix T = sum_x T^(x) should have a dominant eigenvalue close to 1. + """ if transition_matrices.ndim != 3 or transition_matrices.shape[1] != transition_matrices.shape[2]: raise ValueError("Transition matrices must have shape (vocab_size, num_states, num_states)") + # Check that net transition matrix has dominant eigenvalue close to 1 + state_transition_matrix = jnp.asarray(jnp.sum(transition_matrices, axis=0)) + eigenvalues, _ = jnp.linalg.eig(state_transition_matrix) + eigenvalues = jnp.asarray(eigenvalues) + principal_eigenvalue = jnp.max(jnp.abs(eigenvalues)) + + if not jnp.isclose(principal_eigenvalue, 1.0, rtol=1e-5): + SIMPLEXITY_LOGGER.warning( + "Net transition matrix has principal eigenvalue %.6f (expected 1.0). 
Matrices will be normalized.", + float(principal_eigenvalue), + ) + @property def vocab_size(self) -> int: """The number of distinct observations that can be emitted by the model.""" diff --git a/simplexity/generative_processes/generator.py b/simplexity/generative_processes/generator.py index f9e7fa5d..d8ec2c34 100644 --- a/simplexity/generative_processes/generator.py +++ b/simplexity/generative_processes/generator.py @@ -49,11 +49,10 @@ def generate_data_batch_with_full_history( key: jax.Array, bos_token: int | None = None, eos_token: int | None = None, -) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]: +) -> dict[str, jax.Array | tuple[jax.Array, ...]]: """Generate sequences plus per-token belief states and prefix probabilities.""" batch_keys = jax.random.split(key, batch_size) belief_states, tokens = data_generator.generate(gen_states, batch_keys, sequence_len, True) - next_states = belief_states[:, -1, :] prefix_probs = _compute_prefix_probabilities(data_generator, gen_states, tokens) @@ -74,7 +73,14 @@ def generate_data_batch_with_full_history( labels = tokens[:, 1:] prefix_probs = prefix_probs[:, : inputs.shape[1]] - return next_states, belief_states, prefix_probs, inputs, labels + result = { + "belief_states": belief_states, + "prefix_probabilities": prefix_probs, + "inputs": inputs, + "labels": labels, + } + + return result def _compute_prefix_probabilities( diff --git a/simplexity/generative_processes/structures/__init__.py b/simplexity/generative_processes/structures/__init__.py new file mode 100644 index 00000000..524c8597 --- /dev/null +++ b/simplexity/generative_processes/structures/__init__.py @@ -0,0 +1,37 @@ +"""Conditional structures for factored generative processes. + +This module provides various conditional dependency structures that define +how factors in a factored generative process depend on each other. 
+ +Available structures: +- IndependentStructure: No conditional dependencies between factors +- SequentialConditional: One-way chain dependencies (factor i depends on i-1) +- FullyConditional: Mutual dependencies between all factors +- ConditionalTransitions: Hybrid structure (independent/sequential emissions, mutual transitions) +""" + +from simplexity.generative_processes.structures.conditional_transitions import ( + ConditionalTransitions, +) +from simplexity.generative_processes.structures.fully_conditional import ( + FullyConditional, +) +from simplexity.generative_processes.structures.independent import ( + IndependentStructure, +) +from simplexity.generative_processes.structures.protocol import ( + ConditionalContext, + ConditionalStructure, +) +from simplexity.generative_processes.structures.sequential_conditional import ( + SequentialConditional, +) + +__all__ = [ + "ConditionalContext", + "ConditionalStructure", + "ConditionalTransitions", + "FullyConditional", + "IndependentStructure", + "SequentialConditional", +] diff --git a/simplexity/generative_processes/structures/conditional_transitions.py b/simplexity/generative_processes/structures/conditional_transitions.py new file mode 100644 index 00000000..9e4db05b --- /dev/null +++ b/simplexity/generative_processes/structures/conditional_transitions.py @@ -0,0 +1,232 @@ +"""Conditional transitions structure: independent/sequential emissions with mutually conditional transitions. + +Emissions can be either: +- Independent: each factor uses a fixed emission variant +- Sequential: factor i selects emission variant based on previous tokens 0..i-1 + +Transitions are always mutually conditional: factor i selects transition variant based on +all other factors' tokens. 
+""" + +from __future__ import annotations + +from collections.abc import Sequence + +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.structures.protocol import ConditionalContext +from simplexity.utils.factoring_utils import compute_obs_dist_for_variant + + +class ConditionalTransitions(eqx.Module): + """Conditional transitions structure with flexible emission modes. + + Emissions can be: + - Independent (use_emission_chain=False): P(t) = ∏_i P_i(t_i | s_i, k_emit_i) + - Sequential (use_emission_chain=True): P(t) = P0(t0) * ∏_{i>0} P_i(t_i | t_0..t_{i-1}, s_i) + + Transitions are always mutually conditional: factor i selects transition variant based on + all other factors' tokens. + + Attributes: + control_maps_transition: Transition control maps. control_maps_transition[i] + has shape [prod(V_j for j!=i)] mapping other tokens to transition variant. + emission_variant_indices: Fixed emission variants per factor (shape [F]) + emission_control_maps: Optional sequential emission control maps + use_emission_chain: Whether to use sequential emissions + other_multipliers: Precomputed radix multipliers for other-factor indexing + prefix_multipliers: Precomputed radix multipliers for prefix indexing + vocab_sizes_py: Python int tuple of vocab sizes + """ + + control_maps_transition: tuple[jax.Array, ...] + emission_variant_indices: jax.Array # shape [F] + emission_control_maps: tuple[jax.Array | None, ...] + use_emission_chain: bool + other_multipliers: tuple[jax.Array, ...] + prefix_multipliers: tuple[jax.Array, ...] + vocab_sizes_py: tuple[int, ...] + + def __init__( + self, + control_maps_transition: tuple[jax.Array, ...], + emission_variant_indices: jax.Array | Sequence[int], + vocab_sizes: jax.Array, + emission_control_maps: tuple[jax.Array | None, ...] | None = None, + ): + """Initialize conditional transitions structure. + + Args: + control_maps_transition: Transition control maps for each factor. 
+                control_maps_transition[i] should have shape [prod(V_j for j!=i)].
+            emission_variant_indices: Fixed emission variant per factor (shape [F])
+            vocab_sizes: Vocabulary sizes per factor (shape [F])
+            emission_control_maps: Optional sequential emission control maps.
+                If provided, emission_control_maps[i] should have shape
+                [prod(V_j for j < i)] for i > 0.
+        """
+        self.control_maps_transition = tuple(jnp.asarray(cm, dtype=jnp.int32) for cm in control_maps_transition)
+        self.emission_variant_indices = jnp.asarray(emission_variant_indices, dtype=jnp.int32)
+        self.vocab_sizes_py = tuple(int(v) for v in vocab_sizes)
+        num_factors = len(vocab_sizes)
+
+        # Process emission control maps
+        use_chain = False
+        ecm_list: list[jax.Array | None] = []
+        if emission_control_maps is not None:
+            for i, cm_i in enumerate(emission_control_maps):
+                if cm_i is None:
+                    ecm_list.append(None)
+                else:
+                    ecm_list.append(jnp.asarray(cm_i, dtype=jnp.int32))
+                    if i > 0:
+                        use_chain = True
+        else:
+            ecm_list = [None] * num_factors
+        self.emission_control_maps = tuple(ecm_list)
+        self.use_emission_chain = bool(use_chain)
+
+        # Precompute multipliers for other-factor indexing (for transitions)
+        other_multipliers: list[jax.Array] = []
+        for i in range(num_factors):
+            mult = []
+            for j in range(num_factors):
+                if j == i:
+                    mult.append(0)  # Unused
+                else:
+                    m = 1
+                    for k in range(j + 1, num_factors):
+                        if k == i:
+                            continue
+                        m *= self.vocab_sizes_py[k]
+                    mult.append(m)
+            other_multipliers.append(jnp.array(mult))
+        self.other_multipliers = tuple(other_multipliers)
+
+        # Precompute multipliers for prefix indexing (for sequential emissions)
+        prefix_multipliers: list[jax.Array] = []
+        for i in range(num_factors):
+            pmult = []
+            for j in range(num_factors):
+                if j >= i:
+                    pmult.append(0)  # Unused
+                else:
+                    m = 1
+                    for k in range(j + 1, i):
+                        m *= self.vocab_sizes_py[k]
+                    pmult.append(m)
+            prefix_multipliers.append(jnp.array(pmult))
+        self.prefix_multipliers = tuple(prefix_multipliers)
+
+    def 
_flatten_other_tokens_index(self, tokens: jax.Array, i: int) -> jax.Array: + """Flatten other-factor tokens to transition control map index.""" + mult = self.other_multipliers[i] + return jnp.sum(tokens * mult) + + def _flatten_prev_tokens_index(self, tokens: jax.Array, i: int) -> jax.Array: + """Flatten prefix tokens to emission control map index.""" + mult = self.prefix_multipliers[i] + return jnp.sum(tokens * mult) + + def compute_joint_distribution(self, context: ConditionalContext) -> jax.Array: + """Compute joint distribution based on emission mode. + + Args: + context: Conditional context with states and parameters + + Returns: + Flattened joint distribution of shape [prod(V_i)] + """ + num_factors = len(context.vocab_sizes) + states = context.states + component_types = context.component_types + transition_matrices = context.transition_matrices + normalizing_eigenvectors = context.normalizing_eigenvectors + num_variants = context.num_variants + + if not self.use_emission_chain: + # Independent emissions + parts = [] + for i in range(num_factors): + k_emit = self.emission_variant_indices[i] + transition_matrix_k = transition_matrices[i][k_emit] + norm_k = normalizing_eigenvectors[i][k_emit] if component_types[i] == "ghmm" else None + p_i = compute_obs_dist_for_variant(component_types[i], states[i], transition_matrix_k, norm_k) + parts.append(p_i) + + # Product of independent factors + j_prod = parts[0] + for i in range(1, num_factors): + j_prod = (j_prod[..., None] * parts[i]).reshape(*j_prod.shape, parts[i].shape[0]) + return j_prod.reshape(-1) + + # Sequential emissions + k0 = self.emission_variant_indices[0] + transition_matrix0 = transition_matrices[0][k0] + norm0 = normalizing_eigenvectors[0][k0] if component_types[0] == "ghmm" else None + joint = compute_obs_dist_for_variant(component_types[0], states[0], transition_matrix0, norm0) + prev_prod = self.vocab_sizes_py[0] + + for i in range(1, num_factors): + variant_k = num_variants[i] + ks = 
jnp.arange(variant_k, dtype=jnp.int32) + + # Compute all variant distributions + def get_dist_i(k: jax.Array, i: int = i) -> jax.Array: + transition_matrix_k = transition_matrices[i][k] + norm_k = normalizing_eigenvectors[i][k] if component_types[i] == "ghmm" else None + return compute_obs_dist_for_variant(component_types[i], states[i], transition_matrix_k, norm_k) + + all_pi = jax.vmap(get_dist_i)(ks) # [K_i, V_i] + + cm = self.emission_control_maps[i] + if cm is None: + # Use fixed emission variant + fixed = self.emission_variant_indices[i] + cond = jnp.tile(all_pi[fixed][None, :], (prev_prod, 1)) # [prev_prod, V_i] + else: + # Use control map + cond = all_pi[cm] # [prev_prod, V_i] + + left = joint.reshape(prev_prod) + extended = cond * left[:, None] + curr_vocab_size = self.vocab_sizes_py[i] + joint = extended.reshape(*(list(joint.shape) + [curr_vocab_size])) + prev_prod *= curr_vocab_size + + return joint.reshape(-1) + + def select_variants( + self, + obs_tuple: tuple[jax.Array, ...], + context: ConditionalContext, # pylint: disable=unused-argument + ) -> tuple[jax.Array, ...]: + """Select transition variants based on other factors' tokens. + + Note: This returns TRANSITION variants, not emission variants. 
+ + Args: + obs_tuple: Tuple of observed tokens (one per factor) + context: Conditional context (unused) + + Returns: + Tuple of transition variant indices (one per factor) + """ + tokens_arr = jnp.array(obs_tuple) + variants = [] + for i in range(len(obs_tuple)): + idx = self._flatten_other_tokens_index(tokens_arr, i) + k_trans = self.control_maps_transition[i][idx] + variants.append(k_trans) + return tuple(variants) + + def get_required_params(self) -> dict[str, type]: + """Return required parameters for conditional transitions structure.""" + return { + "control_maps_transition": tuple, + "emission_variant_indices": jax.Array, + "vocab_sizes": jax.Array, + "emission_control_maps": tuple, # optional + } diff --git a/simplexity/generative_processes/structures/fully_conditional.py b/simplexity/generative_processes/structures/fully_conditional.py new file mode 100644 index 00000000..d8ea5d14 --- /dev/null +++ b/simplexity/generative_processes/structures/fully_conditional.py @@ -0,0 +1,196 @@ +"""Fully conditional structure: mutual dependencies between all factors. + +Each factor's parameter variant is selected based on the tokens of +ALL OTHER factors via a control map, producing mutual dependencies. +""" + +from __future__ import annotations + +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.structures.protocol import ConditionalContext +from simplexity.utils.factoring_utils import compute_obs_dist_for_variant + + +class FullyConditional(eqx.Module): + """Fully conditional structure with mutual dependencies. + + Each factor i selects its variant based on all other factors' tokens. + Joint distribution uses product-of-experts with normalization. + + Attributes: + control_maps: Tuple of F arrays. control_maps[i] has shape [prod(V_j for j!=i)] + mapping flattened other-tokens to variant index for factor i. 
+ other_multipliers: Precomputed radix multipliers for flattening other tokens + other_shapes: Reshape targets for conditioning on other factors + perms_py: Axis permutations to align conditional distributions + vocab_sizes_py: Python int tuple of vocab sizes for shape operations + joint_vocab_size: Total vocabulary size (product of all V_i) + """ + + control_maps: tuple[jax.Array, ...] + other_multipliers: tuple[jax.Array, ...] + other_shapes: tuple[tuple[int, ...], ...] + perms_py: tuple[tuple[int, ...], ...] + vocab_sizes_py: tuple[int, ...] + joint_vocab_size: int + + def __init__( + self, + control_maps: tuple[jax.Array, ...], + vocab_sizes: jax.Array, + ): + """Initialize fully conditional structure. + + Args: + control_maps: Control maps for each factor. control_maps[i] should + have shape [prod(V_j for j!=i)] mapping other-factor tokens + to variant index for factor i. + vocab_sizes: Array of shape [F] with vocab sizes per factor + """ + self.control_maps = tuple(jnp.asarray(cm, dtype=jnp.int32) for cm in control_maps) + self.vocab_sizes_py = tuple(int(v) for v in vocab_sizes) + num_factors = len(vocab_sizes) + + # Compute joint vocab size + jv = 1 + for v in self.vocab_sizes_py: + jv *= v + self.joint_vocab_size = jv + + # Precompute indexing helpers for each factor + other_multipliers: list[jax.Array] = [] + other_shapes: list[tuple[int, ...]] = [] + perms_py: list[tuple[int, ...]] = [] + + for i in range(num_factors): + # Compute radix multipliers for "other" factors (excluding i) + mult = [] + for j in range(num_factors): + if j == i: + mult.append(0) # Unused + else: + m = 1 + for k in range(j + 1, num_factors): + if k == i: + continue + m *= self.vocab_sizes_py[k] + mult.append(m) + other_multipliers.append(jnp.array(mult)) + + # Shape for reshaping conditional [prod_others, V_i] -> [*others, V_i] + other_shapes.append(tuple(self.vocab_sizes_py[j] for j in range(num_factors) if j != i)) + + # Permutation to align [*others, V_i] to [V_0, ..., V_{F-1}] 
+ others = [j for j in range(num_factors) if j != i] + axis_pos = {j: pos for pos, j in enumerate(others)} + perm = [] + for j in range(num_factors): + if j == i: + perm.append(len(others)) # V_i is the last axis + else: + perm.append(axis_pos[j]) + perms_py.append(tuple(perm)) + + self.other_multipliers = tuple(other_multipliers) + self.other_shapes = tuple(other_shapes) + self.perms_py = tuple(perms_py) + + def _flatten_other_tokens_index(self, tokens: jax.Array, i: int) -> jax.Array: + """Flatten other-factor tokens to control map index. + + Args: + tokens: Array of shape [F] with all tokens + i: Factor index to exclude + + Returns: + Scalar index for control_maps[i] + """ + mult = self.other_multipliers[i] + # Multiply elementwise and sum (mult[i] == 0) + return jnp.sum(tokens * mult) + + def compute_joint_distribution(self, context: ConditionalContext) -> jax.Array: + """Compute joint distribution using product-of-experts. + + For each factor i, computes conditional P(t_i | all other t_j), + then multiplies all conditionals and normalizes. 
+ + Args: + context: Conditional context with states and parameters + + Returns: + Flattened joint distribution of shape [prod(V_i)] + """ + num_factors = len(context.vocab_sizes) + states = context.states + component_types = context.component_types + transition_matrices = context.transition_matrices + normalizing_eigenvectors = context.normalizing_eigenvectors + num_variants = context.num_variants + + # Compute per-factor conditionals + parts = [] + for i in range(num_factors): + variant_k = num_variants[i] + ks = jnp.arange(variant_k, dtype=jnp.int32) + + # Compute all variant distributions for factor i + def get_dist_i(k: jax.Array, i: int = i) -> jax.Array: + transition_matrix_k = transition_matrices[i][k] + norm_k = normalizing_eigenvectors[i][k] if component_types[i] == "ghmm" else None + return compute_obs_dist_for_variant(component_types[i], states[i], transition_matrix_k, norm_k) + + all_pi = jax.vmap(get_dist_i)(ks) # [K_i, V_i] + + # Select per other-tokens using control map + cm = self.control_maps[i] # [prod_others] + cond = all_pi[cm] # [prod_others, V_i] + + # Reshape to [*others, V_i] + cond_nd = cond.reshape(self.other_shapes[i] + (self.vocab_sizes_py[i],)) + + # Permute to [V_0, ..., V_{F-1}] with V_i at position i + aligned = jnp.transpose(cond_nd, self.perms_py[i]) + parts.append(aligned) + + # Product of experts + prod_j = parts[0] + for p in parts[1:]: + prod_j = prod_j * p + + # Normalize + sum_j = jnp.sum(prod_j) + norm_j = jnp.where(sum_j > 0, prod_j / sum_j, jnp.ones_like(prod_j) / self.joint_vocab_size) + + assert isinstance(norm_j, jax.Array) + + return norm_j.reshape(-1) + + def select_variants( + self, + obs_tuple: tuple[jax.Array, ...], + context: ConditionalContext, # pylint: disable=unused-argument + ) -> tuple[jax.Array, ...]: + """Select variants based on all other factors' tokens. 
+ + Args: + obs_tuple: Tuple of observed tokens (one per factor) + context: Conditional context (unused for fully conditional structure) + + Returns: + Tuple of variant indices (one per factor) + """ + tokens_arr = jnp.array(obs_tuple) + variants = [] + for i in range(len(obs_tuple)): + idx = self._flatten_other_tokens_index(tokens_arr, i) + k_i = self.control_maps[i][idx] + variants.append(k_i) + return tuple(variants) + + def get_required_params(self) -> dict[str, type]: + """Return required parameters for fully conditional structure.""" + return {"control_maps": tuple, "vocab_sizes": jax.Array} diff --git a/simplexity/generative_processes/structures/independent.py b/simplexity/generative_processes/structures/independent.py new file mode 100644 index 00000000..a7edbbdb --- /dev/null +++ b/simplexity/generative_processes/structures/independent.py @@ -0,0 +1,74 @@ +"""Independent structure: no conditional dependencies between factors. + +Each factor operates independently, always using variant 0. +Joint distribution is the product of independent factor distributions. +""" + +from __future__ import annotations + +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.structures.protocol import ConditionalContext +from simplexity.utils.factoring_utils import compute_obs_dist_for_variant + + +class IndependentStructure(eqx.Module): + """Independent structure with no conditional dependencies. + + Each factor operates independently: + - All factors always use variant 0 + - No control maps needed + - Joint distribution: P(t0, t1, ..., tF) = P(t0) * P(t1) * ... * P(tF) + + This is the simplest factored structure. + """ + + def compute_joint_distribution(self, context: ConditionalContext) -> jax.Array: + """Compute joint distribution as product of independent factors. 
+ + Args: + context: Conditional context with states and parameters + + Returns: + Flattened joint distribution of shape [prod(V_i)] + """ + num_factors = len(context.vocab_sizes) + states = context.states + component_types = context.component_types + transition_matrices = context.transition_matrices + normalizing_eigenvectors = context.normalizing_eigenvectors + + parts = [] + for i in range(num_factors): + T_i = transition_matrices[i][0] # pylint: disable=invalid-name # T_i is standard notation + norm_i = normalizing_eigenvectors[i][0] if component_types[i] == "ghmm" else None + p_i = compute_obs_dist_for_variant(component_types[i], states[i], T_i, norm_i) + parts.append(p_i) + + joint = parts[0] + for i in range(1, num_factors): + joint = (joint[..., None] * parts[i]).reshape(*joint.shape, parts[i].shape[0]) + + return joint.reshape(-1) + + def select_variants( + self, + obs_tuple: tuple[jax.Array, ...], + context: ConditionalContext, # pylint: disable=unused-argument # Required by protocol + ) -> tuple[jax.Array, ...]: + """Select variants (always 0 for all factors). + + Args: + obs_tuple: Tuple of observed tokens (unused) + context: Conditional context (unused) + + Returns: + Tuple of variant indices (all zeros) + """ + return tuple(jnp.array(0, dtype=jnp.int32) for _ in obs_tuple) + + def get_required_params(self) -> dict[str, type]: + """Return required parameters (none for independent structure).""" + return {} diff --git a/simplexity/generative_processes/structures/protocol.py b/simplexity/generative_processes/structures/protocol.py new file mode 100644 index 00000000..9f82f958 --- /dev/null +++ b/simplexity/generative_processes/structures/protocol.py @@ -0,0 +1,59 @@ +"""Conditional structure protocol for factored generative processes. + +Defines the interface for different conditional dependency structures between factors. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Literal, Protocol + +import jax + +ComponentType = Literal["hmm", "ghmm"] +FactoredState = tuple[jax.Array, ...] + + +@dataclass +class ConditionalContext: + """Context information needed by conditional structures to compute joint distributions. + + Attributes: + states: Tuple of state vectors, one per factor (shape [S_i]) + component_types: Type of each factor ("hmm" or "ghmm") + transition_matrices: Per-factor transition tensors (shape [K_i, V_i, S_i, S_i]) + normalizing_eigenvectors: Per-factor eigenvectors (shape [K_i, S_i]) + vocab_sizes: Vocabulary size per factor (shape [F]) + num_variants: Number of parameter variants per factor + """ + + states: FactoredState + component_types: tuple[ComponentType, ...] + transition_matrices: tuple[jax.Array, ...] + normalizing_eigenvectors: tuple[jax.Array, ...] + vocab_sizes: jax.Array + num_variants: tuple[int, ...] + + +class ConditionalStructure(Protocol): + """Protocol for conditional dependency structures between factors. + + A conditional structure defines how factors conditionally depend on each other + to produce joint observation distributions and how variant selection works. + """ + + def compute_joint_distribution(self, context: ConditionalContext) -> jax.Array: + """Compute the joint distribution of the conditional structure.""" + ... # pylint: disable=unnecessary-ellipsis + + def select_variants( + self, + obs_tuple: tuple[jax.Array, ...], + context: ConditionalContext, + ) -> tuple[jax.Array, ...]: + """Select the variants of the conditional structure.""" + ... # pylint: disable=unnecessary-ellipsis + + def get_required_params(self) -> dict[str, type]: + """Get the required parameters for the conditional structure.""" + ... 
"""Sequential conditional structure: one-way conditional dependencies between factors.

Factor i>0 selects its parameter variant based on the emitted token
of factor i-1 (parent) via a control map.
"""

from __future__ import annotations

import equinox as eqx
import jax
import jax.numpy as jnp

from simplexity.generative_processes.structures.protocol import ConditionalContext
from simplexity.utils.factoring_utils import compute_obs_dist_for_variant


class SequentialConditional(eqx.Module):
    """Sequential conditional structure (autoregressive chain).

    Factors form a chain: Factor i depends on Factor i-1's emitted token.
    - Factor 0 always uses variant 0
    - Factor i>0 uses control_maps[i][parent_token] to select variant

    Joint distribution: P(t0, t1, ..., tF) = P(t0) * P(t1|t0) * ... * P(tF|t_{F-1})

    Attributes:
        control_maps: Tuple of F arrays. control_maps[i] has shape [V_{i-1}] for i>0,
            mapping parent token to variant index. control_maps[0] is None.
        vocab_sizes_py: Python int tuple of vocab sizes (for reshape operations)
    """

    control_maps: tuple[jax.Array | None, ...]
    vocab_sizes_py: tuple[int, ...]

    def __init__(
        self,
        control_maps: tuple[jax.Array | None, ...],
        vocab_sizes: jax.Array,
    ):
        """Initialize sequential conditional structure.

        Args:
            control_maps: Control maps for variant selection. control_maps[0]
                should be None (root factor). control_maps[i] for i>0 should
                have shape [V_{i-1}] mapping parent token to variant index.
            vocab_sizes: Vocab sizes for shape operations. Must be array of shape [F].
        """
        self.control_maps = tuple(control_maps)
        # Convert to Python ints at construction time: reshape() needs static
        # sizes, and calling int() on a traced array under jit would fail.
        self.vocab_sizes_py = tuple(int(v) for v in vocab_sizes)

    def compute_joint_distribution(self, context: ConditionalContext) -> jax.Array:
        """Compute joint distribution using sequential factorization.

        Builds P(t0, t1, ..., tF) = P(t0) * P(t1|t0) * ... * P(tF|t_{F-1})
        iteratively, then flattens to radix encoding.

        Args:
            context: Conditional context with states and parameters

        Returns:
            Flattened joint distribution of shape [prod(V_i)]
        """
        num_factors = len(context.vocab_sizes)
        states = context.states
        component_types = context.component_types
        transition_matrices = context.transition_matrices
        normalizing_eigenvectors = context.normalizing_eigenvectors
        num_variants = context.num_variants

        # Root distribution (factor 0, variant 0)
        transition_matrix_root = transition_matrices[0][0]  # [V_0, S_0, S_0]
        norm_root = normalizing_eigenvectors[0][0] if component_types[0] == "ghmm" else None
        p_root = compute_obs_dist_for_variant(component_types[0], states[0], transition_matrix_root, norm_root)  # [V_0]
        joint = p_root

        # Iteratively extend with conditional factors
        for i in range(1, num_factors):
            # Compute distributions for all variants of factor i
            num_var_i = num_variants[i]
            ks = jnp.arange(num_var_i, dtype=jnp.int32)

            # Vectorize over variants. The `i: int = i` default binds the loop
            # variable eagerly, avoiding Python's late-binding closure pitfall.
            def get_dist_i(k: jax.Array, i: int = i) -> jax.Array:
                transition_matrix_k = transition_matrices[i][k]
                norm_k = normalizing_eigenvectors[i][k] if component_types[i] == "ghmm" else None
                return compute_obs_dist_for_variant(component_types[i], states[i], transition_matrix_k, norm_k)

            all_pi = jax.vmap(get_dist_i)(ks)  # [K_i, V_i]

            # Build conditional matrix [V_{i-1}, V_i] via control map: row t is
            # the distribution of factor i given parent token t.
            cm = self.control_maps[i]  # [V_{i-1}]
            cond = all_pi[cm]  # [V_{i-1}, V_i]

            # Extend joint distribution
            # Current joint has shape [..., V_{i-1}]
            # We want to expand to [..., V_{i-1}, V_i]
            # Use precomputed Python ints for reshape (JIT-compatible)
            prev_vocab_size = self.vocab_sizes_py[i - 1]
            curr_vocab_size = self.vocab_sizes_py[i]
            left = joint.reshape(-1, prev_vocab_size)  # [P, V_{i-1}]
            extended = left[..., None] * cond[None, ...]  # [P, V_{i-1}, V_i]
            joint = extended.reshape(joint.shape + (curr_vocab_size,))

        # Flatten so index order matches the radix (big-endian on factor 0) encoding
        return joint.reshape(-1)

    def select_variants(
        self,
        obs_tuple: tuple[jax.Array, ...],
        context: ConditionalContext,  # pylint: disable=unused-argument
    ) -> tuple[jax.Array, ...]:
        """Select variants based on parent tokens in chain.

        Args:
            obs_tuple: Tuple of observed tokens (one per factor)
            context: Conditional context (unused for sequential conditional)

        Returns:
            Tuple of variant indices (one per factor)
        """
        variants = []
        for i in range(len(obs_tuple)):
            if i == 0:
                # Root factor always uses variant 0
                variants.append(jnp.array(0, dtype=jnp.int32))
            else:
                # Select based on parent's observed token
                parent_token = obs_tuple[i - 1]
                k_i = self.control_maps[i][parent_token]  # type: ignore
                variants.append(k_i)
        return tuple(variants)

    def get_required_params(self) -> dict[str, type]:
        """Return required parameters for sequential conditional structure."""
        return {"control_maps": tuple}
Args: @@ -82,9 +82,13 @@ def generate_data_batch_with_full_history( device: Optional target device for PyTorch tensors Returns: - Tuple of (next states, belief states, prefix probs, inputs, labels) + Dict with keys: + - belief_states: Belief states (jax.Array or tuple[jax.Array, ...]) + - prefix_probabilities: Prefix probabilities (jax.Array) + - inputs: Input tokens (torch.Tensor) + - labels: Label tokens (torch.Tensor) """ - next_states, belief_states, prefix_probs, inputs, labels = generate_jax_data_batch_with_full_history( + result = generate_jax_data_batch_with_full_history( gen_states, data_generator, batch_size, @@ -93,4 +97,15 @@ def generate_data_batch_with_full_history( bos_token, eos_token, ) - return next_states, belief_states, prefix_probs, jax_to_torch(inputs, device), jax_to_torch(labels, device) + # Extract inputs and labels (these are always jax.Arrays) + inputs = result["inputs"] + labels = result["labels"] + assert isinstance(inputs, jax.Array) + assert isinstance(labels, jax.Array) + + return { + "belief_states": result["belief_states"], + "prefix_probabilities": result["prefix_probabilities"], + "inputs": jax_to_torch(inputs, device), + "labels": jax_to_torch(labels, device), + } diff --git a/simplexity/utils/analysis_utils.py b/simplexity/utils/analysis_utils.py index 2bd76c47..df78c860 100644 --- a/simplexity/utils/analysis_utils.py +++ b/simplexity/utils/analysis_utils.py @@ -38,6 +38,24 @@ def dedup_tensor_first( return jnp.stack(values, axis=0), prefixes +def dedup_tuple_of_tensors_first( + tensors: tuple[jax.Array, ...], + prefix_to_indices: dict[tuple[int, ...], list[tuple[int, int]]], +) -> tuple[tuple[jax.Array, ...], list[tuple[int, ...]]]: + """Deduplicate a tuple of (batch, seq_len, ...) 
def dedup_tuple_of_tensors_first(
    tensors: tuple[jax.Array, ...],
    prefix_to_indices: dict[tuple[int, ...], list[tuple[int, int]]],
) -> tuple[tuple[jax.Array, ...], list[tuple[int, ...]]]:
    """Deduplicate a tuple of (batch, seq_len, ...) tensors by prefixes, taking the first occurrence in each tuple.

    Args:
        tensors: Tuple of arrays, each of shape (batch, seq_len, ...).
        prefix_to_indices: Maps each prefix to the list of (seq_idx, pos)
            locations where it occurs; the first entry is kept.

    Returns:
        Tuple of (deduplicated tensors, prefixes). Each deduplicated tensor
        has shape (num_prefixes, ...), with rows ordered to match the
        returned prefix list.
    """
    # Hoist the first-occurrence lookup out of the per-tensor loop so it is
    # computed once rather than once per tensor.
    first_occurrences = [idxs[0] for idxs in prefix_to_indices.values()]

    combined_values = tuple(
        jnp.stack([tensor[seq_idx, pos] for seq_idx, pos in first_occurrences], axis=0) for tensor in tensors
    )

    return combined_values, list(prefix_to_indices.keys())
probs: jax.Array activations_by_layer: dict[str, jax.Array] def build_deduplicated_dataset( inputs: jax.Array, - beliefs: jax.Array, + beliefs: jax.Array | tuple[jax.Array, ...], probs: jax.Array, activations_by_layer: dict[str, jax.Array], select_last_token: bool = False, @@ -163,14 +199,18 @@ def build_deduplicated_dataset( def build_prefix_dataset( inputs: jax.Array, - beliefs: jax.Array, + beliefs: jax.Array | tuple[jax.Array, ...], probs: jax.Array, activations_by_layer: dict[str, jax.Array], ) -> DeduplicatedDataset: """Deduplicate everything by prefix.""" prefix_to_indices = make_prefix_groups(inputs) - dedup_beliefs, prefixes = dedup_tensor_first(beliefs, prefix_to_indices) + dedup_beliefs, prefixes = ( + dedup_tensor_first(beliefs, prefix_to_indices) + if isinstance(beliefs, jax.Array) + else dedup_tuple_of_tensors_first(beliefs, prefix_to_indices) + ) dedup_probs, prefixes2 = dedup_probs_sum(probs, prefix_to_indices) if prefixes != prefixes2: @@ -193,18 +233,25 @@ def build_prefix_dataset( def build_last_token_dataset( inputs: jax.Array, - beliefs: jax.Array, + beliefs: jax.Array | tuple[jax.Array, ...], probs: jax.Array, activations_by_layer: dict[str, jax.Array], ) -> DeduplicatedDataset: """Deduplicate everything by full sequence.""" - beliefs = beliefs[:, -1, :] + if isinstance(beliefs, tuple): + beliefs = tuple(b[:, -1, :] for b in beliefs) + else: + beliefs = beliefs[:, -1, :] probs = probs[:, -1] activations_by_layer = {name: acts[:, -1, :] for name, acts in activations_by_layer.items()} sequence_to_indices = make_sequence_groups(inputs) # Dedup beliefs & probs - dedup_beliefs, sequences = dedup_last_token_tensor_first(beliefs, sequence_to_indices) + dedup_beliefs, sequences = ( + dedup_last_token_tensor_first(beliefs, sequence_to_indices) + if isinstance(beliefs, jax.Array) + else dedup_last_token_tuple_of_tensors_first(beliefs, sequence_to_indices) + ) dedup_probs, sequences2 = dedup_last_token_probs_sum(probs, sequence_to_indices) if sequences 
def compute_obs_dist_for_variant(
    component_type: ComponentType,
    state: jax.Array,
    transition_matrix: jax.Array,
    normalizing_eigenvector: jax.Array | None = None,
) -> jax.Array:
    """Observation distribution of a single factor variant.

    Args:
        component_type: "hmm" or "ghmm"
        state: State vector of shape [S]
        transition_matrix: Transition tensor of shape [V, S, S]
        normalizing_eigenvector: Shape [S]; required for GHMM, ignored for HMM.

    Returns:
        Distribution over observations, shape [V]

    Raises:
        ValueError: For "ghmm" when no normalizing eigenvector is supplied.
    """
    if component_type != "hmm":  # ghmm
        # GHMM: project through the eigenvector and renormalize by it
        if normalizing_eigenvector is None:
            raise ValueError("GHMM requires normalizing_eigenvector")
        weighted = state @ transition_matrix @ normalizing_eigenvector  # [V]
        return weighted / jnp.sum(state * normalizing_eigenvector)
    # HMM: propagate the state through every symbol's transition and
    # marginalize over the next hidden state
    return (state @ transition_matrix).sum(axis=1)  # [V]
def transition_with_obs(
    component_type: ComponentType,
    state: jax.Array,
    transition_matrix: jax.Array,
    obs: jax.Array,
    normalizing_eigenvector: jax.Array | None = None,
) -> jax.Array:
    """Propagate a factor state through the transition of an observed token.

    Args:
        component_type: "hmm" or "ghmm"
        state: Current state vector of shape [S]
        transition_matrix: Transition tensor of shape [V, S, S]
        obs: Observed token (scalar int)
        normalizing_eigenvector: Shape [S]; required for GHMM, ignored for HMM.

    Returns:
        New normalized state vector of shape [S]

    Raises:
        ValueError: For "ghmm" when no normalizing eigenvector is supplied.
    """
    propagated = state @ transition_matrix[obs]  # unnormalized next state, [S]

    if component_type == "hmm":
        # HMM states are probability vectors: renormalize by total mass
        normalizer = jnp.sum(propagated)
    else:  # ghmm
        # GHMM states renormalize against the eigenvector inner product
        if normalizing_eigenvector is None:
            raise ValueError("GHMM requires normalizing_eigenvector")
        normalizer = propagated @ normalizing_eigenvector

    return propagated / normalizer
        Args:
            vocab_sizes: Array of shape [F] with vocabulary size per factor
        """
        self.vocab_sizes = jnp.asarray(vocab_sizes)

        # Compute radix multipliers: multipliers[i] = prod(vocab_sizes[i+1:]),
        # so factor 0 is the most-significant digit. Built with Python ints so
        # the array is concrete at construction time (safe to close over in jit).
        f = len(vocab_sizes)
        multipliers = []
        for i in range(f):
            m = 1
            for j in range(i + 1, f):
                m *= int(vocab_sizes[j])
            multipliers.append(m)
        self.radix_multipliers = jnp.array(multipliers)

    @property
    def num_factors(self) -> int:
        """Number of factors."""
        return int(self.vocab_sizes.shape[0])

    @property
    def composite_vocab_size(self) -> int:
        """Total vocabulary size of composite observation (prod of factor sizes)."""
        return int(jnp.prod(self.vocab_sizes))

    def tuple_to_token(self, token_tuple: tuple[jax.Array, ...]) -> jax.Array:
        """Convert per-factor tokens to composite token.

        Args:
            token_tuple: Tuple of f scalar arrays, each in [0, V_i)

        Returns:
            Scalar array with composite token in [0, prod(V_i))
        """
        # Horner-style accumulation from the least-significant (last) factor;
        # equivalent to sum(token_tuple[i] * radix_multipliers[i]).
        token = jnp.array(0)
        multiplier = jnp.array(1)
        for i in reversed(range(len(token_tuple))):
            token += token_tuple[i] * multiplier
            multiplier *= self.vocab_sizes[i]
        return token

    def token_to_tuple(self, token: chex.Array) -> tuple[jax.Array, ...]:
        """Convert composite token to per-factor tokens (inverse of tuple_to_token).

        Args:
            token: Scalar array with composite token

        Returns:
            Tuple of f scalar arrays with per-factor tokens
        """
        # Peel off digits least-significant first, then reverse to factor order.
        result = []
        remaining = jnp.array(token)
        for i in reversed(range(self.num_factors)):
            v = self.vocab_sizes[i]
            t_i = remaining % v
            result.append(t_i)
            remaining = remaining // v
        return tuple(reversed(result))

    def extract_factors_vectorized(self, tokens: jax.Array) -> jax.Array:
        """Extract per-factor tokens from batch of composite tokens.

        Args:
            tokens: Array of shape [n] with composite tokens (scalars are
                promoted to shape [1])

        Returns:
            Array of shape [n, f] with per-factor tokens
        """
        tokens = jnp.atleast_1d(tokens)
        # Integer-divide by each factor's place value, then mod by its vocab
        # size: one vectorized pass over all factors and all tokens.
        return (tokens[:, None] // self.radix_multipliers[None, :]) % self.vocab_sizes[None, :]
test_basic_regression(self, synthetic_data): assert "layer_1_projected" in projections assert prepared.belief_states is not None + assert isinstance(prepared.belief_states, jax.Array) assert projections["layer_0_projected"].shape == prepared.belief_states.shape assert projections["layer_1_projected"].shape == prepared.belief_states.shape @@ -328,6 +333,7 @@ def test_basic_regression_svd(self, synthetic_data): assert "layer_1_projected" in projections assert prepared.belief_states is not None + assert isinstance(prepared.belief_states, jax.Array) assert projections["layer_0_projected"].shape == prepared.belief_states.shape assert projections["layer_1_projected"].shape == prepared.belief_states.shape @@ -654,3 +660,314 @@ def test_tracker_accepts_torch_inputs(self, synthetic_data): assert "regression/layer_0_r2" in scalars assert "pca/layer_0_pca" in projections + + +class TestTupleBeliefStates: + """Test activation tracker with tuple belief states for factored processes.""" + + @pytest.fixture + def factored_belief_data(self): + """Create synthetic data with factored belief states.""" + batch_size = 4 + seq_len = 5 + d_layer0 = 8 + d_layer1 = 12 + + inputs = jnp.array( + [ + [1, 2, 3, 4, 5], + [1, 2, 3, 6, 7], + [1, 2, 8, 9, 10], + [1, 2, 3, 4, 11], + ] + ) + + # Factored beliefs: 2 factors with dimensions 3 and 2 + factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.3 + factor_1 = jnp.ones((batch_size, seq_len, 2)) * 0.7 + factored_beliefs = (factor_0, factor_1) + + probs = jnp.ones((batch_size, seq_len)) * 0.1 + + activations = { + "layer_0": jnp.ones((batch_size, seq_len, d_layer0)) * 0.3, + "layer_1": jnp.ones((batch_size, seq_len, d_layer1)) * 0.7, + } + + return { + "inputs": inputs, + "factored_beliefs": factored_beliefs, + "probs": probs, + "activations": activations, + "batch_size": batch_size, + "seq_len": seq_len, + "factor_0_dim": 3, + "factor_1_dim": 2, + "d_layer0": d_layer0, + "d_layer1": d_layer1, + } + + def 
test_prepare_activations_accepts_tuple_beliefs(self, factored_belief_data): + """prepare_activations should accept and preserve tuple belief states.""" + result = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + + batch_size = factored_belief_data["batch_size"] + assert result.belief_states[0].shape == (batch_size, factored_belief_data["factor_0_dim"]) + assert result.belief_states[1].shape == (batch_size, factored_belief_data["factor_1_dim"]) + + def test_prepare_activations_tuple_beliefs_all_tokens(self, factored_belief_data): + """Tuple beliefs should work with all tokens mode.""" + result = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=False, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + + # With deduplication, we expect fewer samples than batch_size * seq_len + n_prefixes = result.belief_states[0].shape[0] + assert result.belief_states[0].shape == (n_prefixes, factored_belief_data["factor_0_dim"]) + assert result.belief_states[1].shape == (n_prefixes, factored_belief_data["factor_1_dim"]) + assert result.activations["layer_0"].shape[0] == n_prefixes + + def test_prepare_activations_torch_tuple_beliefs(self, factored_belief_data): + """prepare_activations should accept tuple of PyTorch tensors.""" + torch = pytest.importorskip("torch") + + torch_factor_0 = 
torch.tensor(np.asarray(factored_belief_data["factored_beliefs"][0])) + torch_factor_1 = torch.tensor(np.asarray(factored_belief_data["factored_beliefs"][1])) + torch_beliefs = (torch_factor_0, torch_factor_1) + + result = prepare_activations( + factored_belief_data["inputs"], + torch_beliefs, + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + # Should be converted to JAX arrays + assert isinstance(result.belief_states[0], jnp.ndarray) + assert isinstance(result.belief_states[1], jnp.ndarray) + + def test_prepare_activations_numpy_tuple_beliefs(self, factored_belief_data): + """prepare_activations should accept tuple of numpy arrays.""" + np_factor_0 = np.asarray(factored_belief_data["factored_beliefs"][0]) + np_factor_1 = np.asarray(factored_belief_data["factored_beliefs"][1]) + np_beliefs = (np_factor_0, np_factor_1) + + result = prepare_activations( + factored_belief_data["inputs"], + np_beliefs, + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + # Should be converted to JAX arrays + assert isinstance(result.belief_states[0], jnp.ndarray) + assert isinstance(result.belief_states[1], jnp.ndarray) + + def test_linear_regression_with_to_factors_true(self, factored_belief_data): + """LinearRegressionAnalysis with to_factors=True should regress to each factor separately.""" + analysis = LinearRegressionAnalysis(to_factors=True) + + prepared = prepare_activations( + factored_belief_data["inputs"], + 
factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + scalars, projections = analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + # Should have separate metrics for each factor + # Format is: layer_name_factor_idx/metric_name + assert "layer_0_factor_0/r2" in scalars + assert "layer_0_factor_1/r2" in scalars + assert "layer_0_factor_0/rmse" in scalars + assert "layer_0_factor_1/rmse" in scalars + assert "layer_0_factor_0/mae" in scalars + assert "layer_0_factor_1/mae" in scalars + assert "layer_0_factor_0/dist" in scalars + assert "layer_0_factor_1/dist" in scalars + + assert "layer_1_factor_0/r2" in scalars + assert "layer_1_factor_1/r2" in scalars + + # Should have separate projections for each factor + assert "layer_0_factor_0/projected" in projections + assert "layer_0_factor_1/projected" in projections + assert "layer_1_factor_0/projected" in projections + assert "layer_1_factor_1/projected" in projections + + # Check projection shapes + batch_size = factored_belief_data["batch_size"] + assert projections["layer_0_factor_0/projected"].shape == (batch_size, factored_belief_data["factor_0_dim"]) + assert projections["layer_0_factor_1/projected"].shape == (batch_size, factored_belief_data["factor_1_dim"]) + + def test_linear_regression_svd_with_to_factors_true(self, factored_belief_data): + """LinearRegressionSVDAnalysis with to_factors=True should regress to each factor separately.""" + analysis = LinearRegressionSVDAnalysis(to_factors=True, rcond_values=[1e-10]) + + prepared = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + 
concat_layers=False, + use_probs_as_weights=False, + ), + ) + + scalars, projections = analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + # Should have separate metrics for each factor including best_rcond + assert "layer_0_factor_0/r2" in scalars + assert "layer_0_factor_1/r2" in scalars + assert "layer_0_factor_0/best_rcond" in scalars + assert "layer_0_factor_1/best_rcond" in scalars + + # Should have separate projections for each factor + assert "layer_0_factor_0/projected" in projections + assert "layer_0_factor_1/projected" in projections + + def test_tracker_with_factored_beliefs(self, factored_belief_data): + """ActivationTracker should work with tuple belief states.""" + tracker = ActivationTracker( + { + "regression": LinearRegressionAnalysis( + last_token_only=True, + concat_layers=False, + to_factors=True, + ), + "pca": PcaAnalysis( + n_components=2, + last_token_only=True, + concat_layers=False, + ), + } + ) + + scalars, projections = tracker.analyze( + inputs=factored_belief_data["inputs"], + beliefs=factored_belief_data["factored_beliefs"], + probs=factored_belief_data["probs"], + activations=factored_belief_data["activations"], + ) + + # Regression should have per-factor metrics + assert "regression/layer_0_factor_0/r2" in scalars + assert "regression/layer_0_factor_1/r2" in scalars + + # PCA should still work (doesn't use belief states) + assert "pca/layer_0_variance_explained" in scalars + + # Projections should be present + assert "regression/layer_0_factor_0/projected" in projections + assert "regression/layer_0_factor_1/projected" in projections + assert "pca/layer_0_pca" in projections + + def test_single_factor_tuple(self, synthetic_data): + """Test with a single-factor tuple (edge case).""" + # Create single-factor tuple + single_factor = (synthetic_data["beliefs"],) + + result = prepare_activations( + synthetic_data["inputs"], + single_factor, + 
synthetic_data["probs"], + synthetic_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 1 + assert result.belief_states[0].shape == (synthetic_data["batch_size"], synthetic_data["belief_dim"]) + + def test_three_factor_tuple(self, factored_belief_data): + """Test with three factors to ensure generalization.""" + batch_size = factored_belief_data["batch_size"] + seq_len = factored_belief_data["seq_len"] + + # Add a third factor + factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.3 + factor_1 = jnp.ones((batch_size, seq_len, 2)) * 0.5 + factor_2 = jnp.ones((batch_size, seq_len, 4)) * 0.7 + three_factor_beliefs = (factor_0, factor_1, factor_2) + + result = prepare_activations( + factored_belief_data["inputs"], + three_factor_beliefs, + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 3 + assert result.belief_states[0].shape == (batch_size, 3) + assert result.belief_states[1].shape == (batch_size, 2) + assert result.belief_states[2].shape == (batch_size, 4) diff --git a/tests/analysis/test_layerwise_analysis.py b/tests/analysis/test_layerwise_analysis.py index c0d1a839..7524647d 100644 --- a/tests/analysis/test_layerwise_analysis.py +++ b/tests/analysis/test_layerwise_analysis.py @@ -1,5 +1,6 @@ """Tests for the LayerwiseAnalysis orchestrator.""" +import jax import jax.numpy as jnp import pytest @@ -7,7 +8,7 @@ @pytest.fixture -def analysis_inputs() -> tuple[dict[str, jnp.ndarray], jnp.ndarray, jnp.ndarray]: +def analysis_inputs() -> tuple[dict[str, jax.Array], jax.Array, jax.Array]: 
"""Provides sample activations, weights, and belief states for analysis tests.""" activations = { "layer_a": jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]), @@ -161,3 +162,30 @@ def test_layerwise_analysis_property_accessors() -> None: assert analysis.concat_layers assert not analysis.use_probs_as_weights assert not analysis.requires_belief_states + + +def test_linear_regression_accepts_to_factors() -> None: + """linear_regression validator should accept to_factors parameter.""" + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"fit_intercept": False, "to_factors": True}) + + assert params["fit_intercept"] is False + assert params["to_factors"] is True + + +def test_linear_regression_svd_accepts_to_factors() -> None: + """linear_regression_svd validator should accept to_factors parameter.""" + validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator + params = validator({"fit_intercept": True, "to_factors": True, "rcond_values": [1e-3]}) + + assert params["fit_intercept"] is True + assert params["to_factors"] is True + assert params["rcond_values"] == (0.001,) + + +def test_linear_regression_to_factors_defaults_false() -> None: + """to_factors should default to False when not provided.""" + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"fit_intercept": True}) + + assert params["to_factors"] is False diff --git a/tests/analysis/test_linear_regression.py b/tests/analysis/test_linear_regression.py index f3413429..bca9f6bf 100644 --- a/tests/analysis/test_linear_regression.py +++ b/tests/analysis/test_linear_regression.py @@ -183,3 +183,141 @@ def test_layer_linear_regression_svd_runs_end_to_end() -> None: assert pytest.approx(1.0, abs=1e-6) == scalars["r2"] chex.assert_trees_all_close(projections["projected"], beliefs) + + +def test_layer_linear_regression_to_factors_basic() -> None: + """Layer regression with to_factors should regress to each factor separately.""" + x = 
jnp.arange(12.0).reshape(4, 3) # 4 samples, 3 features + weights = jnp.ones(4) / 4.0 + + # Two factors: factor 0 has 2 states, factor 1 has 3 states + factor_0 = jnp.array([[0.3, 0.7], [0.5, 0.5], [0.8, 0.2], [0.1, 0.9]]) # [4, 2] + factor_1 = jnp.array([[0.2, 0.3, 0.5], [0.1, 0.6, 0.3], [0.4, 0.4, 0.2], [0.3, 0.3, 0.4]]) # [4, 3] + factored_beliefs = (factor_0, factor_1) + + scalars, projections = layer_linear_regression( + x, + weights, + factored_beliefs, + to_factors=True, + ) + + # Should have separate metrics for each factor + assert "factor_0/r2" in scalars + assert "factor_1/r2" in scalars + assert "factor_0/rmse" in scalars + assert "factor_1/rmse" in scalars + assert "factor_0/mae" in scalars + assert "factor_1/mae" in scalars + assert "factor_0/dist" in scalars + assert "factor_1/dist" in scalars + + # Should have separate projections for each factor + assert "factor_0/projected" in projections + assert "factor_1/projected" in projections + + # Check shapes + assert projections["factor_0/projected"].shape == factor_0.shape + assert projections["factor_1/projected"].shape == factor_1.shape + + +def test_layer_linear_regression_svd_to_factors_basic() -> None: + """Layer regression SVD with to_factors should regress to each factor separately.""" + x = jnp.arange(12.0).reshape(4, 3) # 4 samples, 3 features + weights = jnp.ones(4) / 4.0 + + # Two factors: factor 0 has 2 states, factor 1 has 3 states + factor_0 = jnp.array([[0.3, 0.7], [0.5, 0.5], [0.8, 0.2], [0.1, 0.9]]) # [4, 2] + factor_1 = jnp.array([[0.2, 0.3, 0.5], [0.1, 0.6, 0.3], [0.4, 0.4, 0.2], [0.3, 0.3, 0.4]]) # [4, 3] + factored_beliefs = (factor_0, factor_1) + + scalars, projections = layer_linear_regression_svd( + x, + weights, + factored_beliefs, + to_factors=True, + rcond_values=[1e-6], + ) + + # Should have separate metrics for each factor including best_rcond + assert "factor_0/r2" in scalars + assert "factor_1/r2" in scalars + assert "factor_0/best_rcond" in scalars + assert 
"factor_1/best_rcond" in scalars + + # Should have separate projections for each factor + assert "factor_0/projected" in projections + assert "factor_1/projected" in projections + + # Check shapes + assert projections["factor_0/projected"].shape == factor_0.shape + assert projections["factor_1/projected"].shape == factor_1.shape + + +def test_layer_linear_regression_to_factors_single_factor() -> None: + """to_factors=True should work with a single factor tuple.""" + x = jnp.arange(9.0).reshape(3, 3) + weights = jnp.ones(3) / 3.0 + + # Single factor in tuple + factor_0 = jnp.array([[0.3, 0.7], [0.5, 0.5], [0.8, 0.2]]) + factored_beliefs = (factor_0,) + + scalars, projections = layer_linear_regression( + x, + weights, + factored_beliefs, + to_factors=True, + ) + + # Should have metrics for single factor + assert "factor_0/r2" in scalars + assert "factor_0/projected" in projections + assert projections["factor_0/projected"].shape == factor_0.shape + + +def test_layer_linear_regression_to_factors_requires_tuple() -> None: + """to_factors=True requires belief_states to be a tuple.""" + x = jnp.ones((3, 2)) + weights = jnp.ones(3) / 3.0 + beliefs_array = jnp.ones((3, 2)) + + with pytest.raises(ValueError, match="belief_states must be a tuple when to_factors is True"): + layer_linear_regression(x, weights, beliefs_array, to_factors=True) + + with pytest.raises(ValueError, match="belief_states must be a tuple when to_factors is True"): + layer_linear_regression_svd(x, weights, beliefs_array, to_factors=True) + + +def test_layer_linear_regression_to_factors_validates_tuple_contents() -> None: + """to_factors=True requires all elements in tuple to be jax.Arrays.""" + x = jnp.ones((3, 2)) + weights = jnp.ones(3) / 3.0 + + # Invalid: tuple contains non-array + invalid_beliefs = (jnp.ones((3, 2)), "not an array") # type: ignore + + with pytest.raises(ValueError, match="Each factor in belief_states must be a jax.Array"): + layer_linear_regression(x, weights, invalid_beliefs, 
 to_factors=True)  # type: ignore
+
+    with pytest.raises(ValueError, match="Each factor in belief_states must be a jax.Array"):
+        layer_linear_regression_svd(x, weights, invalid_beliefs, to_factors=True)  # type: ignore
+
+
+def test_layer_linear_regression_to_factors_false_works() -> None:
+    """to_factors=False should accept a tuple of factors and concatenate them into a single array."""
+    x = jnp.ones((3, 2))
+    weights = jnp.ones(3) / 3.0
+
+    # Valid: with to_factors=False a tuple of factors is concatenated along the feature axis
+    factored_beliefs = (jnp.ones((3, 2)), jnp.ones((3, 3)))
+
+    scalars, projections = layer_linear_regression(x, weights, factored_beliefs, to_factors=False)
+    assert "r2" in scalars
+    assert "projected" in projections
+    assert projections["projected"].shape == (3, 5)
+
+    scalars, projections = layer_linear_regression_svd(x, weights, factored_beliefs, to_factors=False)
+    assert "r2" in scalars
+    assert "projected" in projections
+    assert projections["projected"].shape == (3, 5)
diff --git a/tests/end_to_end/configs/activation_tracker/default.yaml b/tests/end_to_end/configs/activation_tracker/default.yaml
index 1950e7ee..5158ba6d 100644
--- a/tests/end_to_end/configs/activation_tracker/default.yaml
+++ b/tests/end_to_end/configs/activation_tracker/default.yaml
@@ -17,4 +17,4 @@ instance:
     last_token_only: false
     concat_layers: true
     use_probs_as_weights: true
-    rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2]
+    rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2]
\ No newline at end of file
diff --git a/tests/end_to_end/configs/generative_process/unified_chain_3mess3_2tomq.yaml b/tests/end_to_end/configs/generative_process/unified_chain_3mess3_2tomq.yaml
new file mode 100644
index 00000000..d15066cc
--- /dev/null
+++ b/tests/end_to_end/configs/generative_process/unified_chain_3mess3_2tomq.yaml
@@ -0,0 +1,108 @@
+# Unified API: Sequential Conditional Structure - 3 mess3 + 2 tom_quantum
+# Factor i depends on Factor i-1's emitted token (one-way conditional dependencies)
+
+name: unified_chain_3mess3_2tomq
+base_vocab_size: ??? # Will auto-infer as (3^3) * (4^2) = 27 * 16 = 432 +vocab_size: ??? + +instance: + _target_: simplexity.generative_processes.builder.build_factored_process_from_spec + + structure_type: chain + + spec: + # Factor 0: mess3 (unconditional, single variant) + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + + # Factor 1: mess3 (4 variants based on Factor 0's token) + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.11 + a: 0.79 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.21 + control_map: [2, 3, 0] # 3 parent tokens -> 4 variants + + # Factor 2: mess3 (4 variants based on Factor 1's token) + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.11 + a: 0.79 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.21 + control_map: [1, 3, 2] # 3 parent tokens -> 4 variants + + # Factor 3: tom_quantum (4 variants based on Factor 2's token) + - component_type: ghmm + variants: + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 1.0 + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 4.0 + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 7.141428 # sqrt(51) + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 2.0 + control_map: [2, 0, 3] # 3 parent tokens -> 4 variants + + # Factor 4: tom_quantum (4 variants based on Factor 3's token) + - component_type: ghmm + variants: + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 1.0 + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 4.0 + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 7.141428 + - process_name: tom_quantum + 
 process_params:
+            alpha: 1.0
+            beta: 2.0
+      control_map: [1, 3, 0, 2] # 4 parent tokens -> 4 variants
+
+bos_token: ???
+eos_token: null
+batch_size: 1024
\ No newline at end of file
diff --git a/tests/end_to_end/configs/generative_process/unified_chain_example.yaml b/tests/end_to_end/configs/generative_process/unified_chain_example.yaml
new file mode 100644
index 00000000..7f034cd6
--- /dev/null
+++ b/tests/end_to_end/configs/generative_process/unified_chain_example.yaml
@@ -0,0 +1,57 @@
+# Unified API: Sequential Conditional Structure
+# Sequential conditional structure creates one-way conditional dependencies:
+#   Factor 0 -> Factor 1 -> Factor 2 -> ...
+# Each factor i>0 selects its parameter variant based on factor i-1's token.
+
+name: unified_chain_example
+base_vocab_size: ??? # Will auto-infer as 3 * 3 * 3 = 27 (each mess3 factor has vocab 3)
+vocab_size: ???
+
+instance:
+  _target_: simplexity.generative_processes.builder.build_factored_process_from_spec
+
+  structure_type: chain
+
+  spec:
+    # Factor 0: mess3 (root, single variant)
+    - component_type: hmm
+      variants:
+        - process_name: mess3
+          process_params:
+            x: 0.15
+            a: 0.6
+
+    # Factor 1: mess3 (depends on Factor 0, 2 variants)
+    - component_type: hmm
+      variants:
+        - process_name: mess3
+          process_params:
+            x: 0.15
+            a: 0.6
+        - process_name: mess3
+          process_params:
+            x: 0.5
+            a: 0.6
+      control_map: [0, 1, 0] # 3 parent tokens -> 2 variants
+
+    # Factor 2: mess3 (depends on Factor 1, 2 variants)
+    - component_type: hmm
+      variants:
+        - process_name: mess3
+          process_params:
+            x: 0.15
+            a: 0.6
+        - process_name: mess3
+          process_params:
+            x: 0.5
+            a: 0.6
+      control_map: [1, 0, 1] # 3 parent tokens -> 2 variants
+
+bos_token: ???
+eos_token: null +batch_size: 1024 + +# Example interpretation: +# - Factor 0 always uses variant 0 +# - Factor 1: parent_token 0->variant 0, token 1->variant 1, token 2->variant 0 +# - Factor 2: parent_token 0->variant 1, token 1->variant 0, token 2->variant 1 \ No newline at end of file diff --git a/tests/end_to_end/configs/generative_process/unified_independent_example.yaml b/tests/end_to_end/configs/generative_process/unified_independent_example.yaml new file mode 100644 index 00000000..8abf8aa2 --- /dev/null +++ b/tests/end_to_end/configs/generative_process/unified_independent_example.yaml @@ -0,0 +1,53 @@ +# Unified API: Independent Structure +# Independent structure has no conditional dependencies: +# Each factor operates independently, always using variant 0. +# Joint distribution is simply the product of independent factor distributions. + +name: unified_independent_example +base_vocab_size: ??? # Will auto-infer as 3 * 3 * 3 = 27 +vocab_size: ??? + +instance: + _target_: simplexity.generative_processes.builder.build_factored_process_from_spec + + # Structure type - determines how factors interact + structure_type: independent + + # Component specifications + # Each factor has exactly one variant (independent factors don't need multiple variants) + spec: + # Factor 0: mess3 process + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + + # Factor 1: mess3 process + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + + # Factor 2: mess3 process + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.2 + a: 0.7 + +bos_token: ??? 
+eos_token: null +batch_size: 1024 + +# Example interpretation: +# - Each factor operates completely independently +# - Factor 0 always emits from mess3(x=0.15, a=0.6) +# - Factor 1 always emits from mess3(x=0.5, a=0.6) +# - Factor 2 always emits from mess3(x=0.2, a=0.7) +# - Joint distribution: P(t0,t1,t2) = P(t0) * P(t1) * P(t2) +# - No control maps needed - factors don't depend on each other diff --git a/tests/end_to_end/configs/generative_process/unified_symmetric_example.yaml b/tests/end_to_end/configs/generative_process/unified_symmetric_example.yaml new file mode 100644 index 00000000..b13fb2f7 --- /dev/null +++ b/tests/end_to_end/configs/generative_process/unified_symmetric_example.yaml @@ -0,0 +1,100 @@ +# Unified API: Fully Conditional Structure +# Fully conditional structure creates mutual dependencies: +# Each factor's variant selection depends on ALL OTHER factors' tokens. +# Uses product-of-experts to compute joint distribution. + +name: unified_symmetric_example +base_vocab_size: ??? # Will auto-infer as 3 * 3 * 3 = 27 +vocab_size: ??? 
+ +instance: + _target_: simplexity.generative_processes.builder.build_factored_process_from_spec + + structure_type: symmetric + + # Component specifications + spec: + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.11 + a: 0.79 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.21 + + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.11 + a: 0.79 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.21 + + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.11 + a: 0.79 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.21 + + # Control maps: define how other-tokens select variant + # control_maps[i] has shape [prod(V_j for j!=i)] + # For 3 factors with vocab_sizes [3,3,3]: + # - Factor 0 depends on (t1, t2): 3*3=9 combinations + # - Factor 1 depends on (t0, t2): 3*3=9 combinations + # - Factor 2 depends on (t0, t1): 3*3=9 combinations + # Each maps to one of K_i=4 variants + control_maps: + # Factor 0: 9 other-token combinations -> 4 variants + - [0, 1, 2, 3, 0, 1, 2, 3, 0] + + # Factor 1: 9 other-token combinations -> 4 variants + - [1, 2, 3, 0, 1, 2, 3, 0, 1] + + # Factor 2: 9 other-token combinations -> 4 variants + - [2, 3, 0, 1, 2, 3, 0, 1, 2] + +bos_token: ??? 
+eos_token: null +batch_size: 1024 + +# Example interpretation: +# At each timestep, factors emit jointly using product-of-experts: +# - Factor 0 selects variant based on what tokens factors 1&2 will emit +# - Factor 1 selects variant based on what tokens factors 0&2 will emit +# - Factor 2 selects variant based on what tokens factors 0&1 will emit +# The joint distribution is computed, normalized, and sampled from. diff --git a/tests/end_to_end/configs/generative_process/unified_transition_and_emission_coupled_example.yaml b/tests/end_to_end/configs/generative_process/unified_transition_and_emission_coupled_example.yaml new file mode 100644 index 00000000..e171167d --- /dev/null +++ b/tests/end_to_end/configs/generative_process/unified_transition_and_emission_coupled_example.yaml @@ -0,0 +1,93 @@ +# Unified API: Conditional Transitions Structure +# Conditional transitions structure separates emission and transition dependencies: +# - Emissions: Can be independent OR sequential (factor i depends on i-1's token) +# - Transitions: Always mutually conditional (factor i's transition depends on all other factors) + +name: unified_transition_and_emission_coupled_example +base_vocab_size: ??? # Will auto-infer as 3 * 3 * 4 = 36 +vocab_size: ??? 
+
+instance:
+  _target_: simplexity.generative_processes.builder.build_factored_process_from_spec
+
+  structure_type: transition_coupled
+
+  # Component specifications
+  spec:
+    - component_type: hmm
+      variants:
+        - process_name: mess3
+          process_params:
+            x: 0.15
+            a: 0.6
+        - process_name: mess3
+          process_params:
+            x: 0.5
+            a: 0.6
+
+    - component_type: hmm
+      variants:
+        - process_name: mess3
+          process_params:
+            x: 0.15
+            a: 0.6
+        - process_name: mess3
+          process_params:
+            x: 0.5
+            a: 0.6
+
+    - component_type: ghmm
+      variants:
+        - process_name: tom_quantum
+          process_params:
+            alpha: 1.0
+            beta: 1.0
+        - process_name: tom_quantum
+          process_params:
+            alpha: 1.0
+            beta: 4.0
+
+  # Transition control maps: how other-factors' tokens select transition variant
+  # control_maps_transition[i] has shape [prod(V_j for j!=i)]
+  # For 3 factors with vocab_sizes [3,3,4]:
+  #   - Factor 0: depends on (t1,t2): 3*4=12 combinations -> 2 variants
+  #   - Factor 1: depends on (t0,t2): 3*4=12 combinations -> 2 variants
+  #   - Factor 2: depends on (t0,t1): 3*3=9 combinations -> 2 variants
+  control_maps_transition:
+    # Factor 0: 12 other-token combos -> 2 transition variants
+    - [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
+
+    # Factor 1: 12 other-token combos -> 2 transition variants
+    - [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0]
+
+    # Factor 2: 9 other-token combos -> 2 transition variants
+    - [0, 1, 0, 1, 0, 1, 0, 1, 0]
+
+  # Emission variant indices: which variant to use for emissions (shape [F])
+  # This uses independent emission mode (each factor uses fixed variant)
+  emission_variant_indices: [0, 0, 0]
+
+  # emission_control_maps for chain-style emissions
+  # If provided, emission mode becomes chain-style (factor i depends on previous tokens)
+  # Chain-style emissions are enabled in this example:
+  emission_control_maps:
+    - null # Factor 0: root
+    - [0, 1, 0] # Factor 1: depends on t0 (3 values) -> use emission variants
+    - [0, 1, 0, 1, 0, 1, 0, 1, 0] # Factor 2: depends on (t0,t1) (9 combos)
+
+bos_token: ??? +eos_token: null +batch_size: 1024 + +# Example interpretation (independent emission mode): +# - Emissions: Each factor independently emits using variant 0 +# P(t) = P0(t0|s0) * P1(t1|s1) * P2(t2|s2) +# - Transitions: After observing (t0,t1,t2): +# - Factor 0 selects transition variant based on (t1,t2) +# - Factor 1 selects transition variant based on (t0,t2) +# - Factor 2 selects transition variant based on (t0,t1) +# Each updates its state using the selected transition variant + +# If emission_control_maps is enabled (chain mode): +# - Emissions: P(t) = P0(t0|s0) * P1(t1|t0,s1) * P2(t2|t0,t1,s2) +# - Transitions: Same coupling as above diff --git a/tests/end_to_end/configs/generative_process/unified_transition_coupled_example.yaml b/tests/end_to_end/configs/generative_process/unified_transition_coupled_example.yaml new file mode 100644 index 00000000..ef3302e9 --- /dev/null +++ b/tests/end_to_end/configs/generative_process/unified_transition_coupled_example.yaml @@ -0,0 +1,93 @@ +# Unified API: Conditional Transitions Structure +# Conditional transitions structure separates emission and transition dependencies: +# - Emissions: Can be independent OR sequential (factor i depends on i-1's token) +# - Transitions: Always mutually conditional (factor i's transition depends on all other factors) + +name: unified_transition_coupled_example +base_vocab_size: ??? # Will auto-infer as 3 * 3 * 4 = 36 +vocab_size: ??? 
+ +instance: + _target_: simplexity.generative_processes.builder.build_factored_process_from_spec + + structure_type: transition_coupled + + # Component specifications + spec: + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + - process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + + - component_type: ghmm + variants: + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 1.0 + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 4.0 + + # Transition control maps: how other-factors' tokens select transition variant + # control_maps_transition[i] has shape [prod(V_j for j!=i)] + # For 3 factors with vocab_sizes [3,3,4]: + # - Factor 0: depends on (t1,t2): 3*4=12 combinations -> 2 variants + # - Factor 1: depends on (t0,t2): 3*4=12 combinations -> 2 variants + # - Factor 2: depends on (t0,t1): 3*3=9 combinations -> 2 variants + control_maps_transition: + # Factor 0: 12 other-token combos -> 2 transition variants + - [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1] + + # Factor 1: 12 other-token combos -> 2 transition variants + - [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0] + + # Factor 2: 9 other-token combos -> 2 transition variants + - [0, 1, 0, 1, 0, 1, 0, 1, 0] + + # Emission variant indices: which variant to use for emissions (shape [F]) + # This uses independent emission mode (each factor uses fixed variant) + emission_variant_indices: [0, 0, 0] + + # Optional: emission_control_maps for chain-style emissions + # If provided, emission mode becomes chain-style (factor i depends on previous tokens) + # Uncomment to enable chain emissions: + # emission_control_maps: + # - null # Factor 0: root + # - [0, 1, 0] # Factor 1: depends on t0 (3 values) -> use emission variants + # - [0, 1, 0, 1, 0, 1, 0, 1, 0] # Factor 2: depends on (t0,t1) (9 combos) + 
+bos_token: ??? +eos_token: null +batch_size: 1024 + +# Example interpretation (independent emission mode): +# - Emissions: Each factor independently emits using variant 0 +# P(t) = P0(t0|s0) * P1(t1|s1) * P2(t2|s2) +# - Transitions: After observing (t0,t1,t2): +# - Factor 0 selects transition variant based on (t1,t2) +# - Factor 1 selects transition variant based on (t0,t2) +# - Factor 2 selects transition variant based on (t0,t1) +# Each updates its state using the selected transition variant + +# If emission_control_maps is enabled (chain mode): +# - Emissions: P(t) = P0(t0|s0) * P1(t1|t0,s1) * P2(t2|t0,t1,s2) +# - Transitions: Same coupling as above diff --git a/tests/generative_processes/test_builder.py b/tests/generative_processes/test_builder.py index 402159d9..bd80643b 100644 --- a/tests/generative_processes/test_builder.py +++ b/tests/generative_processes/test_builder.py @@ -9,19 +9,34 @@ # (code quality, style, undefined names, etc.) to run normally while bypassing # the problematic imports checker that would crash during AST traversal. 
+import re + import chex import jax.numpy as jnp import pytest from simplexity.generative_processes.builder import ( add_begin_of_sequence_token, + build_chain_from_spec, + build_factored_process, + build_factored_process_from_spec, build_generalized_hidden_markov_model, build_hidden_markov_model, + build_matrices_from_spec, build_nonergodic_hidden_markov_model, build_nonergodic_initial_state, build_nonergodic_transition_matrices, + build_symmetric_from_spec, + build_transition_coupled_from_spec, build_transition_matrices, ) +from simplexity.generative_processes.factored_generative_process import FactoredGenerativeProcess +from simplexity.generative_processes.structures import ( + ConditionalTransitions, + FullyConditional, + IndependentStructure, + SequentialConditional, +) from simplexity.generative_processes.transition_matrices import HMM_MATRIX_FUNCTIONS from tests.generative_processes.test_transition_matrices import validate_hmm_transition_matrices @@ -220,3 +235,371 @@ def test_build_nonergodic_hidden_markov_model_bos(): chex.assert_trees_all_close(hmm.transition_matrices, expected_transition_matrices) assert hmm.initial_state.shape == (3,) chex.assert_trees_all_close(hmm.initial_state, jnp.array([0, 0, 1.0])) + + +@pytest.fixture +def components_spec(): + """Base specification for two HMM factors.""" + return [ + { + "component_type": "hmm", + "variants": [{"process_name": "coin", "process_params": {"p": 0.6}}], + }, + { + "component_type": "hmm", + "variants": [ + {"process_name": "coin", "process_params": {"p": 0.25}}, + {"process_name": "coin", "process_params": {"p": 0.75}}, + ], + }, + ] + + +@pytest.fixture +def chain_spec(): + """Chain specification with control map on the second factor.""" + return [ + { + "component_type": "hmm", + "variants": [{"process_name": "coin", "process_params": {"p": 0.6}}], + }, + { + "component_type": "hmm", + "variants": [ + {"process_name": "coin", "process_params": {"p": 0.25}}, + {"process_name": "coin", 
"process_params": {"p": 0.75}}, + ], + "control_map": [0, 1], + }, + ] + + +@pytest.fixture +def symmetric_control_maps(): + """Simple symmetric control maps fixture.""" + return [[0, 1], [1, 0]] + + +@pytest.fixture +def transition_coupled_inputs(): + """Control maps and indices for transition-coupled topology.""" + return ( + [[0, 1], [1, 0]], + [0, 1], + [None, [0, 1]], + ) + + +def test_build_matrices_from_spec_returns_consistent_arrays(components_spec): + """Factored specs should yield aligned parameter shapes.""" + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components_spec + ) + assert component_types == ["hmm", "hmm"] + assert transition_matrices[0].shape == (1, 2, 1, 1) + assert transition_matrices[1].shape[0] == 2 + assert normalizing_eigenvectors[1].shape == (2, transition_matrices[1].shape[2]) + for state in initial_states: + chex.assert_trees_all_close(jnp.sum(state), jnp.array(1.0)) + + +def test_build_chain_from_spec_returns_control_maps(chain_spec): + """build_chain_from_spec should return encoded control maps.""" + ( + component_types, + transition_matrices, + normalizing_eigenvectors, + initial_states, + control_maps, + ) = build_chain_from_spec(chain_spec) + assert component_types == ["hmm", "hmm"] + assert control_maps[0] is None + chex.assert_trees_all_close(control_maps[1], jnp.array([0, 1], dtype=jnp.int32)) + + +def test_build_chain_from_spec_missing_control_map_raises(components_spec): + """Every non-root node in a chain must provide a control map.""" + with pytest.raises(ValueError, match=re.escape("chain[1].control_map is required for i>0")): + build_chain_from_spec(components_spec) + + +def test_build_symmetric_from_spec_validates_lengths(components_spec, symmetric_control_maps): + """Symmetric control maps must cover every combination of other tokens.""" + ( + component_types, + transition_matrices, + normalizing_eigenvectors, + initial_states, + control_maps, + ) = 
build_symmetric_from_spec(components_spec, symmetric_control_maps) + assert component_types == ["hmm", "hmm"] + assert len(control_maps) == 2 + chex.assert_trees_all_close(control_maps[0], jnp.array([0, 1], dtype=jnp.int32)) + with pytest.raises(ValueError, match=re.escape("control_maps[0] length 1 must equal prod(V_j for j!=[0]) = 2")): + build_symmetric_from_spec(components_spec, [[0], [0, 1]]) + + +def test_build_transition_coupled_from_spec_handles_emission_maps(components_spec, transition_coupled_inputs): + """Transition-coupled specs should surface emission controls when provided.""" + result = build_transition_coupled_from_spec(components_spec, *transition_coupled_inputs) + ( + component_types, + transition_matrices, + normalizing_eigenvectors, + initial_states, + control_maps_transition, + emission_variant_indices, + emission_control_maps, + ) = result + assert component_types == ["hmm", "hmm"] + assert len(control_maps_transition) == 2 + assert emission_variant_indices.shape == (2,) + assert emission_control_maps is not None + assert emission_control_maps[0] is None + chex.assert_trees_all_close(emission_control_maps[1], jnp.array([0, 1], dtype=jnp.int32)) + + +def test_build_chain_process_from_spec_returns_factored_process(chain_spec): + """Unified builder with chain structure should return a FactoredGenerativeProcess.""" + process = build_factored_process_from_spec(structure_type="chain", spec=chain_spec) + assert isinstance(process, FactoredGenerativeProcess) + assert isinstance(process.structure, SequentialConditional) + assert process.vocab_size == 4 + + +def test_build_factored_process_dispatches_topologies( + chain_spec, components_spec, symmetric_control_maps, transition_coupled_inputs +): + """build_factored_process should route to the appropriate topology builder.""" + independent_components = build_matrices_from_spec(components_spec) + independent_process = build_factored_process( + structure_type="independent", + 
component_types=independent_components[0], + transition_matrices=independent_components[1], + normalizing_eigenvectors=independent_components[2], + initial_states=independent_components[3], + ) + assert isinstance(independent_process.structure, IndependentStructure) + + chain_components = build_chain_from_spec(chain_spec) + chain_process = build_factored_process( + structure_type="chain", + component_types=chain_components[0], + transition_matrices=chain_components[1], + normalizing_eigenvectors=chain_components[2], + initial_states=chain_components[3], + control_maps=chain_components[4], + ) + assert isinstance(chain_process.structure, SequentialConditional) + + symmetric_components = build_symmetric_from_spec(components_spec, symmetric_control_maps) + symmetric_process = build_factored_process( + structure_type="symmetric", + component_types=symmetric_components[0], + transition_matrices=symmetric_components[1], + normalizing_eigenvectors=symmetric_components[2], + initial_states=symmetric_components[3], + control_maps=symmetric_components[4], + ) + assert isinstance(symmetric_process.structure, FullyConditional) + + transition_components = build_transition_coupled_from_spec(components_spec, *transition_coupled_inputs) + transition_process = build_factored_process( + structure_type="transition_coupled", + component_types=transition_components[0], + transition_matrices=transition_components[1], + normalizing_eigenvectors=transition_components[2], + initial_states=transition_components[3], + control_maps_transition=transition_components[4], + emission_variant_indices=transition_components[5], + emission_control_maps=transition_components[6], + ) + assert isinstance(transition_process.structure, ConditionalTransitions) + assert transition_process.vocab_size == 4 + assert transition_process.structure.use_emission_chain is True + + +def test_build_factored_process_from_spec_independent(components_spec): + """Test unified builder with independent structure.""" + 
process = build_factored_process_from_spec( + structure_type="independent", + spec=components_spec, + ) + assert isinstance(process, FactoredGenerativeProcess) + assert isinstance(process.structure, IndependentStructure) + assert process.vocab_size == 4 + + +def test_build_factored_process_from_spec_chain(chain_spec): + """Test unified builder with chain structure.""" + process = build_factored_process_from_spec( + structure_type="chain", + spec=chain_spec, + ) + assert isinstance(process, FactoredGenerativeProcess) + assert isinstance(process.structure, SequentialConditional) + assert process.vocab_size == 4 + + +def test_build_factored_process_from_spec_symmetric(components_spec, symmetric_control_maps): + """Test unified builder with symmetric structure.""" + process = build_factored_process_from_spec( + structure_type="symmetric", + spec=components_spec, + control_maps=symmetric_control_maps, + ) + assert isinstance(process, FactoredGenerativeProcess) + assert isinstance(process.structure, FullyConditional) + assert process.vocab_size == 4 + + +def test_build_factored_process_from_spec_transition_coupled(components_spec, transition_coupled_inputs): + """Test unified builder with transition_coupled structure.""" + control_maps_transition, emission_variant_indices, emission_control_maps = transition_coupled_inputs + process = build_factored_process_from_spec( + structure_type="transition_coupled", + spec=components_spec, + control_maps_transition=control_maps_transition, + emission_variant_indices=emission_variant_indices, + emission_control_maps=emission_control_maps, + ) + assert isinstance(process, FactoredGenerativeProcess) + assert isinstance(process.structure, ConditionalTransitions) + assert process.vocab_size == 4 + + +def test_build_factored_process_from_spec_symmetric_missing_control_maps(components_spec): + """Test unified builder validates required params for symmetric.""" + with pytest.raises(ValueError, match="symmetric structure requires 
'control_maps' parameter"): + build_factored_process_from_spec( + structure_type="symmetric", + spec=components_spec, + ) + + +def test_build_factored_process_from_spec_transition_coupled_missing_params(components_spec): + """Test unified builder validates required params for transition_coupled.""" + with pytest.raises(ValueError, match="transition_coupled structure requires 'control_maps_transition' parameter"): + build_factored_process_from_spec( + structure_type="transition_coupled", + spec=components_spec, + ) + + with pytest.raises(ValueError, match="transition_coupled structure requires 'emission_variant_indices' parameter"): + build_factored_process_from_spec( + structure_type="transition_coupled", + spec=components_spec, + control_maps_transition=[[0, 1], [1, 0]], + ) + + +def test_build_factored_process_chain_missing_control_maps(components_spec): + """Test build_factored_process chain requires control_maps.""" + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components_spec + ) + + with pytest.raises(ValueError, match="Missing required argument 'control_maps' for chain structure"): + build_factored_process( + structure_type="chain", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + ) + + +def test_build_factored_process_symmetric_missing_control_maps(components_spec): + """Test build_factored_process symmetric requires control_maps.""" + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components_spec + ) + + with pytest.raises(ValueError, match="Missing required argument 'control_maps' for symmetric structure"): + build_factored_process( + structure_type="symmetric", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + 
initial_states=initial_states, + ) + + +def test_build_factored_process_transition_coupled_missing_params(components_spec): + """Test build_factored_process transition_coupled requires all params.""" + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components_spec + ) + + with pytest.raises( + ValueError, match="Missing required argument 'control_maps_transition' for transition_coupled structure" + ): + build_factored_process( + structure_type="transition_coupled", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + ) + + with pytest.raises( + ValueError, match="Missing required argument 'emission_variant_indices' for transition_coupled structure" + ): + build_factored_process( + structure_type="transition_coupled", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + control_maps_transition=(jnp.array([0, 1]),), + ) + + +def test_build_matrices_from_spec_empty_spec_raises(): + """Empty spec should raise ValueError.""" + with pytest.raises(ValueError, match="spec must contain at least one factor"): + build_matrices_from_spec([]) + + +def test_build_matrices_from_spec_empty_variants_raises(): + """Factor with empty variants should raise ValueError.""" + spec = [{"component_type": "hmm", "variants": []}] + with pytest.raises(ValueError, match="spec\\[0\\].variants must be non-empty"): + build_matrices_from_spec(spec) + + +def test_build_matrices_from_spec_mismatched_vocab_sizes_raises(): + """Variants with different vocab sizes should raise ValueError.""" + spec = [ + { + "component_type": "hmm", + "variants": [ + {"process_name": "coin", "process_params": {"p": 0.5}}, # vocab=2 + {"process_name": "mess3", "process_params": {"x": 0.5, "a": 0.6}}, # vocab=3 + ], + } + ] + with 
pytest.raises(ValueError, match="must have same vocab size"): + build_matrices_from_spec(spec) + + +def test_build_matrices_from_spec_with_ghmm_variants(): + """GHMM variants should stack normalizing eigenvectors.""" + spec = [ + { + "component_type": "ghmm", + "variants": [ + {"process_name": "tom_quantum", "process_params": {"alpha": 1.0, "beta": 1.0}}, + {"process_name": "tom_quantum", "process_params": {"alpha": 1.0, "beta": 4.0}}, + ], + } + ] + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec(spec) + assert component_types == ["ghmm"] + assert normalizing_eigenvectors[0].shape[0] == 2 # 2 variants + + +def test_build_chain_from_spec_empty_chain_raises(): + """Empty chain should raise ValueError.""" + with pytest.raises(ValueError, match="chain must contain at least one node"): + build_chain_from_spec([]) diff --git a/tests/generative_processes/test_factored_generative_process.py b/tests/generative_processes/test_factored_generative_process.py new file mode 100644 index 00000000..444bd48b --- /dev/null +++ b/tests/generative_processes/test_factored_generative_process.py @@ -0,0 +1,629 @@ +"""Integration tests for the factored generative process.""" + +# pylint: disable=protected-access +# Tests need to access protected methods to verify internal implementation + +import chex +import jax +import jax.numpy as jnp +import pytest + +from simplexity.generative_processes.factored_generative_process import FactoredGenerativeProcess +from simplexity.generative_processes.structures import ( + ConditionalTransitions, + FullyConditional, + IndependentStructure, + SequentialConditional, +) +from simplexity.utils.factoring_utils import transition_with_obs + + +def _tensor_from_probs(variant_probs): + arr = jnp.asarray(variant_probs, dtype=jnp.float32) + return arr[..., None, None] + + +@pytest.fixture +def simple_chain_process(): + """Simple two-factor chain with deterministic initial states.""" + component_types = 
("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((2, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = SequentialConditional( + control_maps=(None, jnp.array([0, 1], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + return FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +@pytest.fixture +def multistate_chain_process(): + """Two-factor chain with multi-state beliefs for transition testing.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + jnp.array( + [ + [ + [[0.9, 0.9], [0.1, 0.1]], + [[0.2, 0.2], [0.8, 0.8]], + ] + ], + dtype=jnp.float32, + ), + jnp.array( + [ + [ + [[0.6, 0.6], [0.4, 0.4]], + [[0.3, 0.3], [0.7, 0.7]], + ], + [ + [[0.5, 0.5], [0.5, 0.5]], + [[0.1, 0.1], [0.9, 0.9]], + ], + ], + dtype=jnp.float32, + ), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 2), dtype=jnp.float32), + jnp.ones((2, 2), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([0.7, 0.3], dtype=jnp.float32), + jnp.array([0.4, 0.6], dtype=jnp.float32), + ) + structure = SequentialConditional( + control_maps=(None, jnp.array([0, 1], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + return FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +def _build_fully_conditional_process(): + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.1, 0.9]]), + _tensor_from_probs([[0.7, 0.3], 
[0.2, 0.8]]), + ) + normalizing_eigenvectors = ( + jnp.ones((2, 1), dtype=jnp.float32), + jnp.ones((2, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = FullyConditional( + control_maps=(jnp.array([0, 1], dtype=jnp.int32), jnp.array([1, 0], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + return FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +def _build_transition_coupled_process(*, emission_control_maps, emission_variant_indices): + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.2, 0.8]]), + _tensor_from_probs([[0.5, 0.5], [0.1, 0.9]]), + ) + normalizing_eigenvectors = ( + jnp.ones((2, 1), dtype=jnp.float32), + jnp.ones((2, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = ConditionalTransitions( + control_maps_transition=( + jnp.array([0, 0], dtype=jnp.int32), + jnp.array([1, 0], dtype=jnp.int32), + ), + emission_variant_indices=emission_variant_indices, + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + emission_control_maps=emission_control_maps, + ) + return FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +def test_factored_process_observation_and_sequence_probability(simple_chain_process): + """Observation, log-observation, and sequence probability APIs should agree.""" + process = simple_chain_process + state = process.initial_state + + dist = process.observation_probability_distribution(state) + expected = jnp.array([0.42, 0.18, 0.08, 0.32], 
dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + chex.assert_trees_all_close(jnp.sum(dist), jnp.array(1.0, dtype=jnp.float32)) + + log_state = tuple(jnp.log(s) for s in state) + log_dist = process.log_observation_probability_distribution(log_state) + chex.assert_trees_all_close(log_dist, jnp.log(dist)) + + observations = jnp.array([0, 3, 1], dtype=jnp.int32) + prob = process.probability(observations) + expected_prob = jnp.array(0.42 * 0.32 * 0.18, dtype=jnp.float32) + chex.assert_trees_all_close(prob, expected_prob) + + log_prob = process.log_probability(observations) + chex.assert_trees_all_close(log_prob, jnp.log(expected_prob)) + + +def test_transition_states_uses_structure_selected_variants(multistate_chain_process): + """transition_states should respect variant selection from the structure.""" + process = multistate_chain_process + state = process.initial_state + obs_token = jnp.array(2, dtype=jnp.int32) # (factor0=1, factor1=0) + + new_state = process.transition_states(state, obs_token) + obs_tuple = process.encoder.token_to_tuple(obs_token) + + expected0 = transition_with_obs( + process.component_types[0], + state[0], + process.transition_matrices[0][0], + obs_tuple[0], + None, + ) + context = process._make_context(state) + variant1 = int(process.structure.select_variants(obs_tuple, context)[1]) + expected1 = transition_with_obs( + process.component_types[1], + state[1], + process.transition_matrices[1][variant1], + obs_tuple[1], + None, + ) + + chex.assert_trees_all_close(new_state[0], expected0) + chex.assert_trees_all_close(new_state[1], expected1) + + +def test_emit_observation_respects_vocab_size(simple_chain_process): + """Samples from emit_observation should be in the encoded vocabulary.""" + token = simple_chain_process.emit_observation(simple_chain_process.initial_state, jax.random.PRNGKey(0)) + assert token.shape == () + assert 0 <= int(token) < simple_chain_process.vocab_size + + +def 
test_fully_conditional_process_observation_distribution(): + """Fully conditional topology should yield expected product-of-experts distribution.""" + process = _build_fully_conditional_process() + dist = process.observation_probability_distribution(process.initial_state) + expected = jnp.array([0.16, 0.10666667, 0.37333333, 0.36], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_fully_conditional_transition_variants_follow_control_maps(): + """transition_states should honor fully-conditional variant selection.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + jnp.array( + [ + [ + [[0.8, 0.2], [0.0, 0.0]], + [[0.2, 0.8], [0.0, 0.0]], + ], + [ + [[0.1, 0.9], [0.0, 0.0]], + [[0.9, 0.1], [0.0, 0.0]], + ], + ], + dtype=jnp.float32, + ), + jnp.array( + [ + [ + [[0.3, 0.7], [0.0, 0.0]], + [[0.6, 0.4], [0.0, 0.0]], + ], + [ + [[0.4, 0.6], [0.0, 0.0]], + [[0.7, 0.3], [0.0, 0.0]], + ], + ], + dtype=jnp.float32, + ), + ) + normalizing_eigenvectors = ( + jnp.ones((2, 2), dtype=jnp.float32), + jnp.ones((2, 2), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([0.6, 0.4], dtype=jnp.float32), + jnp.array([0.5, 0.5], dtype=jnp.float32), + ) + structure = FullyConditional( + control_maps=(jnp.array([0, 1], dtype=jnp.int32), jnp.array([1, 0], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + obs_tuple = (jnp.array(0, dtype=jnp.int32), jnp.array(1, dtype=jnp.int32)) + obs_token = process.encoder.tuple_to_token(obs_tuple) + context = process._make_context(initial_states) + variants = process.structure.select_variants(obs_tuple, context) + new_state = process.transition_states(initial_states, obs_token) + expected = ( + transition_with_obs("hmm", initial_states[0], 
transition_matrices[0][int(variants[0])], obs_tuple[0], None), + transition_with_obs("hmm", initial_states[1], transition_matrices[1][int(variants[1])], obs_tuple[1], None), + ) + chex.assert_trees_all_close(new_state[0], expected[0]) + chex.assert_trees_all_close(new_state[1], expected[1]) + + +def test_transition_coupled_independent_emissions_distribution(): + """ConditionalTransitions with fixed emission variants should factorize emissions.""" + process = _build_transition_coupled_process( + emission_control_maps=None, + emission_variant_indices=jnp.array([1, 0], dtype=jnp.int32), + ) + dist = process.observation_probability_distribution(process.initial_state) + expected = jnp.array([0.1, 0.1, 0.4, 0.4], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_transition_coupled_sequential_emissions_distribution(): + """ConditionalTransitions with emission control maps should follow sequential chain.""" + process = _build_transition_coupled_process( + emission_control_maps=(None, jnp.array([1, 0], dtype=jnp.int32)), + emission_variant_indices=jnp.array([0, 0], dtype=jnp.int32), + ) + dist = process.observation_probability_distribution(process.initial_state) + expected = jnp.array([0.06, 0.54, 0.20, 0.20], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_transition_coupled_transition_variants_follow_control_maps(): + """Transition variants should depend on other-factor tokens as configured.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + jnp.array( + [ + [ + [[0.7, 0.3], [0.2, 0.8]], + [[0.5, 0.5], [0.4, 0.6]], + ], + [ + [[0.4, 0.6], [0.3, 0.7]], + [[0.2, 0.8], [0.6, 0.4]], + ], + ], + dtype=jnp.float32, + ), + jnp.array( + [ + [ + [[0.6, 0.4], [0.1, 0.9]], + [[0.3, 0.7], [0.8, 0.2]], + ], + [ + [[0.9, 0.1], [0.2, 0.8]], + [[0.5, 0.5], [0.7, 0.3]], + ], + ], + dtype=jnp.float32, + ), + ) + normalizing_eigenvectors = ( + jnp.ones((2, 2), dtype=jnp.float32), + jnp.ones((2, 2), 
dtype=jnp.float32), + ) + initial_states = ( + jnp.array([0.8, 0.2], dtype=jnp.float32), + jnp.array([0.3, 0.7], dtype=jnp.float32), + ) + structure = ConditionalTransitions( + control_maps_transition=( + jnp.array([0, 1], dtype=jnp.int32), + jnp.array([1, 0], dtype=jnp.int32), + ), + emission_variant_indices=jnp.array([0, 1], dtype=jnp.int32), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + obs_tuple = (jnp.array(1, dtype=jnp.int32), jnp.array(0, dtype=jnp.int32)) + obs_token = process.encoder.tuple_to_token(obs_tuple) + context = process._make_context(initial_states) + variants = process.structure.select_variants(obs_tuple, context) + new_state = process.transition_states(initial_states, obs_token) + expected = ( + transition_with_obs("hmm", initial_states[0], transition_matrices[0][int(variants[0])], obs_tuple[0], None), + transition_with_obs("hmm", initial_states[1], transition_matrices[1][int(variants[1])], obs_tuple[1], None), + ) + chex.assert_trees_all_close(new_state[0], expected[0]) + chex.assert_trees_all_close(new_state[1], expected[1]) + + +def test_independent_structure_observation_distribution(): + """IndependentStructure should produce product of independent factor distributions.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + 
normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + dist = process.observation_probability_distribution(process.initial_state) + expected = jnp.array([0.42, 0.18, 0.28, 0.12], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_independent_structure_always_uses_variant_zero(): + """IndependentStructure should always select variant 0 for all factors.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.1, 0.9]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + normalizing_eigenvectors = ( + jnp.ones((2, 1), dtype=jnp.float32), + jnp.ones((2, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + obs_tuple = (jnp.array(0, dtype=jnp.int32), jnp.array(1, dtype=jnp.int32)) + obs_token = process.encoder.tuple_to_token(obs_tuple) + context = process._make_context(initial_states) + variants = process.structure.select_variants(obs_tuple, context) + chex.assert_trees_all_equal(variants, (jnp.array(0), jnp.array(0))) + new_state = process.transition_states(initial_states, obs_token) + expected = ( + transition_with_obs("hmm", initial_states[0], transition_matrices[0][0], obs_tuple[0], None), + transition_with_obs("hmm", initial_states[1], transition_matrices[1][0], obs_tuple[1], None), + ) + chex.assert_trees_all_close(new_state[0], expected[0]) + chex.assert_trees_all_close(new_state[1], expected[1]) + + +def test_independent_structure_get_required_params(): + """IndependentStructure should have no required params.""" + structure = IndependentStructure() + required_params = 
structure.get_required_params() + assert required_params == {} + + +def test_factored_process_rejects_empty_components(): + """FactoredGenerativeProcess should reject empty component lists.""" + with pytest.raises(ValueError, match="Must provide at least one component"): + FactoredGenerativeProcess( + component_types=(), + transition_matrices=(), + normalizing_eigenvectors=(), + initial_states=(), + structure=IndependentStructure(), + ) + + +def test_factored_process_validates_transition_matrix_ndim(): + """FactoredGenerativeProcess should reject non-4D transition matrices.""" + component_types = ("hmm",) + # Wrong shape: should be [K, V, S, S] but providing [V, S, S] + transition_matrices = (jnp.ones((2, 3, 3), dtype=jnp.float32),) + normalizing_eigenvectors = (jnp.ones((1, 3), dtype=jnp.float32),) + initial_states = (jnp.ones(3, dtype=jnp.float32),) + structure = IndependentStructure() + + with pytest.raises(ValueError, match=r"transition_matrices\[0\] must have shape \[K, V, S, S\]"): + FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +def test_factored_process_validates_transition_matrix_square(): + """FactoredGenerativeProcess should reject non-square transition matrices.""" + component_types = ("hmm",) + # Wrong shape: last two dims should be equal + transition_matrices = (jnp.ones((1, 2, 3, 4), dtype=jnp.float32),) + normalizing_eigenvectors = (jnp.ones((1, 3), dtype=jnp.float32),) + initial_states = (jnp.ones(3, dtype=jnp.float32),) + structure = IndependentStructure() + + with pytest.raises(ValueError, match=r"transition_matrices\[0\] square mismatch"): + FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +def 
test_factored_process_computes_vocab_sizes_from_transitions(): + """FactoredGenerativeProcess should infer vocab sizes from transition matrices.""" + component_types = ("hmm", "hmm") + # Factor 0 has vocab size 2, factor 1 has vocab size 3 + transition_matrices = ( + jnp.ones((1, 2, 4, 4), dtype=jnp.float32), # K=1, V=2, S=4 + jnp.ones((1, 3, 5, 5), dtype=jnp.float32), # K=1, V=3, S=5 + ) + normalizing_eigenvectors = ( + jnp.ones((1, 4), dtype=jnp.float32), + jnp.ones((1, 5), dtype=jnp.float32), + ) + initial_states = ( + jnp.ones(4, dtype=jnp.float32), + jnp.ones(5, dtype=jnp.float32), + ) + structure = IndependentStructure() + + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + assert process.vocab_size == 2 * 3 # Product of vocab sizes + chex.assert_trees_all_equal(process.encoder.vocab_sizes, jnp.array([2, 3])) + + +def test_factored_process_computes_num_variants_from_transitions(): + """FactoredGenerativeProcess should infer num_variants from transition matrices.""" + component_types = ("hmm", "hmm") + # Factor 0 has 2 variants, factor 1 has 3 variants + transition_matrices = ( + jnp.ones((2, 2, 4, 4), dtype=jnp.float32), # K=2 + jnp.ones((3, 2, 5, 5), dtype=jnp.float32), # K=3 + ) + normalizing_eigenvectors = ( + jnp.ones((2, 4), dtype=jnp.float32), + jnp.ones((3, 5), dtype=jnp.float32), + ) + initial_states = ( + jnp.ones(4, dtype=jnp.float32), + jnp.ones(5, dtype=jnp.float32), + ) + structure = IndependentStructure() + + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + assert process.num_variants == (2, 3) + + +def test_factored_process_ghmm_component_uses_normalizing_eigenvector(): + """GHMM 
components should use normalizing eigenvectors in transitions.""" + component_types = ("ghmm", "hmm") + # Simple transition matrix and eigenvector for GHMM + transition_matrices = ( + jnp.array([[[[0.8, 0.2], [0.3, 0.7]]]], dtype=jnp.float32), # GHMM with K=1, V=1, S=2 + _tensor_from_probs([[0.6, 0.4]]), # HMM + ) + normalizing_eigenvectors = ( + jnp.array([[0.5, 0.5]], dtype=jnp.float32), # For GHMM + jnp.ones((1, 1), dtype=jnp.float32), # For HMM (not used) + ) + initial_states = ( + jnp.array([0.6, 0.4], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = SequentialConditional( + control_maps=(None, jnp.array([0], dtype=jnp.int32)), + vocab_sizes=jnp.array([1, 2], dtype=jnp.int32), + ) + + process = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + # Verify process initializes correctly with GHMM + assert process.component_types[0] == "ghmm" + assert process.component_types[1] == "hmm" + + # Test that transition works (it should use the eigenvector for GHMM) + obs_token = jnp.array(0, dtype=jnp.int32) + new_state = process.transition_states(initial_states, obs_token) + + # Just verify it runs without error and produces valid states + assert new_state[0].shape == initial_states[0].shape + assert new_state[1].shape == initial_states[1].shape + + +def test_factored_process_device_placement(): + """FactoredGenerativeProcess should respect device placement.""" + component_types = ("hmm",) + transition_matrices = (jnp.ones((1, 2, 3, 3), dtype=jnp.float32),) + normalizing_eigenvectors = (jnp.ones((1, 3), dtype=jnp.float32),) + initial_states = (jnp.ones(3, dtype=jnp.float32),) + structure = IndependentStructure() + + # Should work with explicit device specification + process = FactoredGenerativeProcess( + component_types=component_types, + 
transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + device="cpu", + ) + + # Device should be set + assert process.device is not None + # Arrays should be on the specified device + assert all(tm.device == process.device for tm in process.transition_matrices) + assert all(ev.device == process.device for ev in process.normalizing_eigenvectors) + assert all(s.device == process.device for s in process.initial_states) diff --git a/tests/generative_processes/test_factored_structures.py b/tests/generative_processes/test_factored_structures.py new file mode 100644 index 00000000..e45af442 --- /dev/null +++ b/tests/generative_processes/test_factored_structures.py @@ -0,0 +1,354 @@ +"""Tests for factored generative process conditional structures.""" + +import chex +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.structures import ( + ConditionalTransitions, + FullyConditional, + IndependentStructure, + SequentialConditional, +) +from simplexity.generative_processes.structures.protocol import ConditionalContext +from simplexity.utils.factoring_utils import ComponentType + + +def _tensor_from_probs(variant_probs): + """Convert per-variant emission probabilities into transition tensors.""" + arr = jnp.asarray(variant_probs, dtype=jnp.float32) + return arr[..., None, None] + + +def _make_context(states, transition_matrices): + """Helper building a ConditionalContext for HMM components.""" + component_types: tuple[ComponentType, ...] 
= tuple("hmm" for _ in states) + normalizing_eigenvectors = tuple( + jnp.ones((tm.shape[0], tm.shape[-1]), dtype=jnp.float32) for tm in transition_matrices + ) + vocab_sizes = jnp.array([tm.shape[1] for tm in transition_matrices]) + num_variants = tuple(int(tm.shape[0]) for tm in transition_matrices) + return ConditionalContext( + states=states, + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + vocab_sizes=vocab_sizes, + num_variants=num_variants, + ) + + +def test_sequential_conditional_joint_distribution_and_variants(): + """SequentialConditional should respect the chain factorization.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + context = _make_context(states, transition_matrices) + structure = SequentialConditional( + control_maps=(None, jnp.array([0, 1], dtype=jnp.int32)), vocab_sizes=context.vocab_sizes + ) + + dist = structure.compute_joint_distribution(context) + expected = jnp.array([0.42, 0.18, 0.08, 0.32], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + variants = structure.select_variants( + (jnp.array(1, dtype=jnp.int32), jnp.array(0, dtype=jnp.int32)), + context, + ) + chex.assert_trees_all_close(variants[0], jnp.array(0, dtype=jnp.int32)) + chex.assert_trees_all_close(variants[1], jnp.array(1, dtype=jnp.int32)) + + +def test_fully_conditional_product_of_experts(): + """FullyConditional should build a normalized product-of-experts distribution.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.1, 0.9]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + context = _make_context(states, transition_matrices) + structure = FullyConditional( + control_maps=(jnp.array([0, 1], dtype=jnp.int32), 
jnp.array([1, 0], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + + dist = structure.compute_joint_distribution(context) + expected = jnp.array([0.16, 0.10666667, 0.37333333, 0.36], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + variants = structure.select_variants( + (jnp.array(0, dtype=jnp.int32), jnp.array(1, dtype=jnp.int32)), + context, + ) + chex.assert_trees_all_close(variants[0], jnp.array(1, dtype=jnp.int32)) + chex.assert_trees_all_close(variants[1], jnp.array(1, dtype=jnp.int32)) + + +def test_conditional_transitions_with_independent_emissions(): + """ConditionalTransitions should reduce to independent emissions when no chain is given.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.2, 0.8]]), + _tensor_from_probs([[0.5, 0.5], [0.1, 0.9]]), + ) + context = _make_context(states, transition_matrices) + structure = ConditionalTransitions( + control_maps_transition=( + jnp.array([1, 0], dtype=jnp.int32), + jnp.array([0, 1], dtype=jnp.int32), + ), + emission_variant_indices=jnp.array([1, 0], dtype=jnp.int32), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + + dist = structure.compute_joint_distribution(context) + expected = jnp.array([0.1, 0.1, 0.4, 0.4], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + variants = structure.select_variants( + (jnp.array(1, dtype=jnp.int32), jnp.array(0, dtype=jnp.int32)), + context, + ) + chex.assert_trees_all_close(variants[0], jnp.array(1, dtype=jnp.int32)) + chex.assert_trees_all_close(variants[1], jnp.array(1, dtype=jnp.int32)) + + +def test_conditional_transitions_with_sequential_emissions(): + """ConditionalTransitions should honor sequential emission control maps.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.2, 0.8]]), + 
_tensor_from_probs([[0.9, 0.1], [0.3, 0.7]]), + ) + context = _make_context(states, transition_matrices) + structure = ConditionalTransitions( + control_maps_transition=( + jnp.array([0, 0], dtype=jnp.int32), + jnp.array([0, 0], dtype=jnp.int32), + ), + emission_variant_indices=jnp.array([0, 0], dtype=jnp.int32), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + emission_control_maps=(None, jnp.array([1, 0], dtype=jnp.int32)), + ) + + assert structure.use_emission_chain is True + + dist = structure.compute_joint_distribution(context) + expected = jnp.array([0.18, 0.42, 0.36, 0.04], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_independent_structure_joint_distribution(): + """IndependentStructure should compute product of independent marginals.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + context = _make_context(states, transition_matrices) + structure = IndependentStructure() + + dist = structure.compute_joint_distribution(context) + expected = jnp.array([0.42, 0.18, 0.28, 0.12], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_independent_structure_select_variants_always_zero(): + """IndependentStructure should always select variant 0 for all factors.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.1, 0.9]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + context = _make_context(states, transition_matrices) + structure = IndependentStructure() + + variants = structure.select_variants( + (jnp.array(1, dtype=jnp.int32), jnp.array(0, dtype=jnp.int32)), + context, + ) + chex.assert_trees_all_close(variants[0], jnp.array(0, dtype=jnp.int32)) + chex.assert_trees_all_close(variants[1], jnp.array(0, dtype=jnp.int32)) + + +def 
test_independent_structure_get_required_params(): + """IndependentStructure should have no required params.""" + structure = IndependentStructure() + required_params = structure.get_required_params() + assert required_params == {} + + +def test_independent_structure_with_three_factors(): + """IndependentStructure should handle three or more factors.""" + states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + transition_matrices = ( + _tensor_from_probs([[0.5, 0.5]]), + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + context = _make_context(states, transition_matrices) + structure = IndependentStructure() + + dist = structure.compute_joint_distribution(context) + # Joint = P(t0) * P(t1) * P(t2) + # For (t0, t1, t2): 0.5 * 0.6 * 0.7 = 0.21 (for 000), etc. + expected = jnp.array([0.21, 0.09, 0.14, 0.06, 0.21, 0.09, 0.14, 0.06], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected, atol=1e-6) + + +def test_sequential_conditional_with_three_factors(): + """SequentialConditional should handle chains with three or more factors.""" + states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.8, 0.2], [0.3, 0.7]]), + _tensor_from_probs([[0.9, 0.1], [0.5, 0.5]]), + ) + context = _make_context(states, transition_matrices) + structure = SequentialConditional( + control_maps=( + None, + jnp.array([0, 1], dtype=jnp.int32), + jnp.array([1, 0], dtype=jnp.int32), + ), + vocab_sizes=context.vocab_sizes, + ) + + dist = structure.compute_joint_distribution(context) + assert dist.shape == (8,) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + +def test_sequential_conditional_without_vocab_sizes(): + """SequentialConditional requires vocab_sizes to be provided.""" + states = (jnp.array([1.0], 
dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + context = _make_context(states, transition_matrices) + # vocab_sizes must be provided + structure = SequentialConditional( + control_maps=(None, jnp.array([0, 1], dtype=jnp.int32)), vocab_sizes=context.vocab_sizes + ) + + dist = structure.compute_joint_distribution(context) + expected = jnp.array([0.42, 0.18, 0.08, 0.32], dtype=jnp.float32) + chex.assert_trees_all_close(dist, expected) + + +def test_sequential_conditional_get_required_params(): + """SequentialConditional should return required params.""" + structure = SequentialConditional(control_maps=(None,), vocab_sizes=jnp.array([2])) + required_params = structure.get_required_params() + assert required_params == {"control_maps": tuple} + + +def test_fully_conditional_get_required_params(): + """FullyConditional should return required params.""" + structure = FullyConditional(control_maps=(jnp.array([0], dtype=jnp.int32),), vocab_sizes=jnp.array([2])) + required_params = structure.get_required_params() + assert required_params == {"control_maps": tuple, "vocab_sizes": jax.Array} + + +def test_fully_conditional_with_zero_normalization(): + """FullyConditional should handle zero normalization edge case.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + # Create distributions that will multiply to zero + transition_matrices = ( + _tensor_from_probs([[0.0, 0.0], [1.0, 0.0]]), + _tensor_from_probs([[1.0, 0.0], [0.0, 1.0]]), + ) + context = _make_context(states, transition_matrices) + structure = FullyConditional( + control_maps=(jnp.array([0, 1], dtype=jnp.int32), jnp.array([1, 0], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + + dist = structure.compute_joint_distribution(context) + # Should fall back to uniform distribution when Z=0 + assert dist.shape == (4,) + assert 
jnp.all(dist >= 0.0) + + +def test_conditional_transitions_get_required_params(): + """ConditionalTransitions should return required params.""" + structure = ConditionalTransitions( + control_maps_transition=(jnp.array([0], dtype=jnp.int32),), + emission_variant_indices=jnp.array([0]), + vocab_sizes=jnp.array([2]), + ) + required_params = structure.get_required_params() + assert "control_maps_transition" in required_params + assert "emission_variant_indices" in required_params + assert "vocab_sizes" in required_params + + +def test_conditional_transitions_with_none_emission_maps(): + """ConditionalTransitions should handle None emission_control_maps.""" + states = (jnp.array([1.0], dtype=jnp.float32), jnp.array([1.0], dtype=jnp.float32)) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4], [0.2, 0.8]]), + _tensor_from_probs([[0.5, 0.5], [0.1, 0.9]]), + ) + context = _make_context(states, transition_matrices) + # Don't pass emission_control_maps at all + structure = ConditionalTransitions( + control_maps_transition=( + jnp.array([1, 0], dtype=jnp.int32), + jnp.array([0, 1], dtype=jnp.int32), + ), + emission_variant_indices=jnp.array([0, 1], dtype=jnp.int32), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + + assert structure.use_emission_chain is False + dist = structure.compute_joint_distribution(context) + assert dist.shape == (4,) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + +def test_conditional_transitions_sequential_with_none_in_chain(): + """ConditionalTransitions should handle None entries in sequential emission maps.""" + states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.8, 0.2], [0.3, 0.7]]), + _tensor_from_probs([[0.9, 0.1], [0.5, 0.5], [0.7, 0.3], [0.4, 0.6]]), + ) + context = _make_context(states, transition_matrices) + # Sequential emissions: factor 0 
fixed, factor 1 uses map, factor 2 uses different map + structure = ConditionalTransitions( + control_maps_transition=( + jnp.array([0, 0, 0], dtype=jnp.int32), + jnp.array([0, 0, 0], dtype=jnp.int32), + jnp.array([0, 0, 0], dtype=jnp.int32), + ), + emission_variant_indices=jnp.array([0, 0, 0], dtype=jnp.int32), + vocab_sizes=jnp.array([2, 2, 2], dtype=jnp.int32), + emission_control_maps=( + None, + None, + jnp.array([0, 1, 2, 3], dtype=jnp.int32), + ), + ) + + assert structure.use_emission_chain is True + dist = structure.compute_joint_distribution(context) + assert dist.shape == (8,) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) diff --git a/tests/generative_processes/test_generator.py b/tests/generative_processes/test_generator.py index acdf92ce..c83c17c6 100644 --- a/tests/generative_processes/test_generator.py +++ b/tests/generative_processes/test_generator.py @@ -103,14 +103,24 @@ def test_generate_data_batch_with_full_history(): gen_state: jax.Array = hmm.initial_state states = jnp.repeat(gen_state[None, :], batch_size, axis=0) key = jax.random.PRNGKey(0) - next_states, belief_states, prefix_probs, inputs, labels = generate_data_batch_with_full_history( + result = generate_data_batch_with_full_history( states, hmm, batch_size, sequence_len, key, ) + # Extract and type-check all fields + belief_states = result["belief_states"] + prefix_probs = result["prefix_probabilities"] + inputs = result["inputs"] + labels = result["labels"] + + assert isinstance(belief_states, jax.Array) + assert isinstance(prefix_probs, jax.Array) + assert isinstance(inputs, jax.Array) + assert isinstance(labels, jax.Array) + assert belief_states.shape == (batch_size, sequence_len, gen_state.shape[0]) assert prefix_probs.shape == (batch_size, inputs.shape[1]) - assert next_states.shape == (batch_size, gen_state.shape[0]) assert labels.shape == inputs.shape diff --git a/tests/generative_processes/test_torch_generator.py b/tests/generative_processes/test_torch_generator.py 
index 7052e3c4..b532c5cc 100644 --- a/tests/generative_processes/test_torch_generator.py +++ b/tests/generative_processes/test_torch_generator.py @@ -112,13 +112,21 @@ def test_generate_data_batch_with_full_history(): gen_state: jax.Array = hmm.initial_state states = jnp.repeat(gen_state[None, :], batch_size, axis=0) key = jax.random.PRNGKey(123) - next_states, belief_states, prefix_probs, inputs, _ = generate_data_batch_with_full_history( + result = generate_data_batch_with_full_history( states, hmm, batch_size, sequence_len, key, ) + # Extract and type-check all fields + belief_states = result["belief_states"] + prefix_probs = result["prefix_probabilities"] + inputs = result["inputs"] + + assert isinstance(belief_states, jax.Array) + assert isinstance(prefix_probs, jax.Array) + assert isinstance(inputs, torch.Tensor) + assert belief_states.shape == (batch_size, sequence_len, gen_state.shape[0]) assert prefix_probs.shape == (batch_size, inputs.shape[1]) - assert next_states.shape == (batch_size, gen_state.shape[0]) diff --git a/tests/generative_processes/test_transition_matrices.py b/tests/generative_processes/test_transition_matrices.py index 2d6f0960..e319834e 100644 --- a/tests/generative_processes/test_transition_matrices.py +++ b/tests/generative_processes/test_transition_matrices.py @@ -50,7 +50,7 @@ def validate_ghmm_transition_matrices(transition_matrices: jax.Array, ergodic: b def validate_hmm_transition_matrices( - transition_matrices: jnp.ndarray, ergodic: bool = True, rtol: float = 1e-6, atol: float = 0 + transition_matrices: jax.Array, ergodic: bool = True, rtol: float = 1e-6, atol: float = 0 ): """Test the validate_hmm_transition_matrices function.""" validate_ghmm_transition_matrices(transition_matrices, ergodic) diff --git a/tests/structured_configs/test_generative_process_config.py b/tests/structured_configs/test_generative_process_config.py index 898f3df0..fdcda804 100644 --- a/tests/structured_configs/test_generative_process_config.py +++ 
b/tests/structured_configs/test_generative_process_config.py @@ -855,3 +855,177 @@ def test_resolve_generative_process_config_with_invalid_values(self): match=re.escape("GenerativeProcessConfig.vocab_size (4) must be equal to 3"), ): resolve_generative_process_config(cfg, base_vocab_size=3) + + +class TestFactoredProcessBuilders: + """Tests for factored generative process builder configs.""" + + def test_factored_process_from_spec_config_detection(self) -> None: + """Test build_factored_process_from_spec config detection (unified builder).""" + target = "simplexity.generative_processes.builder.build_factored_process_from_spec" + + # Test independent + cfg = DictConfig( + { + "_target_": target, + "structure_type": "independent", + "spec": [ + { + "component_type": "hmm", + "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}], + } + ], + } + ) + assert is_generative_process_target(target) + assert is_generative_process_config(cfg) + + # Test chain + cfg = DictConfig( + { + "_target_": target, + "structure_type": "chain", + "spec": [ + { + "component_type": "hmm", + "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}], + } + ], + } + ) + assert is_generative_process_target(target) + assert is_generative_process_config(cfg) + + # Test symmetric + cfg = DictConfig( + { + "_target_": target, + "structure_type": "symmetric", + "spec": [ + { + "component_type": "hmm", + "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}], + } + ], + "control_maps": [[0]], + } + ) + assert is_generative_process_target(target) + assert is_generative_process_config(cfg) + + # Test transition_coupled + cfg = DictConfig( + { + "_target_": target, + "structure_type": "transition_coupled", + "spec": [ + { + "component_type": "hmm", + "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}], + } + ], + "control_maps_transition": [[0]], + "emission_variant_indices": [0], + } + ) + assert is_generative_process_target(target) + assert is_generative_process_config(cfg) + 
+ def _make_factored_process_cfg( + self, structure_type: str, extra_instance_fields: dict[str, Any] | None = None + ) -> DictConfig: + """Helper to create a factored process config for testing.""" + spec = [ + { + "component_type": "hmm", + "variants": [{"process_name": "mess3", "x": 0.15, "a": 0.6}], + } + ] + instance_data: dict[str, Any] = { + "_target_": "simplexity.generative_processes.builder.build_factored_process_from_spec", + "structure_type": structure_type, + "spec": spec, + } + if extra_instance_fields: + instance_data.update(extra_instance_fields) + + cfg = DictConfig( + { + "instance": DictConfig(instance_data), + "base_vocab_size": MISSING, + "vocab_size": MISSING, + } + ) + return _with_missing_tokens(cfg) + + @pytest.mark.parametrize( + ("structure_type", "extra_instance_fields"), + [ + ("independent", None), + ("chain", None), + ("symmetric", {"control_maps": [[0]]}), + ("transition_coupled", {"control_maps_transition": [[0]], "emission_variant_indices": [0]}), + ], + ) + def test_validate_generative_process_config_handles_factored_process_builders( + self, structure_type: str, extra_instance_fields: dict[str, Any] | None + ) -> None: + """Test validate_generative_process_config works with unified factored process builder.""" + cfg = self._make_factored_process_cfg(structure_type, extra_instance_fields) + validate_generative_process_config(cfg) + + @pytest.mark.parametrize( + ("structure_type", "extra_instance_fields", "expected_error_pattern"), + [ + # Invalid structure_type values - these pass config validation (target is still valid) + # but would fail when the builder is called + ( + "invalid_structure_type", + None, + None, # Config validation passes, builder will raise ValueError + ), + ( + "", + None, + None, # Empty structure_type passes config validation + ), + # Missing required fields - these pass config validation + # (structure-specific fields are validated by the builder, not config validator) + ( + "symmetric", + None, + None, # 
Missing control_maps - builder will raise ValueError + ), + ( + "chain", + None, + None, # Missing control_maps - builder will raise ValueError + ), + ( + "transition_coupled", + None, + None, # Missing control_maps_transition - builder will raise ValueError + ), + ( + "transition_coupled", + {"control_maps_transition": [[0]]}, + None, # Missing emission_variant_indices - builder will raise ValueError + ), + ], + ) + def test_validate_generative_process_config_invalid_factored_process_configs( + self, + structure_type: str, + extra_instance_fields: dict[str, Any] | None, + expected_error_pattern: str | None, + ) -> None: + """Test validate_generative_process_config with invalid factored process configs. + + Note: Config validation doesn't check structure-specific required fields (like control_maps + for symmetric/chain or control_maps_transition/emission_variant_indices for transition_coupled) + or invalid structure_type values. These are validated by the builder when the process is + actually constructed. This test documents that config validation passes for these cases. + """ + cfg = self._make_factored_process_cfg(structure_type, extra_instance_fields) + # All these cases pass config validation (builder will validate later) + validate_generative_process_config(cfg) diff --git a/tests/structured_configs/test_predictive_model_config.py b/tests/structured_configs/test_predictive_model_config.py index 95365ee1..ffa6c5e4 100644 --- a/tests/structured_configs/test_predictive_model_config.py +++ b/tests/structured_configs/test_predictive_model_config.py @@ -1,5 +1,14 @@ """Tests for predictive-model structured configs.""" +# pylint: disable=all +# Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all +# Re-enable all pylint checkers for the checking phase. 
This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. + import re from unittest.mock import call, patch diff --git a/tests/utils/test_analysis_utils.py b/tests/utils/test_analysis_utils.py index 906b4f8e..0320c307 100644 --- a/tests/utils/test_analysis_utils.py +++ b/tests/utils/test_analysis_utils.py @@ -1,5 +1,6 @@ """Tests for analysis utilities.""" +import jax import jax.numpy as jnp import pytest @@ -327,6 +328,8 @@ def test_basic_functionality(self, simple_inputs, simple_beliefs, simple_probs, assert jnp.allclose(jnp.sum(dataset.probs), 1.0) # Check shapes are consistent + + assert isinstance(dataset.beliefs, jax.Array) n_prefixes = dataset.beliefs.shape[0] assert dataset.probs.shape[0] == n_prefixes for layer_acts in dataset.activations_by_layer.values(): @@ -347,6 +350,7 @@ def test_belief_dimensions(self, simple_inputs, simple_beliefs, simple_probs, si dataset = build_prefix_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) # Beliefs should have 2 dimensions (from fixture) + assert isinstance(dataset.beliefs, jax.Array) assert dataset.beliefs.shape[1] == 2 @@ -373,6 +377,7 @@ def test_deduplication(self, simple_inputs, simple_beliefs, simple_probs, simple dataset = build_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) # Should have 2 unique sequences + assert isinstance(dataset.beliefs, jax.Array) assert dataset.beliefs.shape[0] == 2 assert dataset.probs.shape[0] == 2 @@ -381,6 +386,7 @@ def test_preserves_dimensions(self, simple_inputs, simple_beliefs, simple_probs, dataset = build_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) # Check belief dimension + assert isinstance(dataset.beliefs, jax.Array) assert dataset.beliefs.shape[1] == 2 # Check layer dimensions diff --git a/tests/utils/test_factoring_utils.py 
b/tests/utils/test_factoring_utils.py new file mode 100644 index 00000000..f9fb05b6 --- /dev/null +++ b/tests/utils/test_factoring_utils.py @@ -0,0 +1,111 @@ +"""Tests for factoring utilities.""" + +import chex +import jax.numpy as jnp +import pytest + +from simplexity.utils.factoring_utils import ( + TokenEncoder, + compute_obs_dist_for_variant, + transition_with_obs, +) + + +def test_compute_obs_dist_for_variant_ghmm_missing_eigenvector(): + """GHMM without normalizing eigenvector should raise ValueError.""" + state = jnp.array([0.5, 0.5]) + transition_matrix = jnp.zeros((2, 2, 2)) + + with pytest.raises(ValueError, match="GHMM requires normalizing_eigenvector"): + compute_obs_dist_for_variant("ghmm", state, transition_matrix, normalizing_eigenvector=None) + + +def test_transition_with_obs_ghmm_missing_eigenvector(): + """GHMM transition without normalizing eigenvector should raise ValueError.""" + state = jnp.array([0.5, 0.5]) + transition_matrix = jnp.eye(2)[None, :, :] # Shape: [V=1, S=2, S=2] + obs = jnp.array(0) + + with pytest.raises(ValueError, match="GHMM requires normalizing_eigenvector"): + transition_with_obs("ghmm", state, transition_matrix, obs, normalizing_eigenvector=None) + + +def test_token_encoder_extract_factors_vectorized(): + """TokenEncoder should handle batch decoding.""" + vocab_sizes = jnp.array([2, 3, 4]) + encoder = TokenEncoder(vocab_sizes) + + # Encode multiple tokens + tokens = jnp.array([0, 5, 10, 23]) # Multiple composite tokens + factors = encoder.extract_factors_vectorized(tokens) + + assert factors.shape == (4, 3) # (batch, num_factors) + # Verify each decoding is correct + for i, token in enumerate(tokens): + expected_tuple = encoder.token_to_tuple(token) + expected_array = jnp.array([t.item() for t in expected_tuple]) + chex.assert_trees_all_close(factors[i], expected_array) + + +def test_compute_obs_dist_for_variant_hmm(): + """HMM observation distribution should work without normalizing eigenvector.""" + state = 
jnp.array([0.6, 0.4]) + # Transition matrix: [V=2, S=2, S=2] + transition_matrix = jnp.array( + [ + [[0.8, 0.2], [0.3, 0.7]], # For obs=0 + [[0.1, 0.9], [0.4, 0.6]], # For obs=1 + ] + ) + + dist = compute_obs_dist_for_variant("hmm", state, transition_matrix, normalizing_eigenvector=None) + + assert dist.shape == (2,) # V=2 + # P(obs=0) = state @ transition_matrix[0] @ 1 = [0.6, 0.4] @ [[0.8, 0.2], [0.3, 0.7]] @ [1, 1] + expected_0 = jnp.sum(state @ transition_matrix[0]) + expected_1 = jnp.sum(state @ transition_matrix[1]) + chex.assert_trees_all_close(dist, jnp.array([expected_0, expected_1])) + + +def test_transition_with_obs_hmm(): + """HMM transition should work without normalizing eigenvector.""" + state = jnp.array([0.6, 0.4]) + transition_matrix = jnp.array( + [ + [[0.8, 0.2], [0.3, 0.7]], # For obs=0 + [[0.1, 0.9], [0.4, 0.6]], # For obs=1 + ] + ) + obs = jnp.array(0) + + new_state = transition_with_obs("hmm", state, transition_matrix, obs, normalizing_eigenvector=None) + + assert new_state.shape == (2,) + # New state should be state @ transition_matrix[0] normalized + unnormalized = state @ transition_matrix[0] + expected = unnormalized / jnp.sum(unnormalized) + chex.assert_trees_all_close(new_state, expected) + + +def test_token_encoder_tuple_to_token(): + """TokenEncoder should encode tuples to composite tokens.""" + vocab_sizes = jnp.array([2, 3, 4]) + encoder = TokenEncoder(vocab_sizes) + + # Test a few tuples + tuple0 = (jnp.array(0), jnp.array(0), jnp.array(0)) + tuple1 = (jnp.array(1), jnp.array(2), jnp.array(3)) + + token0 = encoder.tuple_to_token(tuple0) + token1 = encoder.tuple_to_token(tuple1) + + assert token0 == 0 # (0, 0, 0) -> 0 + # (1, 2, 3) -> 1 + 2*2 + 3*2*3 = 1 + 4 + 18 = 23 + assert token1 == 23 + + # Verify roundtrip + decoded0 = encoder.token_to_tuple(token0) + decoded1 = encoder.token_to_tuple(token1) + + chex.assert_trees_all_close(decoded0, tuple0) + chex.assert_trees_all_close(decoded1, tuple1) diff --git a/uv.lock b/uv.lock index 
ab23002c..a98d77e4 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,4 @@ version = 1 -revision = 3 requires-python = ">=3.12" resolution-markers = [ "python_full_version >= '3.13' and sys_platform == 'linux'", @@ -12,9 +11,9 @@ resolution-markers = [ name = "absl-py" version = "2.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/10/2a/c93173ffa1b39c1d0395b7e842bbdc62e556ca9d8d3b5572926f3e4ca752/absl_py-2.3.1.tar.gz", hash = "sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9", size = 116588, upload-time = "2025-07-03T09:31:44.05Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/2a/c93173ffa1b39c1d0395b7e842bbdc62e556ca9d8d3b5572926f3e4ca752/absl_py-2.3.1.tar.gz", hash = "sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9", size = 116588 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/aa/ba0014cc4659328dc818a28827be78e6d97312ab0cb98105a770924dc11e/absl_py-2.3.1-py3-none-any.whl", hash = "sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d", size = 135811, upload-time = "2025-07-03T09:31:42.253Z" }, + { url = "https://files.pythonhosted.org/packages/8f/aa/ba0014cc4659328dc818a28827be78e6d97312ab0cb98105a770924dc11e/absl_py-2.3.1-py3-none-any.whl", hash = "sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d", size = 135811 }, ] [[package]] @@ -30,27 +29,27 @@ dependencies = [ { name = "safetensors" }, { name = "torch" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4a/8e/ac2a9566747a93f8be36ee08532eb0160558b07630a081a6056a9f89bf1d/accelerate-1.12.0.tar.gz", hash = "sha256:70988c352feb481887077d2ab845125024b2a137a5090d6d7a32b57d03a45df6", size = 398399, upload-time = "2025-11-21T11:27:46.973Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/8e/ac2a9566747a93f8be36ee08532eb0160558b07630a081a6056a9f89bf1d/accelerate-1.12.0.tar.gz", hash = 
"sha256:70988c352feb481887077d2ab845125024b2a137a5090d6d7a32b57d03a45df6", size = 398399 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/d2/c581486aa6c4fbd7394c23c47b83fa1a919d34194e16944241daf9e762dd/accelerate-1.12.0-py3-none-any.whl", hash = "sha256:3e2091cd341423207e2f084a6654b1efcd250dc326f2a37d6dde446e07cabb11", size = 380935, upload-time = "2025-11-21T11:27:44.522Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d2/c581486aa6c4fbd7394c23c47b83fa1a919d34194e16944241daf9e762dd/accelerate-1.12.0-py3-none-any.whl", hash = "sha256:3e2091cd341423207e2f084a6654b1efcd250dc326f2a37d6dde446e07cabb11", size = 380935 }, ] [[package]] name = "aiofiles" version = "25.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, + { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668 }, ] [[package]] name = "aiohappyeyeballs" version = "2.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, ] [[package]] @@ -66,76 +65,76 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" }, - { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, - { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, - { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" }, - { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" }, - { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" }, - { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" }, - { url = 
"https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" }, - { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" }, - { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" }, - { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" }, - { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" }, - { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, - { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" }, - { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, - { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, - { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, - { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" }, - { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" }, - { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" }, - { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" }, - { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" }, - { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" }, - { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" }, - { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" }, - { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" }, - { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, - { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 
1758084, upload-time = "2025-10-28T20:57:28.349Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, - { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, - { url = "https://files.pythonhosted.org/packages/9b/36/e2abae1bd815f01c957cbf7be817b3043304e1c87bad526292a0410fdcf9/aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b", size = 735234, upload-time = "2025-10-28T20:57:36.415Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/1ee62dde9b335e4ed41db6bba02613295a0d5b41f74a783c142745a12763/aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61", size = 490733, upload-time = "2025-10-28T20:57:38.205Z" }, - { url = "https://files.pythonhosted.org/packages/1a/aa/7a451b1d6a04e8d15a362af3e9b897de71d86feac3babf8894545d08d537/aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4", size = 491303, upload-time = "2025-10-28T20:57:40.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/1e/209958dbb9b01174870f6a7538cd1f3f28274fdbc88a750c238e2c456295/aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b", size = 1717965, upload-time = "2025-10-28T20:57:42.28Z" }, - { url = "https://files.pythonhosted.org/packages/08/aa/6a01848d6432f241416bc4866cae8dc03f05a5a884d2311280f6a09c73d6/aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694", size = 1667221, upload-time = "2025-10-28T20:57:44.869Z" }, - { url = "https://files.pythonhosted.org/packages/87/4f/36c1992432d31bbc789fa0b93c768d2e9047ec8c7177e5cd84ea85155f36/aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906", size = 1757178, upload-time = "2025-10-28T20:57:47.216Z" }, - { url = "https://files.pythonhosted.org/packages/ac/b4/8e940dfb03b7e0f68a82b88fd182b9be0a65cb3f35612fe38c038c3112cf/aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9", size = 1838001, upload-time = "2025-10-28T20:57:49.337Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ef/39f3448795499c440ab66084a9db7d20ca7662e94305f175a80f5b7e0072/aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011", size = 1716325, upload-time = "2025-10-28T20:57:51.327Z" }, - { url = "https://files.pythonhosted.org/packages/d7/51/b311500ffc860b181c05d91c59a1313bdd05c82960fdd4035a15740d431e/aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6", size = 1547978, upload-time = "2025-10-28T20:57:53.554Z" }, - { url = "https://files.pythonhosted.org/packages/31/64/b9d733296ef79815226dab8c586ff9e3df41c6aff2e16c06697b2d2e6775/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213", size = 1682042, upload-time = "2025-10-28T20:57:55.617Z" }, - { url = "https://files.pythonhosted.org/packages/3f/30/43d3e0f9d6473a6db7d472104c4eff4417b1e9df01774cb930338806d36b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49", size = 1680085, upload-time = "2025-10-28T20:57:57.59Z" }, - { url = "https://files.pythonhosted.org/packages/16/51/c709f352c911b1864cfd1087577760ced64b3e5bee2aa88b8c0c8e2e4972/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae", size = 1728238, upload-time = "2025-10-28T20:57:59.525Z" }, - { url = "https://files.pythonhosted.org/packages/19/e2/19bd4c547092b773caeb48ff5ae4b1ae86756a0ee76c16727fcfd281404b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa", size = 1544395, upload-time = "2025-10-28T20:58:01.914Z" }, - { url = "https://files.pythonhosted.org/packages/cf/87/860f2803b27dfc5ed7be532832a3498e4919da61299b4a1f8eb89b8ff44d/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4", size = 1742965, upload-time = "2025-10-28T20:58:03.972Z" }, - { url = "https://files.pythonhosted.org/packages/67/7f/db2fc7618925e8c7a601094d5cbe539f732df4fb570740be88ed9e40e99a/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a", size = 1697585, upload-time = 
"2025-10-28T20:58:06.189Z" }, - { url = "https://files.pythonhosted.org/packages/0c/07/9127916cb09bb38284db5036036042b7b2c514c8ebaeee79da550c43a6d6/aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940", size = 431621, upload-time = "2025-10-28T20:58:08.636Z" }, - { url = "https://files.pythonhosted.org/packages/fb/41/554a8a380df6d3a2bba8a7726429a23f4ac62aaf38de43bb6d6cde7b4d4d/aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4", size = 457627, upload-time = "2025-10-28T20:58:11Z" }, - { url = "https://files.pythonhosted.org/packages/c7/8e/3824ef98c039d3951cb65b9205a96dd2b20f22241ee17d89c5701557c826/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673", size = 767360, upload-time = "2025-10-28T20:58:13.358Z" }, - { url = "https://files.pythonhosted.org/packages/a4/0f/6a03e3fc7595421274fa34122c973bde2d89344f8a881b728fa8c774e4f1/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd", size = 504616, upload-time = "2025-10-28T20:58:15.339Z" }, - { url = "https://files.pythonhosted.org/packages/c6/aa/ed341b670f1bc8a6f2c6a718353d13b9546e2cef3544f573c6a1ff0da711/aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3", size = 509131, upload-time = "2025-10-28T20:58:17.693Z" }, - { url = "https://files.pythonhosted.org/packages/7f/f0/c68dac234189dae5c4bbccc0f96ce0cc16b76632cfc3a08fff180045cfa4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf", size = 1864168, upload-time = "2025-10-28T20:58:20.113Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/65/75a9a76db8364b5d0e52a0c20eabc5d52297385d9af9c35335b924fafdee/aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e", size = 1719200, upload-time = "2025-10-28T20:58:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/f5/55/8df2ed78d7f41d232f6bd3ff866b6f617026551aa1d07e2f03458f964575/aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5", size = 1843497, upload-time = "2025-10-28T20:58:24.672Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e0/94d7215e405c5a02ccb6a35c7a3a6cfff242f457a00196496935f700cde5/aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad", size = 1935703, upload-time = "2025-10-28T20:58:26.758Z" }, - { url = "https://files.pythonhosted.org/packages/0b/78/1eeb63c3f9b2d1015a4c02788fb543141aad0a03ae3f7a7b669b2483f8d4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e", size = 1792738, upload-time = "2025-10-28T20:58:29.787Z" }, - { url = "https://files.pythonhosted.org/packages/41/75/aaf1eea4c188e51538c04cc568040e3082db263a57086ea74a7d38c39e42/aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61", size = 1624061, upload-time = "2025-10-28T20:58:32.529Z" }, - { url = "https://files.pythonhosted.org/packages/9b/c2/3b6034de81fbcc43de8aeb209073a2286dfb50b86e927b4efd81cf848197/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661", size = 1789201, upload-time = "2025-10-28T20:58:34.618Z" }, - { url = "https://files.pythonhosted.org/packages/c9/38/c15dcf6d4d890217dae79d7213988f4e5fe6183d43893a9cf2fe9e84ca8d/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98", size = 1776868, upload-time = "2025-10-28T20:58:38.835Z" }, - { url = "https://files.pythonhosted.org/packages/04/75/f74fd178ac81adf4f283a74847807ade5150e48feda6aef024403716c30c/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693", size = 1790660, upload-time = "2025-10-28T20:58:41.507Z" }, - { url = "https://files.pythonhosted.org/packages/e7/80/7368bd0d06b16b3aba358c16b919e9c46cf11587dc572091031b0e9e3ef0/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a", size = 1617548, upload-time = "2025-10-28T20:58:43.674Z" }, - { url = "https://files.pythonhosted.org/packages/7d/4b/a6212790c50483cb3212e507378fbe26b5086d73941e1ec4b56a30439688/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be", size = 1817240, upload-time = "2025-10-28T20:58:45.787Z" }, - { url = "https://files.pythonhosted.org/packages/ff/f7/ba5f0ba4ea8d8f3c32850912944532b933acbf0f3a75546b89269b9b7dde/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c", size = 1762334, upload-time = "2025-10-28T20:58:47.936Z" }, - { url = "https://files.pythonhosted.org/packages/7e/83/1a5a1856574588b1cad63609ea9ad75b32a8353ac995d830bf5da9357364/aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734", size = 464685, upload-time = 
"2025-10-28T20:58:50.642Z" }, - { url = "https://files.pythonhosted.org/packages/9f/4d/d22668674122c08f4d56972297c51a624e64b3ed1efaa40187607a7cb66e/aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f", size = 498093, upload-time = "2025-10-28T20:58:52.782Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623 }, + { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664 }, + { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808 }, + { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863 }, + { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586 }, + { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625 }, + { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281 }, + { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431 }, + { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846 }, + { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606 }, + { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663 }, + { url = 
"https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939 }, + { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132 }, + { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802 }, + { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512 }, + { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690 }, + { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465 }, + { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139 }, + { url = 
"https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082 }, + { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035 }, + { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387 }, + { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314 }, + { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317 }, + { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539 }, + { url = 
"https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597 }, + { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006 }, + { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220 }, + { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570 }, + { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407 }, + { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093 }, + { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084 }, + { url = 
"https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987 }, + { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859 }, + { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192 }, + { url = "https://files.pythonhosted.org/packages/9b/36/e2abae1bd815f01c957cbf7be817b3043304e1c87bad526292a0410fdcf9/aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b", size = 735234 }, + { url = "https://files.pythonhosted.org/packages/ca/e3/1ee62dde9b335e4ed41db6bba02613295a0d5b41f74a783c142745a12763/aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61", size = 490733 }, + { url = "https://files.pythonhosted.org/packages/1a/aa/7a451b1d6a04e8d15a362af3e9b897de71d86feac3babf8894545d08d537/aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4", size = 491303 }, + { url = "https://files.pythonhosted.org/packages/57/1e/209958dbb9b01174870f6a7538cd1f3f28274fdbc88a750c238e2c456295/aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b", size = 1717965 }, + { url = 
"https://files.pythonhosted.org/packages/08/aa/6a01848d6432f241416bc4866cae8dc03f05a5a884d2311280f6a09c73d6/aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694", size = 1667221 }, + { url = "https://files.pythonhosted.org/packages/87/4f/36c1992432d31bbc789fa0b93c768d2e9047ec8c7177e5cd84ea85155f36/aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906", size = 1757178 }, + { url = "https://files.pythonhosted.org/packages/ac/b4/8e940dfb03b7e0f68a82b88fd182b9be0a65cb3f35612fe38c038c3112cf/aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9", size = 1838001 }, + { url = "https://files.pythonhosted.org/packages/d7/ef/39f3448795499c440ab66084a9db7d20ca7662e94305f175a80f5b7e0072/aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011", size = 1716325 }, + { url = "https://files.pythonhosted.org/packages/d7/51/b311500ffc860b181c05d91c59a1313bdd05c82960fdd4035a15740d431e/aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6", size = 1547978 }, + { url = "https://files.pythonhosted.org/packages/31/64/b9d733296ef79815226dab8c586ff9e3df41c6aff2e16c06697b2d2e6775/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213", size = 1682042 }, + { url = "https://files.pythonhosted.org/packages/3f/30/43d3e0f9d6473a6db7d472104c4eff4417b1e9df01774cb930338806d36b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", 
hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49", size = 1680085 }, + { url = "https://files.pythonhosted.org/packages/16/51/c709f352c911b1864cfd1087577760ced64b3e5bee2aa88b8c0c8e2e4972/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae", size = 1728238 }, + { url = "https://files.pythonhosted.org/packages/19/e2/19bd4c547092b773caeb48ff5ae4b1ae86756a0ee76c16727fcfd281404b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa", size = 1544395 }, + { url = "https://files.pythonhosted.org/packages/cf/87/860f2803b27dfc5ed7be532832a3498e4919da61299b4a1f8eb89b8ff44d/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4", size = 1742965 }, + { url = "https://files.pythonhosted.org/packages/67/7f/db2fc7618925e8c7a601094d5cbe539f732df4fb570740be88ed9e40e99a/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a", size = 1697585 }, + { url = "https://files.pythonhosted.org/packages/0c/07/9127916cb09bb38284db5036036042b7b2c514c8ebaeee79da550c43a6d6/aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940", size = 431621 }, + { url = "https://files.pythonhosted.org/packages/fb/41/554a8a380df6d3a2bba8a7726429a23f4ac62aaf38de43bb6d6cde7b4d4d/aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4", size = 457627 }, + { url = "https://files.pythonhosted.org/packages/c7/8e/3824ef98c039d3951cb65b9205a96dd2b20f22241ee17d89c5701557c826/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673", size = 767360 }, + { url = 
"https://files.pythonhosted.org/packages/a4/0f/6a03e3fc7595421274fa34122c973bde2d89344f8a881b728fa8c774e4f1/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd", size = 504616 }, + { url = "https://files.pythonhosted.org/packages/c6/aa/ed341b670f1bc8a6f2c6a718353d13b9546e2cef3544f573c6a1ff0da711/aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3", size = 509131 }, + { url = "https://files.pythonhosted.org/packages/7f/f0/c68dac234189dae5c4bbccc0f96ce0cc16b76632cfc3a08fff180045cfa4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf", size = 1864168 }, + { url = "https://files.pythonhosted.org/packages/8f/65/75a9a76db8364b5d0e52a0c20eabc5d52297385d9af9c35335b924fafdee/aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e", size = 1719200 }, + { url = "https://files.pythonhosted.org/packages/f5/55/8df2ed78d7f41d232f6bd3ff866b6f617026551aa1d07e2f03458f964575/aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5", size = 1843497 }, + { url = "https://files.pythonhosted.org/packages/e9/e0/94d7215e405c5a02ccb6a35c7a3a6cfff242f457a00196496935f700cde5/aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad", size = 1935703 }, + { url = 
"https://files.pythonhosted.org/packages/0b/78/1eeb63c3f9b2d1015a4c02788fb543141aad0a03ae3f7a7b669b2483f8d4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e", size = 1792738 }, + { url = "https://files.pythonhosted.org/packages/41/75/aaf1eea4c188e51538c04cc568040e3082db263a57086ea74a7d38c39e42/aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61", size = 1624061 }, + { url = "https://files.pythonhosted.org/packages/9b/c2/3b6034de81fbcc43de8aeb209073a2286dfb50b86e927b4efd81cf848197/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661", size = 1789201 }, + { url = "https://files.pythonhosted.org/packages/c9/38/c15dcf6d4d890217dae79d7213988f4e5fe6183d43893a9cf2fe9e84ca8d/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98", size = 1776868 }, + { url = "https://files.pythonhosted.org/packages/04/75/f74fd178ac81adf4f283a74847807ade5150e48feda6aef024403716c30c/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693", size = 1790660 }, + { url = "https://files.pythonhosted.org/packages/e7/80/7368bd0d06b16b3aba358c16b919e9c46cf11587dc572091031b0e9e3ef0/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a", size = 1617548 }, + { url = "https://files.pythonhosted.org/packages/7d/4b/a6212790c50483cb3212e507378fbe26b5086d73941e1ec4b56a30439688/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be", size = 1817240 }, + { url = 
"https://files.pythonhosted.org/packages/ff/f7/ba5f0ba4ea8d8f3c32850912944532b933acbf0f3a75546b89269b9b7dde/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c", size = 1762334 }, + { url = "https://files.pythonhosted.org/packages/7e/83/1a5a1856574588b1cad63609ea9ad75b32a8353ac995d830bf5da9357364/aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734", size = 464685 }, + { url = "https://files.pythonhosted.org/packages/9f/4d/d22668674122c08f4d56972297c51a624e64b3ed1efaa40187607a7cb66e/aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f", size = 498093 }, ] [[package]] @@ -146,9 +145,9 @@ dependencies = [ { name = "frozenlist" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = 
"sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 }, ] [[package]] @@ -160,34 +159,34 @@ dependencies = [ { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064, upload-time = "2025-11-14T20:35:04.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554, upload-time = "2025-11-14T20:35:05.699Z" }, + { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554 }, ] [[package]] name = "annotated-doc" version = "0.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303 }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, ] [[package]] name = "antlr4-python3-runtime" version = "4.9.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034, upload-time = "2021-11-06T17:52:23.524Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034 } [[package]] name = "anyio" @@ -197,109 +196,109 @@ dependencies = [ { name = "idna" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" } +sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, + { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362 }, ] [[package]] name = "astroid" version = "4.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b7/22/97df040e15d964e592d3a180598ace67e91b7c559d8298bdb3c949dc6e42/astroid-4.0.2.tar.gz", hash = "sha256:ac8fb7ca1c08eb9afec91ccc23edbd8ac73bb22cbdd7da1d488d9fb8d6579070", size = 405714, upload-time = "2025-11-09T21:21:18.373Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/22/97df040e15d964e592d3a180598ace67e91b7c559d8298bdb3c949dc6e42/astroid-4.0.2.tar.gz", hash = "sha256:ac8fb7ca1c08eb9afec91ccc23edbd8ac73bb22cbdd7da1d488d9fb8d6579070", size = 405714 } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/ac/a85b4bfb4cf53221513e27f33cc37ad158fce02ac291d18bee6b49ab477d/astroid-4.0.2-py3-none-any.whl", hash = "sha256:d7546c00a12efc32650b19a2bb66a153883185d3179ab0d4868086f807338b9b", size = 276354, upload-time = "2025-11-09T21:21:16.54Z" }, + { url = "https://files.pythonhosted.org/packages/93/ac/a85b4bfb4cf53221513e27f33cc37ad158fce02ac291d18bee6b49ab477d/astroid-4.0.2-py3-none-any.whl", hash = "sha256:d7546c00a12efc32650b19a2bb66a153883185d3179ab0d4868086f807338b9b", size = 276354 }, ] [[package]] name = "attrs" version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, 
+ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615 }, ] [[package]] name = "autopage" version = "0.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9f/9e/559b0cfdba9f3ed6744d8cbcdbda58880d3695c43c053a31773cefcedde3/autopage-0.5.2.tar.gz", hash = "sha256:826996d74c5aa9f4b6916195547312ac6384bac3810b8517063f293248257b72", size = 33031, upload-time = "2023-10-16T09:22:19.54Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/9e/559b0cfdba9f3ed6744d8cbcdbda58880d3695c43c053a31773cefcedde3/autopage-0.5.2.tar.gz", hash = "sha256:826996d74c5aa9f4b6916195547312ac6384bac3810b8517063f293248257b72", size = 33031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/63/f1c3fa431e91a52bad5e3602e9d5df6c94d8d095ac485424efa4eeddb4d2/autopage-0.5.2-py3-none-any.whl", hash = "sha256:f5eae54dd20ccc8b1ff611263fc87bc46608a9cde749bbcfc93339713a429c55", size = 30231, upload-time = "2023-10-16T09:22:17.316Z" }, + { url = "https://files.pythonhosted.org/packages/9b/63/f1c3fa431e91a52bad5e3602e9d5df6c94d8d095ac485424efa4eeddb4d2/autopage-0.5.2-py3-none-any.whl", hash = "sha256:f5eae54dd20ccc8b1ff611263fc87bc46608a9cde749bbcfc93339713a429c55", size = 30231 }, ] [[package]] name = "beartype" version = "0.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/3b/9ecfc75d1f8bb75cbdc87fcb3df7c6ec4bc8f7481cb7102859ade1736c9d/beartype-0.14.1.tar.gz", hash = "sha256:23df4715d19cebb2ce60e53c3cf44cd925843f00c71938222d777ea6332de3cb", size = 964899, upload-time = "2023-06-07T05:38:56.905Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/3b/9ecfc75d1f8bb75cbdc87fcb3df7c6ec4bc8f7481cb7102859ade1736c9d/beartype-0.14.1.tar.gz", hash = 
"sha256:23df4715d19cebb2ce60e53c3cf44cd925843f00c71938222d777ea6332de3cb", size = 964899 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/db/8d01583b4175e0e45a6e6cd0c28db2dae38ffe5477141a7ac3a5a09c8bb9/beartype-0.14.1-py3-none-any.whl", hash = "sha256:0f70fccdb8eb6d7ddfaa3ffe3a0b66cf2edeb13452bd71ad46615775c2fa34f6", size = 739737, upload-time = "2023-06-07T05:38:54.076Z" }, + { url = "https://files.pythonhosted.org/packages/f6/db/8d01583b4175e0e45a6e6cd0c28db2dae38ffe5477141a7ac3a5a09c8bb9/beartype-0.14.1-py3-none-any.whl", hash = "sha256:0f70fccdb8eb6d7ddfaa3ffe3a0b66cf2edeb13452bd71ad46615775c2fa34f6", size = 739737 }, ] [[package]] name = "better-abc" version = "0.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/72/3d630f781659015357cc08cad32aa636b252e007df0bae31184a3d872427/better-abc-0.0.3.tar.gz", hash = "sha256:a880fd6bc9675da2ec991e8712a555bffa0f12722efed78c739f78343cf989f6", size = 2852, upload-time = "2020-11-10T22:47:31.303Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/72/3d630f781659015357cc08cad32aa636b252e007df0bae31184a3d872427/better-abc-0.0.3.tar.gz", hash = "sha256:a880fd6bc9675da2ec991e8712a555bffa0f12722efed78c739f78343cf989f6", size = 2852 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/e8/7d00a23039ab74c5741736ce05d7700eb6237e83747aac4df07a5bf2d074/better_abc-0.0.3-py3-none-any.whl", hash = "sha256:3ae73b473fbeb536a548f542984976e80b821676ae6e18f14e24d8e180647187", size = 3475, upload-time = "2020-11-10T22:47:30.354Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e8/7d00a23039ab74c5741736ce05d7700eb6237e83747aac4df07a5bf2d074/better_abc-0.0.3-py3-none-any.whl", hash = "sha256:3ae73b473fbeb536a548f542984976e80b821676ae6e18f14e24d8e180647187", size = 3475 }, ] [[package]] name = "blinker" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460 } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, ] [[package]] name = "boto3" -version = "1.42.2" +version = "1.42.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/3b/be2c0175ff58d6abef88467d7e93225231059621181bc0cfd32fd8b05260/boto3-1.42.2.tar.gz", hash = "sha256:2b403f503bfe8486fd273e41f0b5a033d0e8dad5d94c5a5c0669e92272bd4f17", size = 112828, upload-time = "2025-12-03T17:50:22.599Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/91/c00b45b5ca95184f7ab6140f586ba7d23074168ee3feae3eaf6954cc11c3/boto3-1.42.5.tar.gz", hash = "sha256:e3b7be255e5e29272b6424af4417005384f5a3f1caf6ca3352258ee1d9b8551a", size = 112754 } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/3f/ba8d7e362ab595279663fdc77902ee46592252ea7210596bdcc3d2eee12b/boto3-1.42.2-py3-none-any.whl", hash = 
"sha256:e93c55fecfecc6f05de604288d216a49b06a3c2c53421848ca4afad55f0614b7", size = 140622, upload-time = "2025-12-03T17:50:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a1/3c/e70f47afdaf9172f90e80615f923fbb09f7fb4e5ea89e2d95562ec7f95c2/boto3-1.42.5-py3-none-any.whl", hash = "sha256:7d22cd102c77c37d552783308eeb01a088c0e3f6e707157dd6d1842b205ffce7", size = 140572 }, ] [[package]] name = "botocore" -version = "1.42.2" +version = "1.42.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/29/1c9b4f73a738d1464c07ccd8b74ed7f21d7f00319bfd6e397a0e897fd61f/botocore-1.42.2.tar.gz", hash = "sha256:3dbeba76168764219cbe392aa67cbc9265cfa05b09970ed5f2e0c786b8ac5010", size = 14843197, upload-time = "2025-12-03T17:50:11.065Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/46/5b40b1deb780869ca9f0c1de47062a78a0494b53d6f9d6bad10fc38eef9d/botocore-1.42.5.tar.gz", hash = "sha256:37bfc487f14286d9795920807fcb8318b940835b18fff6bec5253449f377136f", size = 14851117 } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/b3/4d413a69696a5d096af3f27c91e40f841886aecd849ee62dbb366c50d7ae/botocore-1.42.2-py3-none-any.whl", hash = "sha256:8bb3f0ce39c6a7f63b404a2632ab1a5189187b27317c7b97fe45494677633b5d", size = 14517436, upload-time = "2025-12-03T17:50:07.589Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9a/da5e6cabf4da855d182fcdacf3573b69f30899e0e6c3e0d91ce6ad92ce74/botocore-1.42.5-py3-none-any.whl", hash = "sha256:6aa487f1876c881e2143f6a186b7d8faaf042fc05e0ba7421d821f145356a0c9", size = 14525346 }, ] [[package]] name = "cachetools" version = "6.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fb/44/ca1675be2a83aeee1886ab745b28cda92093066590233cc501890eb8417a/cachetools-6.2.2.tar.gz", hash = 
"sha256:8e6d266b25e539df852251cfd6f990b4bc3a141db73b939058d809ebd2590fc6", size = 31571, upload-time = "2025-11-13T17:42:51.465Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/44/ca1675be2a83aeee1886ab745b28cda92093066590233cc501890eb8417a/cachetools-6.2.2.tar.gz", hash = "sha256:8e6d266b25e539df852251cfd6f990b4bc3a141db73b939058d809ebd2590fc6", size = 31571 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/46/eb6eca305c77a4489affe1c5d8f4cae82f285d9addd8de4ec084a7184221/cachetools-6.2.2-py3-none-any.whl", hash = "sha256:6c09c98183bf58560c97b2abfcedcbaf6a896a490f534b031b661d3723b45ace", size = 11503, upload-time = "2025-11-13T17:42:50.232Z" }, + { url = "https://files.pythonhosted.org/packages/e6/46/eb6eca305c77a4489affe1c5d8f4cae82f285d9addd8de4ec084a7184221/cachetools-6.2.2-py3-none-any.whl", hash = "sha256:6c09c98183bf58560c97b2abfcedcbaf6a896a490f534b031b661d3723b45ace", size = 11503 }, ] [[package]] name = "certifi" version = "2025.11.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538 } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438 }, ] [[package]] @@ -309,120 +308,120 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser", marker = "implementation_name != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, - { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, - { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = 
"2025-09-08T23:22:48.677Z" }, - { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, - { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, - { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, - { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, - { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, - { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, - { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, - { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, - { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, - { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, - { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, - { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, - { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, - { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, - { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, - { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, - { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, - { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, - { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, - { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, - { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, - { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, - { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, - { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, - { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, - { url = 
"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, - { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, - { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, - { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, - { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = 
"sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271 }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048 }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529 }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097 }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983 }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519 }, + { url = 
"https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572 }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963 }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361 }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932 }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557 }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762 }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230 }, + { url = 
"https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043 }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446 }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101 }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948 }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422 }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499 }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928 }, 
+ { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302 }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909 }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402 }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780 }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320 }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487 }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049 }, + { url = 
"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793 }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300 }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244 }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828 }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926 }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328 }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650 }, + { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687 }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773 }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013 }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593 }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354 }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480 }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584 }, + { url = 
"https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443 }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437 }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487 }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726 }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195 }, ] [[package]] name = "chardet" version = "5.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618, upload-time = "2023-08-01T19:23:02.662Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385, upload-time = "2023-08-01T19:23:00.661Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 }, ] [[package]] name = "charset-normalizer" version = "3.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = 
"2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 
107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, 
upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, - { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, - { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, - { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, - { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, - { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, - { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, - { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, 
- { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, - { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425 }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162 }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558 }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497 }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240 }, + { url = 
"https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471 }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864 }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647 }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110 }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839 }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667 }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535 }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816 }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694 }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131 }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390 }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091 }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936 }, + { url = 
"https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180 }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346 }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874 }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076 }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601 }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376 }, + { url = 
"https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825 }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583 }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366 }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300 }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465 }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404 }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092 }, + { url = 
"https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408 }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746 }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889 }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641 }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779 }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035 }, + { url = 
"https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542 }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524 }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395 }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680 }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045 }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687 }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014 }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044 }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940 }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104 }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743 }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402 }, ] [[package]] @@ -437,9 +436,9 @@ dependencies = [ { name = "toolz" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/7d/812f01e7b2ddf28a0caa8dde56bd951a2c8f691c9bbfce38d469458d1502/chex-0.1.91.tar.gz", hash = "sha256:65367a521415ada905b8c0222b0a41a68337fcadf79a1fb6fc992dbd95dd9f76", size = 90302, upload-time = "2025-09-01T21:49:32.834Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/7d/812f01e7b2ddf28a0caa8dde56bd951a2c8f691c9bbfce38d469458d1502/chex-0.1.91.tar.gz", hash = 
"sha256:65367a521415ada905b8c0222b0a41a68337fcadf79a1fb6fc992dbd95dd9f76", size = 90302 } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/0c/96102c01dd02ae740d4afc3644d5c7d7fc51d3feefd67300a2aa1ddbf7cb/chex-0.1.91-py3-none-any.whl", hash = "sha256:6fc4cbfc22301c08d4a7ef706045668410100962eba8ba6af03fa07f4e5dcf9b", size = 100965, upload-time = "2025-09-01T21:49:31.141Z" }, + { url = "https://files.pythonhosted.org/packages/12/0c/96102c01dd02ae740d4afc3644d5c7d7fc51d3feefd67300a2aa1ddbf7cb/chex-0.1.91-py3-none-any.whl", hash = "sha256:6fc4cbfc22301c08d4a7ef706045668410100962eba8ba6af03fa07f4e5dcf9b", size = 100965 }, ] [[package]] @@ -449,9 +448,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065 } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274 }, ] [[package]] @@ -465,18 +464,18 @@ dependencies = [ { name = "pyyaml" }, { name = "stevedore" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/e5/4b/54c6c59c35559c0364981a96dba327673368e91ea5a078e5ae71c201b98c/cliff-4.13.0.tar.gz", hash = "sha256:54df5434f12d3d9f0724f50feef950ee4b79ed1bd560b42fa28901a1c9656e7f", size = 89337, upload-time = "2025-12-04T05:49:15.059Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/4b/54c6c59c35559c0364981a96dba327673368e91ea5a078e5ae71c201b98c/cliff-4.13.0.tar.gz", hash = "sha256:54df5434f12d3d9f0724f50feef950ee4b79ed1bd560b42fa28901a1c9656e7f", size = 89337 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/b3/c6d1c385eb332e49cd55e311927290641e8d823707bafc6c2fe3ef7b9d38/cliff-4.13.0-py3-none-any.whl", hash = "sha256:3eb366951244a8f2a08e59a97f9ef4e72b11df93facf4ee83d87a319152d26dd", size = 86908, upload-time = "2025-12-04T05:49:14.028Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b3/c6d1c385eb332e49cd55e311927290641e8d823707bafc6c2fe3ef7b9d38/cliff-4.13.0-py3-none-any.whl", hash = "sha256:3eb366951244a8f2a08e59a97f9ef4e72b11df93facf4ee83d87a319152d26dd", size = 86908 }, ] [[package]] name = "cloudpickle" version = "3.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } +sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330 } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = 
"2025-11-03T09:25:25.534Z" }, + { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228 }, ] [[package]] @@ -486,34 +485,34 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/4b/9633e72dcd9ac28ab72c661feeb7ece5d01b55e7c9b0ef3331fb102e1506/cmaes-0.12.0.tar.gz", hash = "sha256:6aab41eee2f38bf917560a7e7d1ba0060632cd44cdf7ac2a10704da994624182", size = 52779, upload-time = "2025-07-23T07:01:53.576Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/4b/9633e72dcd9ac28ab72c661feeb7ece5d01b55e7c9b0ef3331fb102e1506/cmaes-0.12.0.tar.gz", hash = "sha256:6aab41eee2f38bf917560a7e7d1ba0060632cd44cdf7ac2a10704da994624182", size = 52779 } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/57/f78b7ed51b3536cc80b4322db2cbbb9d1f409736b852eef0493d9fd8474d/cmaes-0.12.0-py3-none-any.whl", hash = "sha256:d0e3e50ce28a36294bffa16a5626c15d23155824cf6b0a373db30dbbea9b2256", size = 64519, upload-time = "2025-07-23T07:01:52.358Z" }, + { url = "https://files.pythonhosted.org/packages/33/57/f78b7ed51b3536cc80b4322db2cbbb9d1f409736b852eef0493d9fd8474d/cmaes-0.12.0-py3-none-any.whl", hash = "sha256:d0e3e50ce28a36294bffa16a5626c15d23155824cf6b0a373db30dbbea9b2256", size = 64519 }, ] [[package]] name = "cmd2" -version = "2.7.0" +version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "gnureadline", marker = "sys_platform == 'darwin'" }, { name = "pyperclip" }, { name = "pyreadline3", marker = "sys_platform == 'win32'" }, + { name = "rich" }, { name = "rich-argparse" }, - { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/68/4bf43d284e41c01c6011146e5c2824aa6f17a3bb1ef10ba3dbbae5cf31dc/cmd2-2.7.0.tar.gz", hash = 
"sha256:81d8135b46210e1d03a5a810baf859069a62214788ceeec3588f44eed86fbeeb", size = 593131, upload-time = "2025-06-30T16:54:26.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/0c/391fcec0218d7319a9b2aefeb42c4f92ee80f27d36b09b995a792a74b4ae/cmd2-3.0.0.tar.gz", hash = "sha256:f6fab21d2b344a3ab9fe174a6286c9fb4f43a185ad1dfacd13ef017a26a2c333", size = 1002183 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/83/0f65933b7daa436912173f3d63232d158b60686318fccc7cf458ff15bfe8/cmd2-2.7.0-py3-none-any.whl", hash = "sha256:c85faf603e8cfeb4302206f49c0530a83d63386b0d90ff6a957f2c816eb767d7", size = 154309, upload-time = "2025-06-30T16:54:25.039Z" }, + { url = "https://files.pythonhosted.org/packages/17/32/39dd1cf5b912b7674aca93c9926e66bac368da7eec95408aa83779c60570/cmd2-3.0.0-py3-none-any.whl", hash = "sha256:a564065a35f1bb172dfca1c2b9e8c17c930ab49af74f9cae8dfdf1891cff4595", size = 148644 }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] [[package]] @@ -523,9 +522,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/61/f083b5ac52e505dfc1c624eafbf8c7589a0d7f32daa398d2e7590efa5fda/colorlog-6.10.1.tar.gz", hash = "sha256:eb4ae5cb65fe7fec7773c2306061a8e63e02efc2c72eba9d27b0fa23c94f1321", size = 17162, upload-time = "2025-10-16T16:14:11.978Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/61/f083b5ac52e505dfc1c624eafbf8c7589a0d7f32daa398d2e7590efa5fda/colorlog-6.10.1.tar.gz", hash = "sha256:eb4ae5cb65fe7fec7773c2306061a8e63e02efc2c72eba9d27b0fa23c94f1321", size = 17162 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/c1/e419ef3723a074172b68aaa89c9f3de486ed4c2399e2dbd8113a4fdcaf9e/colorlog-6.10.1-py3-none-any.whl", hash = "sha256:2d7e8348291948af66122cff006c9f8da6255d224e7cf8e37d8de2df3bad8c9c", size = 11743, upload-time = "2025-10-16T16:14:10.512Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c1/e419ef3723a074172b68aaa89c9f3de486ed4c2399e2dbd8113a4fdcaf9e/colorlog-6.10.1-py3-none-any.whl", hash = "sha256:2d7e8348291948af66122cff006c9f8da6255d224e7cf8e37d8de2df3bad8c9c", size = 11743 }, ] [[package]] @@ -535,137 +534,137 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, - { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, - { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, - { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" }, - { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" }, - { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" }, - { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" }, - { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" }, - { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" }, - { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" }, - { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" }, - { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" }, - { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" }, - { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" }, - { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" }, - { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" }, - { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" }, - { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" }, - { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" }, - { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" }, - { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" }, - { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" }, - { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" }, - { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" }, - { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" }, - { url = 
"https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" }, - { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" }, - { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" }, - { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" }, - { url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" }, - { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" }, - { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" }, - { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" }, - { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" }, - { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" }, - { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" }, - { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" }, - { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" }, - { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" }, - { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, - { url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419 }, + { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979 }, + { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653 }, + { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536 }, + { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397 }, + { url = 
"https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601 }, + { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288 }, + { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386 }, + { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018 }, + { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567 }, + { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655 }, + { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257 }, + { url = 
"https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034 }, + { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672 }, + { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234 }, + { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169 }, + { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859 }, + { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062 }, + { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932 }, + { url = 
"https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024 }, + { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578 }, + { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524 }, + { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730 }, + { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897 }, + { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751 }, + { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486 }, + { url = 
"https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106 }, + { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548 }, + { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297 }, + { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023 }, + { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157 }, + { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570 }, + { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713 }, + { url = 
"https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189 }, + { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251 }, + { url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810 }, + { url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871 }, + { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264 }, + { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819 }, + { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650 }, + { url = 
"https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833 }, + { url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692 }, + { url = "https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424 }, + { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300 }, + { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769 }, + { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892 }, + { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748 }, + { url = 
"https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554 }, + { url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118 }, + { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555 }, + { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295 }, + { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027 }, + { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428 }, + { url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331 }, + { url = 
"https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831 }, ] [[package]] name = "coverage" -version = "7.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = "2025-11-18T13:34:20.766Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/02/bf/638c0427c0f0d47638242e2438127f3c8ee3cfc06c7fdeb16778ed47f836/coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647", size = 217704, upload-time = "2025-11-18T13:32:28.906Z" }, - { url = "https://files.pythonhosted.org/packages/08/e1/706fae6692a66c2d6b871a608bbde0da6281903fa0e9f53a39ed441da36a/coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736", size = 218064, upload-time = "2025-11-18T13:32:30.161Z" }, - { url = "https://files.pythonhosted.org/packages/a9/8b/eb0231d0540f8af3ffda39720ff43cb91926489d01524e68f60e961366e4/coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60", size = 249560, upload-time = "2025-11-18T13:32:31.835Z" }, - { url = "https://files.pythonhosted.org/packages/e9/a1/67fb52af642e974d159b5b379e4d4c59d0ebe1288677fbd04bbffe665a82/coverage-7.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:99d5415c73ca12d558e07776bd957c4222c687b9f1d26fa0e1b57e3598bdcde8", size = 252318, upload-time = "2025-11-18T13:32:33.178Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/e5/38228f31b2c7665ebf9bdfdddd7a184d56450755c7e43ac721c11a4b8dab/coverage-7.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e949ebf60c717c3df63adb4a1a366c096c8d7fd8472608cd09359e1bd48ef59f", size = 253403, upload-time = "2025-11-18T13:32:34.45Z" }, - { url = "https://files.pythonhosted.org/packages/ec/4b/df78e4c8188f9960684267c5a4897836f3f0f20a20c51606ee778a1d9749/coverage-7.12.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d907ddccbca819afa2cd014bc69983b146cca2735a0b1e6259b2a6c10be1e70", size = 249984, upload-time = "2025-11-18T13:32:35.747Z" }, - { url = "https://files.pythonhosted.org/packages/ba/51/bb163933d195a345c6f63eab9e55743413d064c291b6220df754075c2769/coverage-7.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1518ecbad4e6173f4c6e6c4a46e49555ea5679bf3feda5edb1b935c7c44e8a0", size = 251339, upload-time = "2025-11-18T13:32:37.352Z" }, - { url = "https://files.pythonhosted.org/packages/15/40/c9b29cdb8412c837cdcbc2cfa054547dd83affe6cbbd4ce4fdb92b6ba7d1/coverage-7.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51777647a749abdf6f6fd8c7cffab12de68ab93aab15efc72fbbb83036c2a068", size = 249489, upload-time = "2025-11-18T13:32:39.212Z" }, - { url = "https://files.pythonhosted.org/packages/c8/da/b3131e20ba07a0de4437a50ef3b47840dfabf9293675b0cd5c2c7f66dd61/coverage-7.12.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:42435d46d6461a3b305cdfcad7cdd3248787771f53fe18305548cba474e6523b", size = 249070, upload-time = "2025-11-18T13:32:40.598Z" }, - { url = "https://files.pythonhosted.org/packages/70/81/b653329b5f6302c08d683ceff6785bc60a34be9ae92a5c7b63ee7ee7acec/coverage-7.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bcead88c8423e1855e64b8057d0544e33e4080b95b240c2a355334bb7ced937", size = 250929, upload-time = "2025-11-18T13:32:42.915Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/00/250ac3bca9f252a5fb1338b5ad01331ebb7b40223f72bef5b1b2cb03aa64/coverage-7.12.0-cp312-cp312-win32.whl", hash = "sha256:dcbb630ab034e86d2a0f79aefd2be07e583202f41e037602d438c80044957baa", size = 220241, upload-time = "2025-11-18T13:32:44.665Z" }, - { url = "https://files.pythonhosted.org/packages/64/1c/77e79e76d37ce83302f6c21980b45e09f8aa4551965213a10e62d71ce0ab/coverage-7.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:2fd8354ed5d69775ac42986a691fbf68b4084278710cee9d7c3eaa0c28fa982a", size = 221051, upload-time = "2025-11-18T13:32:46.008Z" }, - { url = "https://files.pythonhosted.org/packages/31/f5/641b8a25baae564f9e52cac0e2667b123de961985709a004e287ee7663cc/coverage-7.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:737c3814903be30695b2de20d22bcc5428fdae305c61ba44cdc8b3252984c49c", size = 219692, upload-time = "2025-11-18T13:32:47.372Z" }, - { url = "https://files.pythonhosted.org/packages/b8/14/771700b4048774e48d2c54ed0c674273702713c9ee7acdfede40c2666747/coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941", size = 217725, upload-time = "2025-11-18T13:32:49.22Z" }, - { url = "https://files.pythonhosted.org/packages/17/a7/3aa4144d3bcb719bf67b22d2d51c2d577bf801498c13cb08f64173e80497/coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a", size = 218098, upload-time = "2025-11-18T13:32:50.78Z" }, - { url = "https://files.pythonhosted.org/packages/fc/9c/b846bbc774ff81091a12a10203e70562c91ae71badda00c5ae5b613527b1/coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d", size = 249093, upload-time = "2025-11-18T13:32:52.554Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/b6/67d7c0e1f400b32c883e9342de4a8c2ae7c1a0b57c5de87622b7262e2309/coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211", size = 251686, upload-time = "2025-11-18T13:32:54.862Z" }, - { url = "https://files.pythonhosted.org/packages/cc/75/b095bd4b39d49c3be4bffbb3135fea18a99a431c52dd7513637c0762fecb/coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d", size = 252930, upload-time = "2025-11-18T13:32:56.417Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f3/466f63015c7c80550bead3093aacabf5380c1220a2a93c35d374cae8f762/coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c", size = 249296, upload-time = "2025-11-18T13:32:58.074Z" }, - { url = "https://files.pythonhosted.org/packages/27/86/eba2209bf2b7e28c68698fc13437519a295b2d228ba9e0ec91673e09fa92/coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9", size = 251068, upload-time = "2025-11-18T13:32:59.646Z" }, - { url = "https://files.pythonhosted.org/packages/ec/55/ca8ae7dbba962a3351f18940b359b94c6bafdd7757945fdc79ec9e452dc7/coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0", size = 249034, upload-time = "2025-11-18T13:33:01.481Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d7/39136149325cad92d420b023b5fd900dabdd1c3a0d1d5f148ef4a8cedef5/coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508", size = 248853, upload-time = "2025-11-18T13:33:02.935Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/b6/76e1add8b87ef60e00643b0b7f8f7bb73d4bf5249a3be19ebefc5793dd25/coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc", size = 250619, upload-time = "2025-11-18T13:33:04.336Z" }, - { url = "https://files.pythonhosted.org/packages/95/87/924c6dc64f9203f7a3c1832a6a0eee5a8335dbe5f1bdadcc278d6f1b4d74/coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8", size = 220261, upload-time = "2025-11-18T13:33:06.493Z" }, - { url = "https://files.pythonhosted.org/packages/91/77/dd4aff9af16ff776bf355a24d87eeb48fc6acde54c907cc1ea89b14a8804/coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07", size = 221072, upload-time = "2025-11-18T13:33:07.926Z" }, - { url = "https://files.pythonhosted.org/packages/70/49/5c9dc46205fef31b1b226a6e16513193715290584317fd4df91cdaf28b22/coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc", size = 219702, upload-time = "2025-11-18T13:33:09.631Z" }, - { url = "https://files.pythonhosted.org/packages/9b/62/f87922641c7198667994dd472a91e1d9b829c95d6c29529ceb52132436ad/coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87", size = 218420, upload-time = "2025-11-18T13:33:11.153Z" }, - { url = "https://files.pythonhosted.org/packages/85/dd/1cc13b2395ef15dbb27d7370a2509b4aee77890a464fb35d72d428f84871/coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6", size = 218773, upload-time = "2025-11-18T13:33:12.569Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/40/35773cc4bb1e9d4658d4fb669eb4195b3151bef3bbd6f866aba5cd5dac82/coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7", size = 260078, upload-time = "2025-11-18T13:33:14.037Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ee/231bb1a6ffc2905e396557585ebc6bdc559e7c66708376d245a1f1d330fc/coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560", size = 262144, upload-time = "2025-11-18T13:33:15.601Z" }, - { url = "https://files.pythonhosted.org/packages/28/be/32f4aa9f3bf0b56f3971001b56508352c7753915345d45fab4296a986f01/coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12", size = 264574, upload-time = "2025-11-18T13:33:17.354Z" }, - { url = "https://files.pythonhosted.org/packages/68/7c/00489fcbc2245d13ab12189b977e0cf06ff3351cb98bc6beba8bd68c5902/coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296", size = 259298, upload-time = "2025-11-18T13:33:18.958Z" }, - { url = "https://files.pythonhosted.org/packages/96/b4/f0760d65d56c3bea95b449e02570d4abd2549dc784bf39a2d4721a2d8ceb/coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507", size = 262150, upload-time = "2025-11-18T13:33:20.644Z" }, - { url = "https://files.pythonhosted.org/packages/c5/71/9a9314df00f9326d78c1e5a910f520d599205907432d90d1c1b7a97aa4b1/coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d", size = 259763, upload-time = 
"2025-11-18T13:33:22.189Z" }, - { url = "https://files.pythonhosted.org/packages/10/34/01a0aceed13fbdf925876b9a15d50862eb8845454301fe3cdd1df08b2182/coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2", size = 258653, upload-time = "2025-11-18T13:33:24.239Z" }, - { url = "https://files.pythonhosted.org/packages/8d/04/81d8fd64928acf1574bbb0181f66901c6c1c6279c8ccf5f84259d2c68ae9/coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455", size = 260856, upload-time = "2025-11-18T13:33:26.365Z" }, - { url = "https://files.pythonhosted.org/packages/f2/76/fa2a37bfaeaf1f766a2d2360a25a5297d4fb567098112f6517475eee120b/coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d", size = 220936, upload-time = "2025-11-18T13:33:28.165Z" }, - { url = "https://files.pythonhosted.org/packages/f9/52/60f64d932d555102611c366afb0eb434b34266b1d9266fc2fe18ab641c47/coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c", size = 222001, upload-time = "2025-11-18T13:33:29.656Z" }, - { url = "https://files.pythonhosted.org/packages/77/df/c303164154a5a3aea7472bf323b7c857fed93b26618ed9fc5c2955566bb0/coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d", size = 220273, upload-time = "2025-11-18T13:33:31.415Z" }, - { url = "https://files.pythonhosted.org/packages/bf/2e/fc12db0883478d6e12bbd62d481210f0c8daf036102aa11434a0c5755825/coverage-7.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a1c59b7dc169809a88b21a936eccf71c3895a78f5592051b1af8f4d59c2b4f92", size = 217777, upload-time = "2025-11-18T13:33:32.86Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/c1/ce3e525d223350c6ec16b9be8a057623f54226ef7f4c2fee361ebb6a02b8/coverage-7.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8787b0f982e020adb732b9f051f3e49dd5054cebbc3f3432061278512a2b1360", size = 218100, upload-time = "2025-11-18T13:33:34.532Z" }, - { url = "https://files.pythonhosted.org/packages/15/87/113757441504aee3808cb422990ed7c8bcc2d53a6779c66c5adef0942939/coverage-7.12.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ea5a9f7dc8877455b13dd1effd3202e0bca72f6f3ab09f9036b1bcf728f69ac", size = 249151, upload-time = "2025-11-18T13:33:36.135Z" }, - { url = "https://files.pythonhosted.org/packages/d9/1d/9529d9bd44049b6b05bb319c03a3a7e4b0a8a802d28fa348ad407e10706d/coverage-7.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fdba9f15849534594f60b47c9a30bc70409b54947319a7c4fd0e8e3d8d2f355d", size = 251667, upload-time = "2025-11-18T13:33:37.996Z" }, - { url = "https://files.pythonhosted.org/packages/11/bb/567e751c41e9c03dc29d3ce74b8c89a1e3396313e34f255a2a2e8b9ebb56/coverage-7.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a00594770eb715854fb1c57e0dea08cce6720cfbc531accdb9850d7c7770396c", size = 253003, upload-time = "2025-11-18T13:33:39.553Z" }, - { url = "https://files.pythonhosted.org/packages/e4/b3/c2cce2d8526a02fb9e9ca14a263ca6fc074449b33a6afa4892838c903528/coverage-7.12.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5560c7e0d82b42eb1951e4f68f071f8017c824ebfd5a6ebe42c60ac16c6c2434", size = 249185, upload-time = "2025-11-18T13:33:42.086Z" }, - { url = "https://files.pythonhosted.org/packages/0e/a7/967f93bb66e82c9113c66a8d0b65ecf72fc865adfba5a145f50c7af7e58d/coverage-7.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2e26b481c9159c2773a37947a9718cfdc58893029cdfb177531793e375cfc", size = 251025, upload-time = 
"2025-11-18T13:33:43.634Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b2/f2f6f56337bc1af465d5b2dc1ee7ee2141b8b9272f3bf6213fcbc309a836/coverage-7.12.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6e1a8c066dabcde56d5d9fed6a66bc19a2883a3fe051f0c397a41fc42aedd4cc", size = 248979, upload-time = "2025-11-18T13:33:46.04Z" }, - { url = "https://files.pythonhosted.org/packages/f4/7a/bf4209f45a4aec09d10a01a57313a46c0e0e8f4c55ff2965467d41a92036/coverage-7.12.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f7ba9da4726e446d8dd8aae5a6cd872511184a5d861de80a86ef970b5dacce3e", size = 248800, upload-time = "2025-11-18T13:33:47.546Z" }, - { url = "https://files.pythonhosted.org/packages/b8/b7/1e01b8696fb0521810f60c5bbebf699100d6754183e6cc0679bf2ed76531/coverage-7.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e0f483ab4f749039894abaf80c2f9e7ed77bbf3c737517fb88c8e8e305896a17", size = 250460, upload-time = "2025-11-18T13:33:49.537Z" }, - { url = "https://files.pythonhosted.org/packages/71/ae/84324fb9cb46c024760e706353d9b771a81b398d117d8c1fe010391c186f/coverage-7.12.0-cp314-cp314-win32.whl", hash = "sha256:76336c19a9ef4a94b2f8dc79f8ac2da3f193f625bb5d6f51a328cd19bfc19933", size = 220533, upload-time = "2025-11-18T13:33:51.16Z" }, - { url = "https://files.pythonhosted.org/packages/e2/71/1033629deb8460a8f97f83e6ac4ca3b93952e2b6f826056684df8275e015/coverage-7.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c1059b600aec6ef090721f8f633f60ed70afaffe8ecab85b59df748f24b31fe", size = 221348, upload-time = "2025-11-18T13:33:52.776Z" }, - { url = "https://files.pythonhosted.org/packages/0a/5f/ac8107a902f623b0c251abdb749be282dc2ab61854a8a4fcf49e276fce2f/coverage-7.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:172cf3a34bfef42611963e2b661302a8931f44df31629e5b1050567d6b90287d", size = 219922, upload-time = "2025-11-18T13:33:54.316Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/6e/f27af2d4da367f16077d21ef6fe796c874408219fa6dd3f3efe7751bd910/coverage-7.12.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:aa7d48520a32cb21c7a9b31f81799e8eaec7239db36c3b670be0fa2403828d1d", size = 218511, upload-time = "2025-11-18T13:33:56.343Z" }, - { url = "https://files.pythonhosted.org/packages/67/dd/65fd874aa460c30da78f9d259400d8e6a4ef457d61ab052fd248f0050558/coverage-7.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:90d58ac63bc85e0fb919f14d09d6caa63f35a5512a2205284b7816cafd21bb03", size = 218771, upload-time = "2025-11-18T13:33:57.966Z" }, - { url = "https://files.pythonhosted.org/packages/55/e0/7c6b71d327d8068cb79c05f8f45bf1b6145f7a0de23bbebe63578fe5240a/coverage-7.12.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca8ecfa283764fdda3eae1bdb6afe58bf78c2c3ec2b2edcb05a671f0bba7b3f9", size = 260151, upload-time = "2025-11-18T13:33:59.597Z" }, - { url = "https://files.pythonhosted.org/packages/49/ce/4697457d58285b7200de6b46d606ea71066c6e674571a946a6ea908fb588/coverage-7.12.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:874fe69a0785d96bd066059cd4368022cebbec1a8958f224f0016979183916e6", size = 262257, upload-time = "2025-11-18T13:34:01.166Z" }, - { url = "https://files.pythonhosted.org/packages/2f/33/acbc6e447aee4ceba88c15528dbe04a35fb4d67b59d393d2e0d6f1e242c1/coverage-7.12.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3c889c0b8b283a24d721a9eabc8ccafcfc3aebf167e4cd0d0e23bf8ec4e339", size = 264671, upload-time = "2025-11-18T13:34:02.795Z" }, - { url = "https://files.pythonhosted.org/packages/87/ec/e2822a795c1ed44d569980097be839c5e734d4c0c1119ef8e0a073496a30/coverage-7.12.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bb5b894b3ec09dcd6d3743229dc7f2c42ef7787dc40596ae04c0edda487371e", size = 259231, upload-time = 
"2025-11-18T13:34:04.397Z" }, - { url = "https://files.pythonhosted.org/packages/72/c5/a7ec5395bb4a49c9b7ad97e63f0c92f6bf4a9e006b1393555a02dae75f16/coverage-7.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:79a44421cd5fba96aa57b5e3b5a4d3274c449d4c622e8f76882d76635501fd13", size = 262137, upload-time = "2025-11-18T13:34:06.068Z" }, - { url = "https://files.pythonhosted.org/packages/67/0c/02c08858b764129f4ecb8e316684272972e60777ae986f3865b10940bdd6/coverage-7.12.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:33baadc0efd5c7294f436a632566ccc1f72c867f82833eb59820ee37dc811c6f", size = 259745, upload-time = "2025-11-18T13:34:08.04Z" }, - { url = "https://files.pythonhosted.org/packages/5a/04/4fd32b7084505f3829a8fe45c1a74a7a728cb251aaadbe3bec04abcef06d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:c406a71f544800ef7e9e0000af706b88465f3573ae8b8de37e5f96c59f689ad1", size = 258570, upload-time = "2025-11-18T13:34:09.676Z" }, - { url = "https://files.pythonhosted.org/packages/48/35/2365e37c90df4f5342c4fa202223744119fe31264ee2924f09f074ea9b6d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e71bba6a40883b00c6d571599b4627f50c360b3d0d02bfc658168936be74027b", size = 260899, upload-time = "2025-11-18T13:34:11.259Z" }, - { url = "https://files.pythonhosted.org/packages/05/56/26ab0464ca733fa325e8e71455c58c1c374ce30f7c04cebb88eabb037b18/coverage-7.12.0-cp314-cp314t-win32.whl", hash = "sha256:9157a5e233c40ce6613dead4c131a006adfda70e557b6856b97aceed01b0e27a", size = 221313, upload-time = "2025-11-18T13:34:12.863Z" }, - { url = "https://files.pythonhosted.org/packages/da/1c/017a3e1113ed34d998b27d2c6dba08a9e7cb97d362f0ec988fcd873dcf81/coverage-7.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e84da3a0fd233aeec797b981c51af1cabac74f9bd67be42458365b30d11b5291", size = 222423, upload-time = "2025-11-18T13:34:15.14Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/36/bcc504fdd5169301b52568802bb1b9cdde2e27a01d39fbb3b4b508ab7c2c/coverage-7.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:01d24af36fedda51c2b1aca56e4330a3710f83b02a5ff3743a6b015ffa7c9384", size = 220459, upload-time = "2025-11-18T13:34:17.222Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" }, +version = "7.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/f1/2619559f17f31ba00fc40908efd1fbf1d0a5536eb75dc8341e7d660a08de/coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf", size = 218274 }, + { url = "https://files.pythonhosted.org/packages/2b/11/30d71ae5d6e949ff93b2a79a2c1b4822e00423116c5c6edfaeef37301396/coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f", size = 218638 }, + { url = "https://files.pythonhosted.org/packages/79/c2/fce80fc6ded8d77e53207489d6065d0fed75db8951457f9213776615e0f5/coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb", size = 250129 }, + { url = "https://files.pythonhosted.org/packages/5b/b6/51b5d1eb6fcbb9a1d5d6984e26cbe09018475c2922d554fd724dd0f056ee/coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621", size = 252885 }, + { url = "https://files.pythonhosted.org/packages/0d/f8/972a5affea41de798691ab15d023d3530f9f56a72e12e243f35031846ff7/coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74", size = 253974 }, + { url = "https://files.pythonhosted.org/packages/8a/56/116513aee860b2c7968aa3506b0f59b22a959261d1dbf3aea7b4450a7520/coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57", size = 250538 }, + { url = "https://files.pythonhosted.org/packages/d6/75/074476d64248fbadf16dfafbf93fdcede389ec821f74ca858d7c87d2a98c/coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8", size = 251912 }, + { url = "https://files.pythonhosted.org/packages/f2/d2/aa4f8acd1f7c06024705c12609d8698c51b27e4d635d717cd1934c9668e2/coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d", size = 250054 }, + { url = "https://files.pythonhosted.org/packages/19/98/8df9e1af6a493b03694a1e8070e024e7d2cdc77adedc225a35e616d505de/coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b", size = 249619 }, + { url = "https://files.pythonhosted.org/packages/d8/71/f8679231f3353018ca66ef647fa6fe7b77e6bff7845be54ab84f86233363/coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd", size = 251496 }, + { url = "https://files.pythonhosted.org/packages/04/86/9cb406388034eaf3c606c22094edbbb82eea1fa9d20c0e9efadff20d0733/coverage-7.13.0-cp312-cp312-win32.whl", hash = 
"sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef", size = 220808 }, + { url = "https://files.pythonhosted.org/packages/1c/59/af483673df6455795daf5f447c2f81a3d2fcfc893a22b8ace983791f6f34/coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae", size = 221616 }, + { url = "https://files.pythonhosted.org/packages/64/b0/959d582572b30a6830398c60dd419c1965ca4b5fb38ac6b7093a0d50ca8d/coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080", size = 220261 }, + { url = "https://files.pythonhosted.org/packages/7c/cc/bce226595eb3bf7d13ccffe154c3c487a22222d87ff018525ab4dd2e9542/coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf", size = 218297 }, + { url = "https://files.pythonhosted.org/packages/3b/9f/73c4d34600aae03447dff3d7ad1d0ac649856bfb87d1ca7d681cfc913f9e/coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a", size = 218673 }, + { url = "https://files.pythonhosted.org/packages/63/ab/8fa097db361a1e8586535ae5073559e6229596b3489ec3ef2f5b38df8cb2/coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74", size = 249652 }, + { url = "https://files.pythonhosted.org/packages/90/3a/9bfd4de2ff191feb37ef9465855ca56a6f2f30a3bca172e474130731ac3d/coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6", size = 252251 }, + { url = "https://files.pythonhosted.org/packages/df/61/b5d8105f016e1b5874af0d7c67542da780ccd4a5f2244a433d3e20ceb1ad/coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b", size = 253492 }, + { url = "https://files.pythonhosted.org/packages/f3/b8/0fad449981803cc47a4694768b99823fb23632150743f9c83af329bb6090/coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232", size = 249850 }, + { url = "https://files.pythonhosted.org/packages/9a/e9/8d68337c3125014d918cf4327d5257553a710a2995a6a6de2ac77e5aa429/coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971", size = 251633 }, + { url = "https://files.pythonhosted.org/packages/55/14/d4112ab26b3a1bc4b3c1295d8452dcf399ed25be4cf649002fb3e64b2d93/coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d", size = 249586 }, + { url = "https://files.pythonhosted.org/packages/2c/a9/22b0000186db663b0d82f86c2f1028099ae9ac202491685051e2a11a5218/coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137", size = 249412 }, + { url = "https://files.pythonhosted.org/packages/a1/2e/42d8e0d9e7527fba439acdc6ed24a2b97613b1dc85849b1dd935c2cffef0/coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511", size = 251191 }, + { url = "https://files.pythonhosted.org/packages/a4/af/8c7af92b1377fd8860536aadd58745119252aaaa71a5213e5a8e8007a9f5/coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1", size = 220829 }, + { url = "https://files.pythonhosted.org/packages/58/f9/725e8bf16f343d33cbe076c75dc8370262e194ff10072c0608b8e5cf33a3/coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a", size = 221640 }, + { 
url = "https://files.pythonhosted.org/packages/8a/ff/e98311000aa6933cc79274e2b6b94a2fe0fe3434fca778eba82003675496/coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6", size = 220269 }, + { url = "https://files.pythonhosted.org/packages/cf/cf/bbaa2e1275b300343ea865f7d424cc0a2e2a1df6925a070b2b2d5d765330/coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a", size = 218990 }, + { url = "https://files.pythonhosted.org/packages/21/1d/82f0b3323b3d149d7672e7744c116e9c170f4957e0c42572f0366dbb4477/coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8", size = 219340 }, + { url = "https://files.pythonhosted.org/packages/fb/e3/fe3fd4702a3832a255f4d43013eacb0ef5fc155a5960ea9269d8696db28b/coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053", size = 260638 }, + { url = "https://files.pythonhosted.org/packages/ad/01/63186cb000307f2b4da463f72af9b85d380236965574c78e7e27680a2593/coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071", size = 262705 }, + { url = "https://files.pythonhosted.org/packages/7c/a1/c0dacef0cc865f2455d59eed3548573ce47ed603205ffd0735d1d78b5906/coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e", size = 265125 }, + { url = "https://files.pythonhosted.org/packages/ef/92/82b99223628b61300bd382c205795533bed021505eab6dd86e11fb5d7925/coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493", size = 259844 }, + { url = "https://files.pythonhosted.org/packages/cf/2c/89b0291ae4e6cd59ef042708e1c438e2290f8c31959a20055d8768349ee2/coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0", size = 262700 }, + { url = "https://files.pythonhosted.org/packages/bf/f9/a5f992efae1996245e796bae34ceb942b05db275e4b34222a9a40b9fbd3b/coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e", size = 260321 }, + { url = "https://files.pythonhosted.org/packages/4c/89/a29f5d98c64fedbe32e2ac3c227fbf78edc01cc7572eee17d61024d89889/coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c", size = 259222 }, + { url = "https://files.pythonhosted.org/packages/b3/c3/940fe447aae302a6701ee51e53af7e08b86ff6eed7631e5740c157ee22b9/coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e", size = 261411 }, + { url = "https://files.pythonhosted.org/packages/eb/31/12a4aec689cb942a89129587860ed4d0fd522d5fda81237147fde554b8ae/coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46", size = 221505 }, + { url = "https://files.pythonhosted.org/packages/65/8c/3b5fe3259d863572d2b0827642c50c3855d26b3aefe80bdc9eba1f0af3b0/coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39", size = 222569 }, + { url = "https://files.pythonhosted.org/packages/b0/39/f71fa8316a96ac72fc3908839df651e8eccee650001a17f2c78cdb355624/coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e", size = 220841 }, + { url = 
"https://files.pythonhosted.org/packages/f8/4b/9b54bedda55421449811dcd5263a2798a63f48896c24dfb92b0f1b0845bd/coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256", size = 218343 }, + { url = "https://files.pythonhosted.org/packages/59/df/c3a1f34d4bba2e592c8979f924da4d3d4598b0df2392fbddb7761258e3dc/coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a", size = 218672 }, + { url = "https://files.pythonhosted.org/packages/07/62/eec0659e47857698645ff4e6ad02e30186eb8afd65214fd43f02a76537cb/coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9", size = 249715 }, + { url = "https://files.pythonhosted.org/packages/23/2d/3c7ff8b2e0e634c1f58d095f071f52ed3c23ff25be524b0ccae8b71f99f8/coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19", size = 252225 }, + { url = "https://files.pythonhosted.org/packages/aa/ac/fb03b469d20e9c9a81093575003f959cf91a4a517b783aab090e4538764b/coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be", size = 253559 }, + { url = "https://files.pythonhosted.org/packages/29/62/14afa9e792383c66cc0a3b872a06ded6e4ed1079c7d35de274f11d27064e/coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb", size = 249724 }, + { url = "https://files.pythonhosted.org/packages/31/b7/333f3dab2939070613696ab3ee91738950f0467778c6e5a5052e840646b7/coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8", size = 251582 }, + { url = "https://files.pythonhosted.org/packages/81/cb/69162bda9381f39b2287265d7e29ee770f7c27c19f470164350a38318764/coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b", size = 249538 }, + { url = "https://files.pythonhosted.org/packages/e0/76/350387b56a30f4970abe32b90b2a434f87d29f8b7d4ae40d2e8a85aacfb3/coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9", size = 249349 }, + { url = "https://files.pythonhosted.org/packages/86/0d/7f6c42b8d59f4c7e43ea3059f573c0dcfed98ba46eb43c68c69e52ae095c/coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927", size = 251011 }, + { url = "https://files.pythonhosted.org/packages/d7/f1/4bb2dff379721bb0b5c649d5c5eaf438462cad824acf32eb1b7ca0c7078e/coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f", size = 221091 }, + { url = "https://files.pythonhosted.org/packages/ba/44/c239da52f373ce379c194b0ee3bcc121020e397242b85f99e0afc8615066/coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc", size = 221904 }, + { url = "https://files.pythonhosted.org/packages/89/1f/b9f04016d2a29c2e4a0307baefefad1a4ec5724946a2b3e482690486cade/coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b", size = 220480 }, + { url = "https://files.pythonhosted.org/packages/16/d4/364a1439766c8e8647860584171c36010ca3226e6e45b1753b1b249c5161/coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28", size = 219074 }, + { url = 
"https://files.pythonhosted.org/packages/ce/f4/71ba8be63351e099911051b2089662c03d5671437a0ec2171823c8e03bec/coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe", size = 219342 }, + { url = "https://files.pythonhosted.org/packages/5e/25/127d8ed03d7711a387d96f132589057213e3aef7475afdaa303412463f22/coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657", size = 260713 }, + { url = "https://files.pythonhosted.org/packages/fd/db/559fbb6def07d25b2243663b46ba9eb5a3c6586c0c6f4e62980a68f0ee1c/coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff", size = 262825 }, + { url = "https://files.pythonhosted.org/packages/37/99/6ee5bf7eff884766edb43bd8736b5e1c5144d0fe47498c3779326fe75a35/coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3", size = 265233 }, + { url = "https://files.pythonhosted.org/packages/d8/90/92f18fe0356ea69e1f98f688ed80cec39f44e9f09a1f26a1bbf017cc67f2/coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b", size = 259779 }, + { url = "https://files.pythonhosted.org/packages/90/5d/b312a8b45b37a42ea7d27d7d3ff98ade3a6c892dd48d1d503e773503373f/coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d", size = 262700 }, + { url = "https://files.pythonhosted.org/packages/63/f8/b1d0de5c39351eb71c366f872376d09386640840a2e09b0d03973d791e20/coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e", size = 260302 }, + { url = "https://files.pythonhosted.org/packages/aa/7c/d42f4435bc40c55558b3109a39e2d456cddcec37434f62a1f1230991667a/coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940", size = 259136 }, + { url = "https://files.pythonhosted.org/packages/b8/d3/23413241dc04d47cfe19b9a65b32a2edd67ecd0b817400c2843ebc58c847/coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2", size = 261467 }, + { url = "https://files.pythonhosted.org/packages/13/e6/6e063174500eee216b96272c0d1847bf215926786f85c2bd024cf4d02d2f/coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7", size = 221875 }, + { url = "https://files.pythonhosted.org/packages/3b/46/f4fb293e4cbe3620e3ac2a3e8fd566ed33affb5861a9b20e3dd6c1896cbc/coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc", size = 222982 }, + { url = "https://files.pythonhosted.org/packages/68/62/5b3b9018215ed9733fbd1ae3b2ed75c5de62c3b55377a52cae732e1b7805/coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a", size = 221016 }, + { url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068 }, ] [[package]] @@ -675,62 +674,62 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = 
"https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004 }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667 }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807 }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615 }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800 }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707 }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541 }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464 }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = 
"sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838 }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596 }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782 }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381 }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988 }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451 }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007 }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012 }, + { 
url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728 }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078 }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460 }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237 }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344 }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564 }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", 
size = 4292415 }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457 }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074 }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569 }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941 }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339 }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315 }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331 }, + { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248 }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089 }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029 }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222 }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280 }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958 }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714 }, + { url 
= "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970 }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236 }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642 }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126 }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573 }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695 }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720 }, + { url = 
"https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740 }, ] [[package]] name = "cycler" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321 }, ] [[package]] @@ -742,9 +741,9 @@ dependencies = [ { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/7f/cfb2a00d10f6295332616e5b22f2ae3aaf2841a3afa6c49262acb6b94f5b/databricks_sdk-0.73.0.tar.gz", hash = "sha256:db09eaaacd98e07dded78d3e7ab47d2f6c886e0380cb577977bd442bace8bd8d", size = 801017, upload-time = "2025-11-05T06:52:58.509Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/7f/cfb2a00d10f6295332616e5b22f2ae3aaf2841a3afa6c49262acb6b94f5b/databricks_sdk-0.73.0.tar.gz", hash = 
"sha256:db09eaaacd98e07dded78d3e7ab47d2f6c886e0380cb577977bd442bace8bd8d", size = 801017 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/27/b822b474aaefb684d11df358d52e012699a2a8af231f9b47c54b73f280cb/databricks_sdk-0.73.0-py3-none-any.whl", hash = "sha256:a4d3cfd19357a2b459d2dc3101454d7f0d1b62865ce099c35d0c342b66ac64ff", size = 753896, upload-time = "2025-11-05T06:52:56.451Z" }, + { url = "https://files.pythonhosted.org/packages/a7/27/b822b474aaefb684d11df358d52e012699a2a8af231f9b47c54b73f280cb/databricks_sdk-0.73.0-py3-none-any.whl", hash = "sha256:a4d3cfd19357a2b459d2dc3101454d7f0d1b62865ce099c35d0c342b66ac64ff", size = 753896 }, ] [[package]] @@ -767,9 +766,9 @@ dependencies = [ { name = "tqdm" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/bf/0dae295d6d1ba0b1a200a9dd216838464b5bbd05da01407cb1330b377445/datasets-4.4.1.tar.gz", hash = "sha256:80322699aa8c0bbbdb7caa87906da689c3c2e29523cff698775c67f28fdab1fc", size = 585341, upload-time = "2025-11-05T16:00:38.162Z" } +sdist = { url = "https://files.pythonhosted.org/packages/93/bf/0dae295d6d1ba0b1a200a9dd216838464b5bbd05da01407cb1330b377445/datasets-4.4.1.tar.gz", hash = "sha256:80322699aa8c0bbbdb7caa87906da689c3c2e29523cff698775c67f28fdab1fc", size = 585341 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/5e/6f8d874366788ad5d549e9ba258037d974dda6e004843be1bda794571701/datasets-4.4.1-py3-none-any.whl", hash = "sha256:c1163de5211e42546079ab355cc0250c7e6db16eb209ac5ac6252f801f596c44", size = 511591, upload-time = "2025-11-05T16:00:36.365Z" }, + { url = "https://files.pythonhosted.org/packages/3b/5e/6f8d874366788ad5d549e9ba258037d974dda6e004843be1bda794571701/datasets-4.4.1-py3-none-any.whl", hash = "sha256:c1163de5211e42546079ab355cc0250c7e6db16eb209ac5ac6252f801f596c44", size = 511591 }, ] [[package]] @@ -782,18 +781,18 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/e1/66/c200febf00c8b0115825a99c3fdb2e77798419b8a632b1f8e4c81a75e734/diff_cover-9.7.2.tar.gz", hash = "sha256:872c820d2ecbf79c61d52c7dc70419015e0ab9289589566c791dd270fc0c6e3b", size = 100374, upload-time = "2025-11-11T02:49:34.883Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/66/c200febf00c8b0115825a99c3fdb2e77798419b8a632b1f8e4c81a75e734/diff_cover-9.7.2.tar.gz", hash = "sha256:872c820d2ecbf79c61d52c7dc70419015e0ab9289589566c791dd270fc0c6e3b", size = 100374 } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/dd/6db45462ffbfe3791c6de81be5db1c9e883e6b664ae39fc8adf947c738a6/diff_cover-9.7.2-py3-none-any.whl", hash = "sha256:cd6498620c747c2493a6c83c14362c32868bfd91cd8d0dd093f136070ec4ffc5", size = 56015, upload-time = "2025-11-11T02:49:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/21/dd/6db45462ffbfe3791c6de81be5db1c9e883e6b664ae39fc8adf947c738a6/diff_cover-9.7.2-py3-none-any.whl", hash = "sha256:cd6498620c747c2493a6c83c14362c32868bfd91cd8d0dd093f136070ec4ffc5", size = 56015 }, ] [[package]] name = "dill" version = "0.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976, upload-time = "2025-04-16T00:41:48.867Z" } +sdist = { url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976 } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668, upload-time = 
"2025-04-16T00:41:47.671Z" }, + { url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668 }, ] [[package]] @@ -805,9 +804,9 @@ dependencies = [ { name = "requests" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, ] [[package]] @@ -818,16 +817,16 @@ dependencies = [ { name = "python-dotenv" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/b7/545d2c10c1fc15e48653c91efde329a790f2eecfbbf2bd16003b5db2bab0/dotenv-0.9.9-py2.py3-none-any.whl", hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9", size = 1892, upload-time = "2025-02-19T22:15:01.647Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/545d2c10c1fc15e48653c91efde329a790f2eecfbbf2bd16003b5db2bab0/dotenv-0.9.9-py2.py3-none-any.whl", 
hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9", size = 1892 }, ] [[package]] name = "einops" version = "0.8.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/81/df4fbe24dff8ba3934af99044188e20a98ed441ad17a274539b74e82e126/einops-0.8.1.tar.gz", hash = "sha256:de5d960a7a761225532e0f1959e5315ebeafc0cd43394732f103ca44b9837e84", size = 54805, upload-time = "2025-02-09T03:17:00.434Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/81/df4fbe24dff8ba3934af99044188e20a98ed441ad17a274539b74e82e126/einops-0.8.1.tar.gz", hash = "sha256:de5d960a7a761225532e0f1959e5315ebeafc0cd43394732f103ca44b9837e84", size = 54805 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/62/9773de14fe6c45c23649e98b83231fffd7b9892b6cf863251dc2afa73643/einops-0.8.1-py3-none-any.whl", hash = "sha256:919387eb55330f5757c6bea9165c5ff5cfe63a642682ea788a6d472576d81737", size = 64359, upload-time = "2025-02-09T03:17:01.998Z" }, + { url = "https://files.pythonhosted.org/packages/87/62/9773de14fe6c45c23649e98b83231fffd7b9892b6cf863251dc2afa73643/einops-0.8.1-py3-none-any.whl", hash = "sha256:919387eb55330f5757c6bea9165c5ff5cfe63a642682ea788a6d472576d81737", size = 64359 }, ] [[package]] @@ -840,18 +839,18 @@ dependencies = [ { name = "typing-extensions" }, { name = "wadler-lindig" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/17/96a3b61a80c63c75974bd84318cee2818a23d60628e77fb6ed00eb623a69/equinox-0.13.2.tar.gz", hash = "sha256:509ad744ff99b7c684d45230d6890f9e78eac1a556d7a06db1eff664a3cac74f", size = 139386, upload-time = "2025-10-09T10:15:39.595Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/17/96a3b61a80c63c75974bd84318cee2818a23d60628e77fb6ed00eb623a69/equinox-0.13.2.tar.gz", hash = "sha256:509ad744ff99b7c684d45230d6890f9e78eac1a556d7a06db1eff664a3cac74f", size = 139386 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c6/ce/c3bc53bdda6edb050859939a8e91e206123f96f50906e7ba7c027c6c535f/equinox-0.13.2-py3-none-any.whl", hash = "sha256:bc1ee687e4841945d8b776664403839639a05e2f2c02c1da353ff3386e0e43b0", size = 179235, upload-time = "2025-10-09T10:15:37.991Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ce/c3bc53bdda6edb050859939a8e91e206123f96f50906e7ba7c027c6c535f/equinox-0.13.2-py3-none-any.whl", hash = "sha256:bc1ee687e4841945d8b776664403839639a05e2f2c02c1da353ff3386e0e43b0", size = 179235 }, ] [[package]] name = "etils" version = "1.13.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/a0/522bbff0f3cdd37968f90dd7f26c7aa801ed87f5ba335f156de7f2b88a48/etils-1.13.0.tar.gz", hash = "sha256:a5b60c71f95bcd2d43d4e9fb3dc3879120c1f60472bb5ce19f7a860b1d44f607", size = 106368, upload-time = "2025-07-15T10:29:10.563Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/a0/522bbff0f3cdd37968f90dd7f26c7aa801ed87f5ba335f156de7f2b88a48/etils-1.13.0.tar.gz", hash = "sha256:a5b60c71f95bcd2d43d4e9fb3dc3879120c1f60472bb5ce19f7a860b1d44f607", size = 106368 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/98/87b5946356095738cb90a6df7b35ff69ac5750f6e783d5fbcc5cb3b6cbd7/etils-1.13.0-py3-none-any.whl", hash = "sha256:d9cd4f40fbe77ad6613b7348a18132cc511237b6c076dbb89105c0b520a4c6bb", size = 170603, upload-time = "2025-07-15T10:29:09.076Z" }, + { url = "https://files.pythonhosted.org/packages/e7/98/87b5946356095738cb90a6df7b35ff69ac5750f6e783d5fbcc5cb3b6cbd7/etils-1.13.0-py3-none-any.whl", hash = "sha256:d9cd4f40fbe77ad6613b7348a18132cc511237b6c076dbb89105c0b520a4c6bb", size = 170603 }, ] [package.optional-dependencies] @@ -869,14 +868,14 @@ epy = [ name = "fancy-einsum" version = "0.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b9/b1/f5a13cdc05b9a16502d760ead310a689a1538f3fee9618b92011200b9717/fancy_einsum-0.0.3.tar.gz", hash = "sha256:05ca6689999d0949bdaa5320c81117effa13644ec68a200121e93d7ebf3d3356", size = 4916, upload-time = "2022-02-04T01:53:46.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/b1/f5a13cdc05b9a16502d760ead310a689a1538f3fee9618b92011200b9717/fancy_einsum-0.0.3.tar.gz", hash = "sha256:05ca6689999d0949bdaa5320c81117effa13644ec68a200121e93d7ebf3d3356", size = 4916 } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/14/26fc262ba70976eea9a42e67b05c67aa78a0ee38332d9d094cca5d2c5ec3/fancy_einsum-0.0.3-py3-none-any.whl", hash = "sha256:e0bf33587a61822b0668512ada237a0ffa5662adfb9acfcbb0356ee15a0396a1", size = 6239, upload-time = "2022-02-04T01:53:44.44Z" }, + { url = "https://files.pythonhosted.org/packages/27/14/26fc262ba70976eea9a42e67b05c67aa78a0ee38332d9d094cca5d2c5ec3/fancy_einsum-0.0.3-py3-none-any.whl", hash = "sha256:e0bf33587a61822b0668512ada237a0ffa5662adfb9acfcbb0356ee15a0396a1", size = 6239 }, ] [[package]] name = "fastapi" -version = "0.123.8" +version = "0.124.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -884,18 +883,18 @@ dependencies = [ { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/99/8f2d4be9af90b3e56b865a07bdd390398e53d67c9c95c729b5772e528179/fastapi-0.123.8.tar.gz", hash = "sha256:d106de125c8dd3d4341517fa2ae36d9cffe82a6500bd910d3c080e6c42b1b490", size = 354253, upload-time = "2025-12-04T13:02:54.58Z" } +sdist = { url = "https://files.pythonhosted.org/packages/48/9c/11969bd3e3bc4aa3a711f83dd3720239d3565a934929c74fc32f6c9f3638/fastapi-0.124.0.tar.gz", hash = "sha256:260cd178ad75e6d259991f2fd9b0fee924b224850079df576a3ba604ce58f4e6", size = 357623 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3c/23/dd53f49e8309454e2c52bdfffe7493cc0f00d10e2fc885d3f4d64c90731f/fastapi-0.123.8-py3-none-any.whl", hash = "sha256:d7c8db95f61d398f7e1491ad52e6b2362755f8ec61c7a740b29e70f18a2901e3", size = 111645, upload-time = "2025-12-04T13:02:53.163Z" }, + { url = "https://files.pythonhosted.org/packages/4d/29/9e1e82e16e9a1763d3b55bfbe9b2fa39d7175a1fd97685c482fa402e111d/fastapi-0.124.0-py3-none-any.whl", hash = "sha256:91596bdc6dde303c318f06e8d2bc75eafb341fc793a0c9c92c0bc1db1ac52480", size = 112505 }, ] [[package]] name = "filelock" version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054 }, ] [[package]] @@ -910,9 +909,9 @@ dependencies = [ { name = "markupsafe" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = 
"sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160, upload-time = "2025-08-19T21:03:21.205Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308, upload-time = "2025-08-19T21:03:19.499Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308 }, ] [[package]] @@ -923,148 +922,148 @@ dependencies = [ { name = "flask" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463, upload-time = "2025-06-11T01:32:08.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463 } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244, upload-time = "2025-06-11T01:32:07.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244 }, ] [[package]] name = "fonttools" version = "4.61.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/33/f9/0e84d593c0e12244150280a630999835a64f2852276161b62a0f98318de0/fonttools-4.61.0.tar.gz", hash = "sha256:ec520a1f0c7758d7a858a00f090c1745f6cde6a7c5e76fb70ea4044a15f712e7", size = 3561884, upload-time = "2025-11-28T17:05:49.491Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/5d/19e5939f773c7cb05480fe2e881d63870b63ee2b4bdb9a77d55b1d36c7b9/fonttools-4.61.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e24a1565c4e57111ec7f4915f8981ecbb61adf66a55f378fdc00e206059fcfef", size = 2846930, upload-time = "2025-11-28T17:04:46.639Z" }, - { url = "https://files.pythonhosted.org/packages/25/b2/0658faf66f705293bd7e739a4f038302d188d424926be9c59bdad945664b/fonttools-4.61.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2bfacb5351303cae9f072ccf3fc6ecb437a6f359c0606bae4b1ab6715201d87", size = 2383016, upload-time = "2025-11-28T17:04:48.525Z" }, - { url = "https://files.pythonhosted.org/packages/29/a3/1fa90b95b690f0d7541f48850adc40e9019374d896c1b8148d15012b2458/fonttools-4.61.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0bdcf2e29d65c26299cc3d502f4612365e8b90a939f46cd92d037b6cb7bb544a", size = 4949425, upload-time = "2025-11-28T17:04:50.482Z" }, - { url = "https://files.pythonhosted.org/packages/af/00/acf18c00f6c501bd6e05ee930f926186f8a8e268265407065688820f1c94/fonttools-4.61.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e6cd0d9051b8ddaf7385f99dd82ec2a058e2b46cf1f1961e68e1ff20fcbb61af", size = 4999632, upload-time = 
"2025-11-28T17:04:52.508Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e0/19a2b86e54109b1d2ee8743c96a1d297238ae03243897bc5345c0365f34d/fonttools-4.61.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e074bc07c31406f45c418e17c1722e83560f181d122c412fa9e815df0ff74810", size = 4939438, upload-time = "2025-11-28T17:04:54.437Z" }, - { url = "https://files.pythonhosted.org/packages/04/35/7b57a5f57d46286360355eff8d6b88c64ab6331107f37a273a71c803798d/fonttools-4.61.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a9b78da5d5faa17e63b2404b77feeae105c1b7e75f26020ab7a27b76e02039f", size = 5088960, upload-time = "2025-11-28T17:04:56.348Z" }, - { url = "https://files.pythonhosted.org/packages/3e/0e/6c5023eb2e0fe5d1ababc7e221e44acd3ff668781489cc1937a6f83d620a/fonttools-4.61.0-cp312-cp312-win32.whl", hash = "sha256:9821ed77bb676736b88fa87a737c97b6af06e8109667e625a4f00158540ce044", size = 2264404, upload-time = "2025-11-28T17:04:58.149Z" }, - { url = "https://files.pythonhosted.org/packages/36/0b/63273128c7c5df19b1e4cd92e0a1e6ea5bb74a400c4905054c96ad60a675/fonttools-4.61.0-cp312-cp312-win_amd64.whl", hash = "sha256:0011d640afa61053bc6590f9a3394bd222de7cfde19346588beabac374e9d8ac", size = 2314427, upload-time = "2025-11-28T17:04:59.812Z" }, - { url = "https://files.pythonhosted.org/packages/17/45/334f0d7f181e5473cfb757e1b60f4e60e7fc64f28d406e5d364a952718c0/fonttools-4.61.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba774b8cbd8754f54b8eb58124e8bd45f736b2743325ab1a5229698942b9b433", size = 2841801, upload-time = "2025-11-28T17:05:01.621Z" }, - { url = "https://files.pythonhosted.org/packages/cc/63/97b9c78e1f79bc741d4efe6e51f13872d8edb2b36e1b9fb2bab0d4491bb7/fonttools-4.61.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c84b430616ed73ce46e9cafd0bf0800e366a3e02fb7e1ad7c1e214dbe3862b1f", size = 2379024, upload-time = "2025-11-28T17:05:03.668Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/80/c87bc524a90dbeb2a390eea23eae448286983da59b7e02c67fa0ca96a8c5/fonttools-4.61.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b2b734d8391afe3c682320840c8191de9bd24e7eb85768dd4dc06ed1b63dbb1b", size = 4923706, upload-time = "2025-11-28T17:05:05.494Z" }, - { url = "https://files.pythonhosted.org/packages/6d/f6/a3b0374811a1de8c3f9207ec88f61ad1bb96f938ed89babae26c065c2e46/fonttools-4.61.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5c5fff72bf31b0e558ed085e4fd7ed96eb85881404ecc39ed2a779e7cf724eb", size = 4979751, upload-time = "2025-11-28T17:05:07.665Z" }, - { url = "https://files.pythonhosted.org/packages/a5/3b/30f63b4308b449091573285f9d27619563a84f399946bca3eadc9554afbe/fonttools-4.61.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:14a290c5c93fcab76b7f451e6a4b7721b712d90b3b5ed6908f1abcf794e90d6d", size = 4921113, upload-time = "2025-11-28T17:05:09.551Z" }, - { url = "https://files.pythonhosted.org/packages/41/6c/58e6e9b7d9d8bf2d7010bd7bb493060b39b02a12d1cda64a8bfb116ce760/fonttools-4.61.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:13e3e20a5463bfeb77b3557d04b30bd6a96a6bb5c15c7b2e7908903e69d437a0", size = 5063183, upload-time = "2025-11-28T17:05:11.677Z" }, - { url = "https://files.pythonhosted.org/packages/3f/e3/52c790ab2b07492df059947a1fd7778e105aac5848c0473029a4d20481a2/fonttools-4.61.0-cp313-cp313-win32.whl", hash = "sha256:6781e7a4bb010be1cd69a29927b0305c86b843395f2613bdabe115f7d6ea7f34", size = 2263159, upload-time = "2025-11-28T17:05:13.292Z" }, - { url = "https://files.pythonhosted.org/packages/e9/1f/116013b200fbeba871046554d5d2a45fefa69a05c40e9cdfd0d4fff53edc/fonttools-4.61.0-cp313-cp313-win_amd64.whl", hash = "sha256:c53b47834ae41e8e4829171cc44fec0fdf125545a15f6da41776b926b9645a9a", size = 2313530, upload-time = "2025-11-28T17:05:14.848Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/99/59b1e25987787cb714aa9457cee4c9301b7c2153f0b673e2b8679d37669d/fonttools-4.61.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:96dfc9bc1f2302224e48e6ee37e656eddbab810b724b52e9d9c13a57a6abad01", size = 2841429, upload-time = "2025-11-28T17:05:16.671Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/4c1911d4332c8a144bb3b44416e274ccca0e297157c971ea1b3fbb855590/fonttools-4.61.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3b2065d94e5d63aafc2591c8b6ccbdb511001d9619f1bca8ad39b745ebeb5efa", size = 2378987, upload-time = "2025-11-28T17:05:18.69Z" }, - { url = "https://files.pythonhosted.org/packages/24/b0/f442e90fde5d2af2ae0cb54008ab6411edc557ee33b824e13e1d04925ac9/fonttools-4.61.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0d87e81e4d869549585ba0beb3f033718501c1095004f5e6aef598d13ebc216", size = 4873270, upload-time = "2025-11-28T17:05:20.625Z" }, - { url = "https://files.pythonhosted.org/packages/bb/04/f5d5990e33053c8a59b90b1d7e10ad9b97a73f42c745304da0e709635fab/fonttools-4.61.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cfa2eb9bae650e58f0e8ad53c49d19a844d6034d6b259f30f197238abc1ccee", size = 4968270, upload-time = "2025-11-28T17:05:22.515Z" }, - { url = "https://files.pythonhosted.org/packages/94/9f/2091402e0d27c9c8c4bab5de0e5cd146d9609a2d7d1c666bbb75c0011c1a/fonttools-4.61.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4238120002e68296d55e091411c09eab94e111c8ce64716d17df53fd0eb3bb3d", size = 4919799, upload-time = "2025-11-28T17:05:24.437Z" }, - { url = "https://files.pythonhosted.org/packages/a8/72/86adab22fde710b829f8ffbc8f264df01928e5b7a8f6177fa29979ebf256/fonttools-4.61.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b6ceac262cc62bec01b3bb59abccf41b24ef6580869e306a4e88b7e56bb4bdda", size = 5030966, upload-time = "2025-11-28T17:05:26.115Z" }, - { 
url = "https://files.pythonhosted.org/packages/e8/a7/7c8e31b003349e845b853f5e0a67b95ff6b052fa4f5224f8b72624f5ac69/fonttools-4.61.0-cp314-cp314-win32.whl", hash = "sha256:adbb4ecee1a779469a77377bbe490565effe8fce6fb2e6f95f064de58f8bac85", size = 2267243, upload-time = "2025-11-28T17:05:27.807Z" }, - { url = "https://files.pythonhosted.org/packages/20/ee/f434fe7749360497c52b7dcbcfdbccdaab0a71c59f19d572576066717122/fonttools-4.61.0-cp314-cp314-win_amd64.whl", hash = "sha256:02bdf8e04d1a70476564b8640380f04bb4ac74edc1fc71f1bacb840b3e398ee9", size = 2318822, upload-time = "2025-11-28T17:05:29.882Z" }, - { url = "https://files.pythonhosted.org/packages/33/b3/c16255320255e5c1863ca2b2599bb61a46e2f566db0bbb9948615a8fe692/fonttools-4.61.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:627216062d90ab0d98215176d8b9562c4dd5b61271d35f130bcd30f6a8aaa33a", size = 2924917, upload-time = "2025-11-28T17:05:31.46Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/08067ae21de705a817777c02ef36ab0b953cbe91d8adf134f9c2da75ed6d/fonttools-4.61.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:7b446623c9cd5f14a59493818eaa80255eec2468c27d2c01b56e05357c263195", size = 2413576, upload-time = "2025-11-28T17:05:33.343Z" }, - { url = "https://files.pythonhosted.org/packages/42/f1/96ff43f92addce2356780fdc203f2966206f3d22ea20e242c27826fd7442/fonttools-4.61.0-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:70e2a0c0182ee75e493ef33061bfebf140ea57e035481d2f95aa03b66c7a0e05", size = 4877447, upload-time = "2025-11-28T17:05:35.278Z" }, - { url = "https://files.pythonhosted.org/packages/d0/1e/a3d8e51ed9ccfd7385e239ae374b78d258a0fb82d82cab99160a014a45d1/fonttools-4.61.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9064b0f55b947e929ac669af5311ab1f26f750214db6dd9a0c97e091e918f486", size = 5095681, upload-time = "2025-11-28T17:05:37.142Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/f6/d256bd6c1065c146a0bdddf1c62f542e08ae5b3405dbf3fcc52be272f674/fonttools-4.61.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2cb5e45a824ce14b90510024d0d39dae51bd4fbb54c42a9334ea8c8cf4d95cbe", size = 4974140, upload-time = "2025-11-28T17:05:39.5Z" }, - { url = "https://files.pythonhosted.org/packages/5d/0c/96633eb4b26f138cc48561c6e0c44b4ea48acea56b20b507d6b14f8e80ce/fonttools-4.61.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6e5ca8c62efdec7972dfdfd454415c4db49b89aeaefaaacada432f3b7eea9866", size = 5001741, upload-time = "2025-11-28T17:05:41.424Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/3b536bad3be4f26186f296e749ff17bad3e6d57232c104d752d24b2e265b/fonttools-4.61.0-cp314-cp314t-win32.whl", hash = "sha256:63c7125d31abe3e61d7bb917329b5543c5b3448db95f24081a13aaf064360fc8", size = 2330707, upload-time = "2025-11-28T17:05:43.548Z" }, - { url = "https://files.pythonhosted.org/packages/18/ea/e6b9ac610451ee9f04477c311ad126de971f6112cb579fa391d2a8edb00b/fonttools-4.61.0-cp314-cp314t-win_amd64.whl", hash = "sha256:67d841aa272be5500de7f447c40d1d8452783af33b4c3599899319f6ef9ad3c1", size = 2395950, upload-time = "2025-11-28T17:05:45.638Z" }, - { url = "https://files.pythonhosted.org/packages/0c/14/634f7daea5ffe6a5f7a0322ba8e1a0e23c9257b80aa91458107896d1dfc7/fonttools-4.61.0-py3-none-any.whl", hash = "sha256:276f14c560e6f98d24ef7f5f44438e55ff5a67f78fa85236b218462c9f5d0635", size = 1144485, upload-time = "2025-11-28T17:05:47.573Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/33/f9/0e84d593c0e12244150280a630999835a64f2852276161b62a0f98318de0/fonttools-4.61.0.tar.gz", hash = "sha256:ec520a1f0c7758d7a858a00f090c1745f6cde6a7c5e76fb70ea4044a15f712e7", size = 3561884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/5d/19e5939f773c7cb05480fe2e881d63870b63ee2b4bdb9a77d55b1d36c7b9/fonttools-4.61.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:e24a1565c4e57111ec7f4915f8981ecbb61adf66a55f378fdc00e206059fcfef", size = 2846930 }, + { url = "https://files.pythonhosted.org/packages/25/b2/0658faf66f705293bd7e739a4f038302d188d424926be9c59bdad945664b/fonttools-4.61.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2bfacb5351303cae9f072ccf3fc6ecb437a6f359c0606bae4b1ab6715201d87", size = 2383016 }, + { url = "https://files.pythonhosted.org/packages/29/a3/1fa90b95b690f0d7541f48850adc40e9019374d896c1b8148d15012b2458/fonttools-4.61.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0bdcf2e29d65c26299cc3d502f4612365e8b90a939f46cd92d037b6cb7bb544a", size = 4949425 }, + { url = "https://files.pythonhosted.org/packages/af/00/acf18c00f6c501bd6e05ee930f926186f8a8e268265407065688820f1c94/fonttools-4.61.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e6cd0d9051b8ddaf7385f99dd82ec2a058e2b46cf1f1961e68e1ff20fcbb61af", size = 4999632 }, + { url = "https://files.pythonhosted.org/packages/5f/e0/19a2b86e54109b1d2ee8743c96a1d297238ae03243897bc5345c0365f34d/fonttools-4.61.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e074bc07c31406f45c418e17c1722e83560f181d122c412fa9e815df0ff74810", size = 4939438 }, + { url = "https://files.pythonhosted.org/packages/04/35/7b57a5f57d46286360355eff8d6b88c64ab6331107f37a273a71c803798d/fonttools-4.61.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a9b78da5d5faa17e63b2404b77feeae105c1b7e75f26020ab7a27b76e02039f", size = 5088960 }, + { url = "https://files.pythonhosted.org/packages/3e/0e/6c5023eb2e0fe5d1ababc7e221e44acd3ff668781489cc1937a6f83d620a/fonttools-4.61.0-cp312-cp312-win32.whl", hash = "sha256:9821ed77bb676736b88fa87a737c97b6af06e8109667e625a4f00158540ce044", size = 2264404 }, + { url = "https://files.pythonhosted.org/packages/36/0b/63273128c7c5df19b1e4cd92e0a1e6ea5bb74a400c4905054c96ad60a675/fonttools-4.61.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:0011d640afa61053bc6590f9a3394bd222de7cfde19346588beabac374e9d8ac", size = 2314427 }, + { url = "https://files.pythonhosted.org/packages/17/45/334f0d7f181e5473cfb757e1b60f4e60e7fc64f28d406e5d364a952718c0/fonttools-4.61.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba774b8cbd8754f54b8eb58124e8bd45f736b2743325ab1a5229698942b9b433", size = 2841801 }, + { url = "https://files.pythonhosted.org/packages/cc/63/97b9c78e1f79bc741d4efe6e51f13872d8edb2b36e1b9fb2bab0d4491bb7/fonttools-4.61.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c84b430616ed73ce46e9cafd0bf0800e366a3e02fb7e1ad7c1e214dbe3862b1f", size = 2379024 }, + { url = "https://files.pythonhosted.org/packages/4e/80/c87bc524a90dbeb2a390eea23eae448286983da59b7e02c67fa0ca96a8c5/fonttools-4.61.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b2b734d8391afe3c682320840c8191de9bd24e7eb85768dd4dc06ed1b63dbb1b", size = 4923706 }, + { url = "https://files.pythonhosted.org/packages/6d/f6/a3b0374811a1de8c3f9207ec88f61ad1bb96f938ed89babae26c065c2e46/fonttools-4.61.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5c5fff72bf31b0e558ed085e4fd7ed96eb85881404ecc39ed2a779e7cf724eb", size = 4979751 }, + { url = "https://files.pythonhosted.org/packages/a5/3b/30f63b4308b449091573285f9d27619563a84f399946bca3eadc9554afbe/fonttools-4.61.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:14a290c5c93fcab76b7f451e6a4b7721b712d90b3b5ed6908f1abcf794e90d6d", size = 4921113 }, + { url = "https://files.pythonhosted.org/packages/41/6c/58e6e9b7d9d8bf2d7010bd7bb493060b39b02a12d1cda64a8bfb116ce760/fonttools-4.61.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:13e3e20a5463bfeb77b3557d04b30bd6a96a6bb5c15c7b2e7908903e69d437a0", size = 5063183 }, + { url = 
"https://files.pythonhosted.org/packages/3f/e3/52c790ab2b07492df059947a1fd7778e105aac5848c0473029a4d20481a2/fonttools-4.61.0-cp313-cp313-win32.whl", hash = "sha256:6781e7a4bb010be1cd69a29927b0305c86b843395f2613bdabe115f7d6ea7f34", size = 2263159 }, + { url = "https://files.pythonhosted.org/packages/e9/1f/116013b200fbeba871046554d5d2a45fefa69a05c40e9cdfd0d4fff53edc/fonttools-4.61.0-cp313-cp313-win_amd64.whl", hash = "sha256:c53b47834ae41e8e4829171cc44fec0fdf125545a15f6da41776b926b9645a9a", size = 2313530 }, + { url = "https://files.pythonhosted.org/packages/d3/99/59b1e25987787cb714aa9457cee4c9301b7c2153f0b673e2b8679d37669d/fonttools-4.61.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:96dfc9bc1f2302224e48e6ee37e656eddbab810b724b52e9d9c13a57a6abad01", size = 2841429 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/4c1911d4332c8a144bb3b44416e274ccca0e297157c971ea1b3fbb855590/fonttools-4.61.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3b2065d94e5d63aafc2591c8b6ccbdb511001d9619f1bca8ad39b745ebeb5efa", size = 2378987 }, + { url = "https://files.pythonhosted.org/packages/24/b0/f442e90fde5d2af2ae0cb54008ab6411edc557ee33b824e13e1d04925ac9/fonttools-4.61.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0d87e81e4d869549585ba0beb3f033718501c1095004f5e6aef598d13ebc216", size = 4873270 }, + { url = "https://files.pythonhosted.org/packages/bb/04/f5d5990e33053c8a59b90b1d7e10ad9b97a73f42c745304da0e709635fab/fonttools-4.61.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cfa2eb9bae650e58f0e8ad53c49d19a844d6034d6b259f30f197238abc1ccee", size = 4968270 }, + { url = "https://files.pythonhosted.org/packages/94/9f/2091402e0d27c9c8c4bab5de0e5cd146d9609a2d7d1c666bbb75c0011c1a/fonttools-4.61.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4238120002e68296d55e091411c09eab94e111c8ce64716d17df53fd0eb3bb3d", size = 4919799 }, + { 
url = "https://files.pythonhosted.org/packages/a8/72/86adab22fde710b829f8ffbc8f264df01928e5b7a8f6177fa29979ebf256/fonttools-4.61.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b6ceac262cc62bec01b3bb59abccf41b24ef6580869e306a4e88b7e56bb4bdda", size = 5030966 }, + { url = "https://files.pythonhosted.org/packages/e8/a7/7c8e31b003349e845b853f5e0a67b95ff6b052fa4f5224f8b72624f5ac69/fonttools-4.61.0-cp314-cp314-win32.whl", hash = "sha256:adbb4ecee1a779469a77377bbe490565effe8fce6fb2e6f95f064de58f8bac85", size = 2267243 }, + { url = "https://files.pythonhosted.org/packages/20/ee/f434fe7749360497c52b7dcbcfdbccdaab0a71c59f19d572576066717122/fonttools-4.61.0-cp314-cp314-win_amd64.whl", hash = "sha256:02bdf8e04d1a70476564b8640380f04bb4ac74edc1fc71f1bacb840b3e398ee9", size = 2318822 }, + { url = "https://files.pythonhosted.org/packages/33/b3/c16255320255e5c1863ca2b2599bb61a46e2f566db0bbb9948615a8fe692/fonttools-4.61.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:627216062d90ab0d98215176d8b9562c4dd5b61271d35f130bcd30f6a8aaa33a", size = 2924917 }, + { url = "https://files.pythonhosted.org/packages/e2/b8/08067ae21de705a817777c02ef36ab0b953cbe91d8adf134f9c2da75ed6d/fonttools-4.61.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:7b446623c9cd5f14a59493818eaa80255eec2468c27d2c01b56e05357c263195", size = 2413576 }, + { url = "https://files.pythonhosted.org/packages/42/f1/96ff43f92addce2356780fdc203f2966206f3d22ea20e242c27826fd7442/fonttools-4.61.0-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:70e2a0c0182ee75e493ef33061bfebf140ea57e035481d2f95aa03b66c7a0e05", size = 4877447 }, + { url = "https://files.pythonhosted.org/packages/d0/1e/a3d8e51ed9ccfd7385e239ae374b78d258a0fb82d82cab99160a014a45d1/fonttools-4.61.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9064b0f55b947e929ac669af5311ab1f26f750214db6dd9a0c97e091e918f486", size = 5095681 
}, + { url = "https://files.pythonhosted.org/packages/eb/f6/d256bd6c1065c146a0bdddf1c62f542e08ae5b3405dbf3fcc52be272f674/fonttools-4.61.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2cb5e45a824ce14b90510024d0d39dae51bd4fbb54c42a9334ea8c8cf4d95cbe", size = 4974140 }, + { url = "https://files.pythonhosted.org/packages/5d/0c/96633eb4b26f138cc48561c6e0c44b4ea48acea56b20b507d6b14f8e80ce/fonttools-4.61.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6e5ca8c62efdec7972dfdfd454415c4db49b89aeaefaaacada432f3b7eea9866", size = 5001741 }, + { url = "https://files.pythonhosted.org/packages/6f/9a/3b536bad3be4f26186f296e749ff17bad3e6d57232c104d752d24b2e265b/fonttools-4.61.0-cp314-cp314t-win32.whl", hash = "sha256:63c7125d31abe3e61d7bb917329b5543c5b3448db95f24081a13aaf064360fc8", size = 2330707 }, + { url = "https://files.pythonhosted.org/packages/18/ea/e6b9ac610451ee9f04477c311ad126de971f6112cb579fa391d2a8edb00b/fonttools-4.61.0-cp314-cp314t-win_amd64.whl", hash = "sha256:67d841aa272be5500de7f447c40d1d8452783af33b4c3599899319f6ef9ad3c1", size = 2395950 }, + { url = "https://files.pythonhosted.org/packages/0c/14/634f7daea5ffe6a5f7a0322ba8e1a0e23c9257b80aa91458107896d1dfc7/fonttools-4.61.0-py3-none-any.whl", hash = "sha256:276f14c560e6f98d24ef7f5f44438e55ff5a67f78fa85236b218462c9f5d0635", size = 1144485 }, ] [[package]] name = "frozenlist" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size 
= 87782, upload-time = "2025-10-06T05:36:06.649Z" }, - { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, - { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, - { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, - { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, - { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, - { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, - { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, - { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, - { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, - { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, - { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, - { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, - { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, - { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, - { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, - { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, - { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, - { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, - { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, - { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, - { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, - { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, - { 
url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, - { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, - { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, - { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, - { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = 
"2025-10-06T05:37:00.811Z" }, - { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, - { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, - { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, - { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, - { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, - { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, - { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, - { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, - { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 
240130, upload-time = "2025-10-06T05:37:15.781Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, - { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, - { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, - { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, - { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, - { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, - { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, - { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, - { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, - { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, - { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, - { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, - { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, - { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, - { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, - { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, - { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, - { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, - { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" 
}, - { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782 }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594 }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448 }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411 }, + { url = 
"https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014 }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909 }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049 }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485 }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619 }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320 }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820 }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518 }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096 }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985 }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591 }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102 }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717 }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651 }, + { url = 
"https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417 }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391 }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048 }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549 }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833 }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363 }, + { url = 
"https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314 }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365 }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763 }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110 }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717 }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628 }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882 }, + { url = 
"https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676 }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235 }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742 }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725 }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506 }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161 }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676 }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638 }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067 }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101 }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901 }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395 }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", 
hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659 }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492 }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034 }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749 }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127 }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698 }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749 }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 
231298 }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015 }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038 }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130 }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845 }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131 }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542 }, + { url = 
"https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308 }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210 }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972 }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536 }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330 }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627 }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238 }, + { url = 
"https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738 }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739 }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186 }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196 }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830 }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289 }, + { url = 
"https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318 }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814 }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762 }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470 }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042 }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148 }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676 }, + { url = 
"https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451 }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507 }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409 }, ] [[package]] name = "fsspec" version = "2025.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285, upload-time = "2025-10-30T14:58:44.036Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285 } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966, upload-time = "2025-10-30T14:58:42.53Z" }, + { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966 }, ] [package.optional-dependencies] 
@@ -1079,9 +1078,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "smmap" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794 }, ] [[package]] @@ -1091,21 +1090,21 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "gitdb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076, upload-time = "2025-07-24T03:45:54.871Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/c8/dd58967d119baab745caec2f9d853297cec1989ec1d63f677d3880632b88/gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c", size = 215076 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168, upload-time = "2025-07-24T03:45:52.517Z" }, + { url = "https://files.pythonhosted.org/packages/01/61/d4b89fec821f72385526e1b9d9a3a0385dda4a72b206d28049e2c7cd39b8/gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77", size = 208168 }, ] [[package]] name = "gnureadline" version = "8.2.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/92/20723aa239b9a8024e6f8358c789df8859ab1085a1ae106e5071727ad20f/gnureadline-8.2.13.tar.gz", hash = "sha256:c9b9e1e7ba99a80bb50c12027d6ce692574f77a65bf57bc97041cf81c0f49bd1", size = 3224991, upload-time = "2024-10-18T14:03:11.727Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/92/20723aa239b9a8024e6f8358c789df8859ab1085a1ae106e5071727ad20f/gnureadline-8.2.13.tar.gz", hash = "sha256:c9b9e1e7ba99a80bb50c12027d6ce692574f77a65bf57bc97041cf81c0f49bd1", size = 3224991 } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/bd/df8fd060e43efd3dbdd3b210bf558ce3ef854843cd093f910f4115ebe2e9/gnureadline-8.2.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c152a82613fa012ab4331bb9a0ffddb415e37561d376b910bf9e7d535607faf", size = 160504, upload-time = "2024-10-18T14:03:49.725Z" }, - { url = "https://files.pythonhosted.org/packages/97/ee/322e5340c8cdfa40e71bd0485a82404ad4cf9aed2260cca090f3c1a3a032/gnureadline-8.2.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85e362d2d0e85e45f0affae7bbfaf998b00167c55a78d31ee0f214de9ff429d2", size = 162380, upload-time = "2024-10-18T14:03:53.129Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/b0/4a3c55a05b4c1c240fd6dc204ff597432008c4649ce500688a2441d27cf4/gnureadline-8.2.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2d3e33d2e0dd694d623a2ca1fae6990b52f1d25955504b7293a9350fb9912940", size = 160646, upload-time = "2024-10-18T14:04:00.723Z" }, - { url = "https://files.pythonhosted.org/packages/3a/41/8821db40f2b0dd9cc935d6838bc63776fb5bfb1df092f8d4698ec29ada6a/gnureadline-8.2.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6c550d08c4d2882a83293a724b14a262ee5078fd4fa7acdc78aa59cab26ae343", size = 162630, upload-time = "2024-10-18T14:04:02.711Z" }, + { url = "https://files.pythonhosted.org/packages/68/bd/df8fd060e43efd3dbdd3b210bf558ce3ef854843cd093f910f4115ebe2e9/gnureadline-8.2.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c152a82613fa012ab4331bb9a0ffddb415e37561d376b910bf9e7d535607faf", size = 160504 }, + { url = "https://files.pythonhosted.org/packages/97/ee/322e5340c8cdfa40e71bd0485a82404ad4cf9aed2260cca090f3c1a3a032/gnureadline-8.2.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85e362d2d0e85e45f0affae7bbfaf998b00167c55a78d31ee0f214de9ff429d2", size = 162380 }, + { url = "https://files.pythonhosted.org/packages/a1/b0/4a3c55a05b4c1c240fd6dc204ff597432008c4649ce500688a2441d27cf4/gnureadline-8.2.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2d3e33d2e0dd694d623a2ca1fae6990b52f1d25955504b7293a9350fb9912940", size = 160646 }, + { url = "https://files.pythonhosted.org/packages/3a/41/8821db40f2b0dd9cc935d6838bc63776fb5bfb1df092f8d4698ec29ada6a/gnureadline-8.2.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6c550d08c4d2882a83293a724b14a262ee5078fd4fa7acdc78aa59cab26ae343", size = 162630 }, ] [[package]] @@ -1117,9 +1116,9 @@ dependencies = [ { name = "pyasn1-modules" }, { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/ef/66d14cf0e01b08d2d51ffc3c20410c4e134a1548fc246a6081eae585a4fe/google_auth-2.43.0.tar.gz", hash = 
"sha256:88228eee5fc21b62a1b5fe773ca15e67778cb07dc8363adcb4a8827b52d81483", size = 296359, upload-time = "2025-11-06T00:13:36.587Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/ef/66d14cf0e01b08d2d51ffc3c20410c4e134a1548fc246a6081eae585a4fe/google_auth-2.43.0.tar.gz", hash = "sha256:88228eee5fc21b62a1b5fe773ca15e67778cb07dc8363adcb4a8827b52d81483", size = 296359 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/d1/385110a9ae86d91cc14c5282c61fe9f4dc41c0b9f7d423c6ad77038c4448/google_auth-2.43.0-py2.py3-none-any.whl", hash = "sha256:af628ba6fa493f75c7e9dbe9373d148ca9f4399b5ea29976519e0a3848eddd16", size = 223114, upload-time = "2025-11-06T00:13:35.209Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d1/385110a9ae86d91cc14c5282c61fe9f4dc41c0b9f7d423c6ad77038c4448/google_auth-2.43.0-py2.py3-none-any.whl", hash = "sha256:af628ba6fa493f75c7e9dbe9373d148ca9f4399b5ea29976519e0a3848eddd16", size = 223114 }, ] [[package]] @@ -1132,18 +1131,18 @@ dependencies = [ { name = "python-dateutil" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/f6/bf62ff950c317ed03e77f3f6ddd7e34aaa98fe89d79ebd660c55343d8054/graphene-3.4.3.tar.gz", hash = "sha256:2a3786948ce75fe7e078443d37f609cbe5bb36ad8d6b828740ad3b95ed1a0aaa", size = 44739, upload-time = "2024-11-09T20:44:25.757Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/f6/bf62ff950c317ed03e77f3f6ddd7e34aaa98fe89d79ebd660c55343d8054/graphene-3.4.3.tar.gz", hash = "sha256:2a3786948ce75fe7e078443d37f609cbe5bb36ad8d6b828740ad3b95ed1a0aaa", size = 44739 } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/e0/61d8e98007182e6b2aca7cf65904721fb2e4bce0192272ab9cb6f69d8812/graphene-3.4.3-py2.py3-none-any.whl", hash = "sha256:820db6289754c181007a150db1f7fff544b94142b556d12e3ebc777a7bf36c71", size = 114894, upload-time = "2024-11-09T20:44:23.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/e0/61d8e98007182e6b2aca7cf65904721fb2e4bce0192272ab9cb6f69d8812/graphene-3.4.3-py2.py3-none-any.whl", hash = "sha256:820db6289754c181007a150db1f7fff544b94142b556d12e3ebc777a7bf36c71", size = 114894 }, ] [[package]] name = "graphql-core" version = "3.2.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ac/9b/037a640a2983b09aed4a823f9cf1729e6d780b0671f854efa4727a7affbe/graphql_core-3.2.7.tar.gz", hash = "sha256:27b6904bdd3b43f2a0556dad5d579bdfdeab1f38e8e8788e555bdcb586a6f62c", size = 513484, upload-time = "2025-11-01T22:30:40.436Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/9b/037a640a2983b09aed4a823f9cf1729e6d780b0671f854efa4727a7affbe/graphql_core-3.2.7.tar.gz", hash = "sha256:27b6904bdd3b43f2a0556dad5d579bdfdeab1f38e8e8788e555bdcb586a6f62c", size = 513484 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/14/933037032608787fb92e365883ad6a741c235e0ff992865ec5d904a38f1e/graphql_core-3.2.7-py3-none-any.whl", hash = "sha256:17fc8f3ca4a42913d8e24d9ac9f08deddf0a0b2483076575757f6c412ead2ec0", size = 207262, upload-time = "2025-11-01T22:30:38.912Z" }, + { url = "https://files.pythonhosted.org/packages/0a/14/933037032608787fb92e365883ad6a741c235e0ff992865ec5d904a38f1e/graphql_core-3.2.7-py3-none-any.whl", hash = "sha256:17fc8f3ca4a42913d8e24d9ac9f08deddf0a0b2483076575757f6c412ead2ec0", size = 207262 }, ] [[package]] @@ -1153,48 +1152,48 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "graphql-core" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/13/98fbf8d67552f102488ffc16c6f559ce71ea15f6294728d33928ab5ff14d/graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c", size = 50027, upload-time = "2022-04-16T11:03:45.447Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d1/13/98fbf8d67552f102488ffc16c6f559ce71ea15f6294728d33928ab5ff14d/graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c", size = 50027 } wheels = [ - { url = "https://files.pythonhosted.org/packages/74/16/a4cf06adbc711bd364a73ce043b0b08d8fa5aae3df11b6ee4248bcdad2e0/graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5", size = 16940, upload-time = "2022-04-16T11:03:43.895Z" }, + { url = "https://files.pythonhosted.org/packages/74/16/a4cf06adbc711bd364a73ce043b0b08d8fa5aae3df11b6ee4248bcdad2e0/graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5", size = 16940 }, ] [[package]] name = "greenlet" version = "3.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, - { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, - { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" }, - { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, - { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, - { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, - { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" }, - { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" }, - { url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" }, - { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" }, - { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" }, - { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" }, - { url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" }, - { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" }, - { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" }, - { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" }, - { url = 
"https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" }, - { url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" }, - { url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" }, - { url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" }, - { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379 }, + { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294 }, + { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742 }, + { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297 }, + { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885 }, + { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424 }, + { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017 }, + { url = 
"https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964 }, + { url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140 }, + { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219 }, + { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211 }, + { url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311 }, + { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833 }, + { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256 }, + { url = 
"https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483 }, + { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833 }, + { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671 }, + { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360 }, + { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160 }, + { url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388 }, + { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166 }, + { url = 
"https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193 }, + { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387 }, + { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638 }, + { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145 }, + { url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236 }, + { url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506 }, + { url = 
"https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783 }, + { url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857 }, + { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034 }, ] [[package]] @@ -1204,47 +1203,47 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031, upload-time = "2024-08-10T20:25:27.378Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029, upload-time = "2024-08-10T20:25:24.996Z" }, + { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = 
"sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029 }, ] [[package]] name = "h11" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, ] [[package]] name = "hf-xet" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020, upload-time = "2025-10-24T19:04:32.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/a5/85ef910a0aa034a2abcfadc360ab5ac6f6bc4e9112349bd40ca97551cff0/hf_xet-1.2.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:ceeefcd1b7aed4956ae8499e2199607765fbd1c60510752003b6cc0b8413b649", size = 2861870, upload-time = "2025-10-24T19:04:11.422Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/40/e2e0a7eb9a51fe8828ba2d47fe22a7e74914ea8a0db68a18c3aa7449c767/hf_xet-1.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b70218dd548e9840224df5638fdc94bd033552963cfa97f9170829381179c813", size = 2717584, upload-time = "2025-10-24T19:04:09.586Z" }, - { url = "https://files.pythonhosted.org/packages/a5/7d/daf7f8bc4594fdd59a8a596f9e3886133fdc68e675292218a5e4c1b7e834/hf_xet-1.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d40b18769bb9a8bc82a9ede575ce1a44c75eb80e7375a01d76259089529b5dc", size = 3315004, upload-time = "2025-10-24T19:04:00.314Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ba/45ea2f605fbf6d81c8b21e4d970b168b18a53515923010c312c06cd83164/hf_xet-1.2.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd3a6027d59cfb60177c12d6424e31f4b5ff13d8e3a1247b3a584bf8977e6df5", size = 3222636, upload-time = "2025-10-24T19:03:58.111Z" }, - { url = "https://files.pythonhosted.org/packages/4a/1d/04513e3cab8f29ab8c109d309ddd21a2705afab9d52f2ba1151e0c14f086/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6de1fc44f58f6dd937956c8d304d8c2dea264c80680bcfa61ca4a15e7b76780f", size = 3408448, upload-time = "2025-10-24T19:04:20.951Z" }, - { url = "https://files.pythonhosted.org/packages/f0/7c/60a2756d7feec7387db3a1176c632357632fbe7849fce576c5559d4520c7/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f182f264ed2acd566c514e45da9f2119110e48a87a327ca271027904c70c5832", size = 3503401, upload-time = "2025-10-24T19:04:22.549Z" }, - { url = "https://files.pythonhosted.org/packages/4e/64/48fffbd67fb418ab07451e4ce641a70de1c40c10a13e25325e24858ebe5a/hf_xet-1.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:293a7a3787e5c95d7be1857358a9130694a9c6021de3f27fa233f37267174382", size = 2900866, upload-time = "2025-10-24T19:04:33.461Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/51/f7e2caae42f80af886db414d4e9885fac959330509089f97cccb339c6b87/hf_xet-1.2.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:10bfab528b968c70e062607f663e21e34e2bba349e8038db546646875495179e", size = 2861861, upload-time = "2025-10-24T19:04:19.01Z" }, - { url = "https://files.pythonhosted.org/packages/6e/1d/a641a88b69994f9371bd347f1dd35e5d1e2e2460a2e350c8d5165fc62005/hf_xet-1.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a212e842647b02eb6a911187dc878e79c4aa0aa397e88dd3b26761676e8c1f8", size = 2717699, upload-time = "2025-10-24T19:04:17.306Z" }, - { url = "https://files.pythonhosted.org/packages/df/e0/e5e9bba7d15f0318955f7ec3f4af13f92e773fbb368c0b8008a5acbcb12f/hf_xet-1.2.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e06daccb3a7d4c065f34fc26c14c74f4653069bb2b194e7f18f17cbe9939c0", size = 3314885, upload-time = "2025-10-24T19:04:07.642Z" }, - { url = "https://files.pythonhosted.org/packages/21/90/b7fe5ff6f2b7b8cbdf1bd56145f863c90a5807d9758a549bf3d916aa4dec/hf_xet-1.2.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:29c8fc913a529ec0a91867ce3d119ac1aac966e098cf49501800c870328cc090", size = 3221550, upload-time = "2025-10-24T19:04:05.55Z" }, - { url = "https://files.pythonhosted.org/packages/6f/cb/73f276f0a7ce46cc6a6ec7d6c7d61cbfe5f2e107123d9bbd0193c355f106/hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e159cbfcfbb29f920db2c09ed8b660eb894640d284f102ada929b6e3dc410a", size = 3408010, upload-time = "2025-10-24T19:04:28.598Z" }, - { url = "https://files.pythonhosted.org/packages/b8/1e/d642a12caa78171f4be64f7cd9c40e3ca5279d055d0873188a58c0f5fbb9/hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9c91d5ae931510107f148874e9e2de8a16052b6f1b3ca3c1b12f15ccb491390f", size = 3503264, upload-time = "2025-10-24T19:04:30.397Z" }, - { url = 
"https://files.pythonhosted.org/packages/17/b5/33764714923fa1ff922770f7ed18c2daae034d21ae6e10dbf4347c854154/hf_xet-1.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:210d577732b519ac6ede149d2f2f34049d44e8622bf14eb3d63bbcd2d4b332dc", size = 2901071, upload-time = "2025-10-24T19:04:37.463Z" }, - { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099, upload-time = "2025-10-24T19:04:15.366Z" }, - { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178, upload-time = "2025-10-24T19:04:13.695Z" }, - { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214, upload-time = "2025-10-24T19:04:03.596Z" }, - { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054, upload-time = "2025-10-24T19:04:01.949Z" }, - { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" }, - { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/a5/85ef910a0aa034a2abcfadc360ab5ac6f6bc4e9112349bd40ca97551cff0/hf_xet-1.2.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:ceeefcd1b7aed4956ae8499e2199607765fbd1c60510752003b6cc0b8413b649", size = 2861870 }, + { url = "https://files.pythonhosted.org/packages/ea/40/e2e0a7eb9a51fe8828ba2d47fe22a7e74914ea8a0db68a18c3aa7449c767/hf_xet-1.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b70218dd548e9840224df5638fdc94bd033552963cfa97f9170829381179c813", size = 2717584 }, + { url = "https://files.pythonhosted.org/packages/a5/7d/daf7f8bc4594fdd59a8a596f9e3886133fdc68e675292218a5e4c1b7e834/hf_xet-1.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d40b18769bb9a8bc82a9ede575ce1a44c75eb80e7375a01d76259089529b5dc", size = 3315004 }, + { url = "https://files.pythonhosted.org/packages/b1/ba/45ea2f605fbf6d81c8b21e4d970b168b18a53515923010c312c06cd83164/hf_xet-1.2.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd3a6027d59cfb60177c12d6424e31f4b5ff13d8e3a1247b3a584bf8977e6df5", size = 3222636 }, + { url = 
"https://files.pythonhosted.org/packages/4a/1d/04513e3cab8f29ab8c109d309ddd21a2705afab9d52f2ba1151e0c14f086/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6de1fc44f58f6dd937956c8d304d8c2dea264c80680bcfa61ca4a15e7b76780f", size = 3408448 }, + { url = "https://files.pythonhosted.org/packages/f0/7c/60a2756d7feec7387db3a1176c632357632fbe7849fce576c5559d4520c7/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f182f264ed2acd566c514e45da9f2119110e48a87a327ca271027904c70c5832", size = 3503401 }, + { url = "https://files.pythonhosted.org/packages/4e/64/48fffbd67fb418ab07451e4ce641a70de1c40c10a13e25325e24858ebe5a/hf_xet-1.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:293a7a3787e5c95d7be1857358a9130694a9c6021de3f27fa233f37267174382", size = 2900866 }, + { url = "https://files.pythonhosted.org/packages/e2/51/f7e2caae42f80af886db414d4e9885fac959330509089f97cccb339c6b87/hf_xet-1.2.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:10bfab528b968c70e062607f663e21e34e2bba349e8038db546646875495179e", size = 2861861 }, + { url = "https://files.pythonhosted.org/packages/6e/1d/a641a88b69994f9371bd347f1dd35e5d1e2e2460a2e350c8d5165fc62005/hf_xet-1.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a212e842647b02eb6a911187dc878e79c4aa0aa397e88dd3b26761676e8c1f8", size = 2717699 }, + { url = "https://files.pythonhosted.org/packages/df/e0/e5e9bba7d15f0318955f7ec3f4af13f92e773fbb368c0b8008a5acbcb12f/hf_xet-1.2.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e06daccb3a7d4c065f34fc26c14c74f4653069bb2b194e7f18f17cbe9939c0", size = 3314885 }, + { url = "https://files.pythonhosted.org/packages/21/90/b7fe5ff6f2b7b8cbdf1bd56145f863c90a5807d9758a549bf3d916aa4dec/hf_xet-1.2.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:29c8fc913a529ec0a91867ce3d119ac1aac966e098cf49501800c870328cc090", size = 3221550 }, + { url = 
"https://files.pythonhosted.org/packages/6f/cb/73f276f0a7ce46cc6a6ec7d6c7d61cbfe5f2e107123d9bbd0193c355f106/hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e159cbfcfbb29f920db2c09ed8b660eb894640d284f102ada929b6e3dc410a", size = 3408010 }, + { url = "https://files.pythonhosted.org/packages/b8/1e/d642a12caa78171f4be64f7cd9c40e3ca5279d055d0873188a58c0f5fbb9/hf_xet-1.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9c91d5ae931510107f148874e9e2de8a16052b6f1b3ca3c1b12f15ccb491390f", size = 3503264 }, + { url = "https://files.pythonhosted.org/packages/17/b5/33764714923fa1ff922770f7ed18c2daae034d21ae6e10dbf4347c854154/hf_xet-1.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:210d577732b519ac6ede149d2f2f34049d44e8622bf14eb3d63bbcd2d4b332dc", size = 2901071 }, + { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099 }, + { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178 }, + { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214 }, + { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054 }, + { url = 
"https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812 }, + { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920 }, + { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735 }, ] [[package]] @@ -1255,9 +1254,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, ] [[package]] @@ -1270,18 
+1269,18 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, ] [[package]] name = "huey" -version = "2.5.4" +version = "2.5.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c6/dfe74b0ee9708216ab798449b694d6ba7c1b701bdc2e5d378ec0505ca9a9/huey-2.5.4.tar.gz", hash = "sha256:4b7fb217b640fbb46efc4f4681b446b40726593522f093e8ef27c4a8fcb6cfbb", size = 848666, upload-time = "2025-10-23T13:04:55.549Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/87/07796060836baf60727df5edae1579adcc8140f42fb7dea3c34ca7ce7fd3/huey-2.5.5.tar.gz", hash = "sha256:a39010628a9a1a9e91462f9bf33dc243b006a9f21193026ea47ae18949a12581", size = 895915 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/86/fb8f2ec721106ee9d47adb3a757f937044a52239adb26bae6d9ad753927b/huey-2.5.4-py3-none-any.whl", hash = 
"sha256:0eac1fb2711f6366a1db003629354a0cea470a3db720d5bab0d140c28e993f9c", size = 76843, upload-time = "2025-10-23T20:58:10.572Z" }, + { url = "https://files.pythonhosted.org/packages/de/c2/0543039071259cfdab525757022de8dad6d22c15a0e7352f1a50a1444a13/huey-2.5.5-py3-none-any.whl", hash = "sha256:82ac73343248c5d7acec04814f952c61f7793e11fd99d26ed9030137d32f912c", size = 76889 }, ] [[package]] @@ -1298,18 +1297,18 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/63/4910c5fa9128fdadf6a9c5ac138e8b1b6cee4ca44bf7915bbfbce4e355ee/huggingface_hub-0.36.0.tar.gz", hash = "sha256:47b3f0e2539c39bf5cde015d63b72ec49baff67b6931c3d97f3f84532e2b8d25", size = 463358, upload-time = "2025-10-23T12:12:01.413Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/63/4910c5fa9128fdadf6a9c5ac138e8b1b6cee4ca44bf7915bbfbce4e355ee/huggingface_hub-0.36.0.tar.gz", hash = "sha256:47b3f0e2539c39bf5cde015d63b72ec49baff67b6931c3d97f3f84532e2b8d25", size = 463358 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/bd/1a875e0d592d447cbc02805fd3fe0f497714d6a2583f59d14fa9ebad96eb/huggingface_hub-0.36.0-py3-none-any.whl", hash = "sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d", size = 566094, upload-time = "2025-10-23T12:11:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/cb/bd/1a875e0d592d447cbc02805fd3fe0f497714d6a2583f59d14fa9ebad96eb/huggingface_hub-0.36.0-py3-none-any.whl", hash = "sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d", size = 566094 }, ] [[package]] name = "humanize" version = "4.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/43/50033d25ad96a7f3845f40999b4778f753c3901a11808a584fed7c00d9f5/humanize-4.14.0.tar.gz", hash = "sha256:2fa092705ea640d605c435b1ca82b2866a1b601cdf96f076d70b79a855eba90d", size = 82939, upload-time = "2025-10-15T13:04:51.214Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/b6/43/50033d25ad96a7f3845f40999b4778f753c3901a11808a584fed7c00d9f5/humanize-4.14.0.tar.gz", hash = "sha256:2fa092705ea640d605c435b1ca82b2866a1b601cdf96f076d70b79a855eba90d", size = 82939 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/5b/9512c5fb6c8218332b530f13500c6ff5f3ce3342f35e0dd7be9ac3856fd3/humanize-4.14.0-py3-none-any.whl", hash = "sha256:d57701248d040ad456092820e6fde56c930f17749956ac47f4f655c0c547bfff", size = 132092, upload-time = "2025-10-15T13:04:49.404Z" }, + { url = "https://files.pythonhosted.org/packages/c3/5b/9512c5fb6c8218332b530f13500c6ff5f3ce3342f35e0dd7be9ac3856fd3/humanize-4.14.0-py3-none-any.whl", hash = "sha256:d57701248d040ad456092820e6fde56c930f17749956ac47f4f655c0c547bfff", size = 132092 }, ] [[package]] @@ -1321,9 +1320,9 @@ dependencies = [ { name = "omegaconf" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/8e/07e42bc434a847154083b315779b0a81d567154504624e181caf2c71cd98/hydra-core-1.3.2.tar.gz", hash = "sha256:8a878ed67216997c3e9d88a8e72e7b4767e81af37afb4ea3334b269a4390a824", size = 3263494, upload-time = "2023-02-23T18:33:43.03Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/8e/07e42bc434a847154083b315779b0a81d567154504624e181caf2c71cd98/hydra-core-1.3.2.tar.gz", hash = "sha256:8a878ed67216997c3e9d88a8e72e7b4767e81af37afb4ea3334b269a4390a824", size = 3263494 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/50/e0edd38dcd63fb26a8547f13d28f7a008bc4a3fd4eb4ff030673f22ad41a/hydra_core-1.3.2-py3-none-any.whl", hash = "sha256:fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b", size = 154547, upload-time = "2023-02-23T18:33:40.801Z" }, + { url = "https://files.pythonhosted.org/packages/c6/50/e0edd38dcd63fb26a8547f13d28f7a008bc4a3fd4eb4ff030673f22ad41a/hydra_core-1.3.2-py3-none-any.whl", hash = "sha256:fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b", size = 
154547 }, ] [[package]] @@ -1334,18 +1333,18 @@ dependencies = [ { name = "hydra-core" }, { name = "optuna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/64/018134a44a124f56e27245dd8154900a6adafb1459a38ddea0f81e22f8d7/hydra-optuna-sweeper-1.2.0.tar.gz", hash = "sha256:f20b56dfdc4db9f84f3b502c8e60a5269967197c4df4c28653cf17f71b2136ce", size = 7914, upload-time = "2022-05-17T22:19:24.388Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/64/018134a44a124f56e27245dd8154900a6adafb1459a38ddea0f81e22f8d7/hydra-optuna-sweeper-1.2.0.tar.gz", hash = "sha256:f20b56dfdc4db9f84f3b502c8e60a5269967197c4df4c28653cf17f71b2136ce", size = 7914 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/25/1eccad47c39da342988fe501b174652c37676ab539baeae9a73444932d13/hydra_optuna_sweeper-1.2.0-py3-none-any.whl", hash = "sha256:b012d94c1dc86edfc33dcf3c7f10e9d2ca6b27040dccb7fdccbb8888b1cd6eef", size = 8486, upload-time = "2022-05-17T22:19:21.863Z" }, + { url = "https://files.pythonhosted.org/packages/b3/25/1eccad47c39da342988fe501b174652c37676ab539baeae9a73444932d13/hydra_optuna_sweeper-1.2.0-py3-none-any.whl", hash = "sha256:b012d94c1dc86edfc33dcf3c7f10e9d2ca6b27040dccb7fdccbb8888b1cd6eef", size = 8486 }, ] [[package]] name = "idna" version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008 }, ] [[package]] @@ -1355,45 +1354,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641 } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656 }, ] [[package]] name = "importlib-resources" version = "6.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693, upload-time = "2025-01-03T18:51:56.698Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461 }, ] [[package]] name = "iniconfig" version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", 
size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 }, ] [[package]] name = "isort" version = "7.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, + { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672 }, ] [[package]] name = "itsdangerous" version = "2.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, ] [[package]] @@ -1407,9 +1406,9 @@ dependencies = [ { name = "opt-einsum" }, { name = "scipy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/82/84fd2c662e4d410a34b0402de9b56bb69d7f72d1b875c3ae0edc07df18cc/jax-0.8.1.tar.gz", hash = "sha256:e53f67b15315f5e154851a7fd77a192b59c6c75b3f7ac56e214296765391cca7", size = 2509320, upload-time = "2025-11-18T19:50:02.609Z" } +sdist = { url = "https://files.pythonhosted.org/packages/32/82/84fd2c662e4d410a34b0402de9b56bb69d7f72d1b875c3ae0edc07df18cc/jax-0.8.1.tar.gz", hash = "sha256:e53f67b15315f5e154851a7fd77a192b59c6c75b3f7ac56e214296765391cca7", size = 2509320 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/e7/19b8cfc8963b2e10a01a4db7bb27ec5fa39ecd024bc62f8e2d1de5625a9d/jax-0.8.1-py3-none-any.whl", hash = "sha256:4cbdc5548f3095cdd69d38e4337950b2fc1f250a740a0234d190e4a319077564", size = 2922137, upload-time = "2025-11-18T19:47:43.693Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e7/19b8cfc8963b2e10a01a4db7bb27ec5fa39ecd024bc62f8e2d1de5625a9d/jax-0.8.1-py3-none-any.whl", hash = "sha256:4cbdc5548f3095cdd69d38e4337950b2fc1f250a740a0234d190e4a319077564", size = 2922137 }, ] [[package]] @@ -1417,8 
+1416,8 @@ name = "jax-cuda12-pjrt" version = "0.8.1" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/e4/53e6f7bb36bfe0b9223deaffc083c5c3e1ac9110837c1ef1139c9669b3a8/jax_cuda12_pjrt-0.8.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:a631d0689903354afd7b3d2ec595b7da06a6230a76da00ff9548f542b21b6250", size = 144096836, upload-time = "2025-11-18T19:47:47.054Z" }, - { url = "https://files.pythonhosted.org/packages/c1/85/c59752caca94e72861f7a6a42f37485df706e60ec4bb27090081899001d4/jax_cuda12_pjrt-0.8.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:452b70ee10cb9ac5d7dfca55ffbcdb89b6c8bc6ba70a45af7c490d1dcea98eb7", size = 150466615, upload-time = "2025-11-18T19:47:52.209Z" }, + { url = "https://files.pythonhosted.org/packages/a3/e4/53e6f7bb36bfe0b9223deaffc083c5c3e1ac9110837c1ef1139c9669b3a8/jax_cuda12_pjrt-0.8.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:a631d0689903354afd7b3d2ec595b7da06a6230a76da00ff9548f542b21b6250", size = 144096836 }, + { url = "https://files.pythonhosted.org/packages/c1/85/c59752caca94e72861f7a6a42f37485df706e60ec4bb27090081899001d4/jax_cuda12_pjrt-0.8.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:452b70ee10cb9ac5d7dfca55ffbcdb89b6c8bc6ba70a45af7c490d1dcea98eb7", size = 150466615 }, ] [[package]] @@ -1429,16 +1428,16 @@ dependencies = [ { name = "jax-cuda12-pjrt" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/89/cb/8119088cab8d798ca4a18d1ed143be3d90057c2fa2e8dbaf3bfff779014d/jax_cuda12_plugin-0.8.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:385001f56f852959f061ae15ad157c39cc4471c8d1d2544dfc3f805684ac2213", size = 5595076, upload-time = "2025-11-18T19:47:59.419Z" }, - { url = "https://files.pythonhosted.org/packages/20/60/1dde369dd70b349ff388cd699d69c7d49ff3494af30b5b774037cc4d45e6/jax_cuda12_plugin-0.8.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = 
"sha256:b60bf0bbda24cec6fa71170bd69b613359f01a376d8e09fe34bf67ecc9a3164f", size = 5606884, upload-time = "2025-11-18T19:48:01.559Z" }, - { url = "https://files.pythonhosted.org/packages/0c/2a/6ab7562c4307d0ac3fb29d154c40dce18d6b1e6a7135f6601db716e03162/jax_cuda12_plugin-0.8.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:da7c0f2ef1c697f9ade51a71cfad211e2bff25407a6855dddde372c0190fc468", size = 5594640, upload-time = "2025-11-18T19:48:02.736Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ee/cd701beb7639f97bed997cc620518b2133c0c4d4bb11af6dddd454388205/jax_cuda12_plugin-0.8.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:7342c8810cc947de78f28c7287a30b2e201b0f51578543dd2553692b79a49942", size = 5606528, upload-time = "2025-11-18T19:48:04.596Z" }, - { url = "https://files.pythonhosted.org/packages/8f/a8/32ede42427d97f017834560ce2ad24e4c4665db433780dcf41ef9738daa4/jax_cuda12_plugin-0.8.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:836eb0cd3af612d17bf17efc7eee175c6b9827989d5370df8ba919947fcb67cf", size = 5606739, upload-time = "2025-11-18T19:48:05.905Z" }, - { url = "https://files.pythonhosted.org/packages/b9/0d/73e0ea8ab8202ef3bfe9f1b107eab7b6c57613e39aa64cb3d7cd2036cf32/jax_cuda12_plugin-0.8.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:13311b72ca703a1bbad1ec516ac9ef750019a2d2c421d4c1daf8acf2720b822e", size = 5614184, upload-time = "2025-11-18T19:48:08.278Z" }, - { url = "https://files.pythonhosted.org/packages/5e/79/03fdb210f873003359dfff25656ded223599c9b3629763c3c3031fa5d649/jax_cuda12_plugin-0.8.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:9968c15b87fd3867b6da0ce30681673a7fc4eedebaadcd24dce892e3f9fe1a52", size = 5595218, upload-time = "2025-11-18T19:48:09.945Z" }, - { url = "https://files.pythonhosted.org/packages/4a/d7/54f2e706624c302b88c27b4762d94faddc6ffb9450526a2eacde50d442d5/jax_cuda12_plugin-0.8.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = 
"sha256:6a4b6fda687ca8361322029d58444bc0326798204806a3f90f231dc8ca5541a5", size = 5607208, upload-time = "2025-11-18T19:48:11.25Z" }, - { url = "https://files.pythonhosted.org/packages/9a/f2/fb2e048af7ceb9580e84adc25cfa1a3cc4168221c693b384a976ffe5c669/jax_cuda12_plugin-0.8.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:1052c29157c99ca01d74d3073bbf4f711eb94465c0b4f5a4322d5e46233b1b2f", size = 5607039, upload-time = "2025-11-18T19:48:12.452Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f2/ac0785d88136598074d56b84549e815a9719064f1bdfa90fa101cf492c6d/jax_cuda12_plugin-0.8.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:5a154723cb6c4e1e7969581a923dacf378f7515b0d53b5f1920e25e51cf6cecc", size = 5614345, upload-time = "2025-11-18T19:48:14.132Z" }, + { url = "https://files.pythonhosted.org/packages/89/cb/8119088cab8d798ca4a18d1ed143be3d90057c2fa2e8dbaf3bfff779014d/jax_cuda12_plugin-0.8.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:385001f56f852959f061ae15ad157c39cc4471c8d1d2544dfc3f805684ac2213", size = 5595076 }, + { url = "https://files.pythonhosted.org/packages/20/60/1dde369dd70b349ff388cd699d69c7d49ff3494af30b5b774037cc4d45e6/jax_cuda12_plugin-0.8.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:b60bf0bbda24cec6fa71170bd69b613359f01a376d8e09fe34bf67ecc9a3164f", size = 5606884 }, + { url = "https://files.pythonhosted.org/packages/0c/2a/6ab7562c4307d0ac3fb29d154c40dce18d6b1e6a7135f6601db716e03162/jax_cuda12_plugin-0.8.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:da7c0f2ef1c697f9ade51a71cfad211e2bff25407a6855dddde372c0190fc468", size = 5594640 }, + { url = "https://files.pythonhosted.org/packages/ea/ee/cd701beb7639f97bed997cc620518b2133c0c4d4bb11af6dddd454388205/jax_cuda12_plugin-0.8.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:7342c8810cc947de78f28c7287a30b2e201b0f51578543dd2553692b79a49942", size = 5606528 }, + { url = 
"https://files.pythonhosted.org/packages/8f/a8/32ede42427d97f017834560ce2ad24e4c4665db433780dcf41ef9738daa4/jax_cuda12_plugin-0.8.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:836eb0cd3af612d17bf17efc7eee175c6b9827989d5370df8ba919947fcb67cf", size = 5606739 }, + { url = "https://files.pythonhosted.org/packages/b9/0d/73e0ea8ab8202ef3bfe9f1b107eab7b6c57613e39aa64cb3d7cd2036cf32/jax_cuda12_plugin-0.8.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:13311b72ca703a1bbad1ec516ac9ef750019a2d2c421d4c1daf8acf2720b822e", size = 5614184 }, + { url = "https://files.pythonhosted.org/packages/5e/79/03fdb210f873003359dfff25656ded223599c9b3629763c3c3031fa5d649/jax_cuda12_plugin-0.8.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:9968c15b87fd3867b6da0ce30681673a7fc4eedebaadcd24dce892e3f9fe1a52", size = 5595218 }, + { url = "https://files.pythonhosted.org/packages/4a/d7/54f2e706624c302b88c27b4762d94faddc6ffb9450526a2eacde50d442d5/jax_cuda12_plugin-0.8.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:6a4b6fda687ca8361322029d58444bc0326798204806a3f90f231dc8ca5541a5", size = 5607208 }, + { url = "https://files.pythonhosted.org/packages/9a/f2/fb2e048af7ceb9580e84adc25cfa1a3cc4168221c693b384a976ffe5c669/jax_cuda12_plugin-0.8.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:1052c29157c99ca01d74d3073bbf4f711eb94465c0b4f5a4322d5e46233b1b2f", size = 5607039 }, + { url = "https://files.pythonhosted.org/packages/f4/f2/ac0785d88136598074d56b84549e815a9719064f1bdfa90fa101cf492c6d/jax_cuda12_plugin-0.8.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:5a154723cb6c4e1e7969581a923dacf378f7515b0d53b5f1920e25e51cf6cecc", size = 5614345 }, ] [[package]] @@ -1451,24 +1450,24 @@ dependencies = [ { name = "scipy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/9d/59b36e2f348e599d5812743f263ca54aa03be1a4c9dfc11504d19864b72d/jaxlib-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:88bde0f535eeea6689e0cd57d40b7660d5206ac95c7d42e09562a109b963a49f", size = 55728156, upload-time = "2025-11-18T19:48:56.254Z" }, - { url = "https://files.pythonhosted.org/packages/7e/73/2aa891de9f5f4c60ba3c63bda97ec4ace50ffb900ff3bf750ce42c514a3b/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:bed1e94ae8c7c16bca4476d8d7f582f0d1a102a4e69c3a9bd2069a0dc42274a9", size = 74209108, upload-time = "2025-11-18T19:48:59.572Z" }, - { url = "https://files.pythonhosted.org/packages/eb/4b/3c7e373d81219ee7493c1581c85a926c413ddeb3794cff87a37023a337e4/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:af4924189fc53b69237715b56ebcbfc71bb91ca16184143dcef0d430c8173de6", size = 80256943, upload-time = "2025-11-18T19:49:02.92Z" }, - { url = "https://files.pythonhosted.org/packages/07/6c/a6f449a7d1c7f91d73c3b8e00ceba92dff9dfd642508bbe1ddba9cb9ea57/jaxlib-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:24ec3f3a9c45d6de060020dc94c444d69e18099fab927ea3979ff8cedf0ed2c9", size = 59787068, upload-time = "2025-11-18T19:49:06.275Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/97c62849b5d8fc075f902201ff136ad224a2ef113d1fa655ece0ffe8b2a4/jaxlib-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a0349f6e8179dc897d33aeb90ec66b4a8041330fbbba8d071dc6167cd2271539", size = 55726611, upload-time = "2025-11-18T19:49:09.162Z" }, - { url = "https://files.pythonhosted.org/packages/fd/2a/9fb7599e43d66958b6a9859e045b605afea31f7fd96cfa35a7a8e978b0f8/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:bd697c171ace1e2e9d6ed910a78f385b3c4095cee290b0255aa58848f2acdeab", size = 74207596, upload-time = "2025-11-18T19:49:12.39Z" }, - { url = "https://files.pythonhosted.org/packages/7d/61/ab5c98641e15f9844dd49efbf6f22c6a9c5d17304319e5be8c51a1dfd088/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:d245bd6a279c72ca5f796df84cdd64d7c9c8abc4b8d89adf4acf45898dab958b", size = 80254560, upload-time = 
"2025-11-18T19:49:16.172Z" }, - { url = "https://files.pythonhosted.org/packages/1c/71/82a04ce93baeca5b3d10340f574e0668d327b7d0d18e32d9080917c507f6/jaxlib-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:8e118e1fbe714f37a94ba26777c17faab7dca4a33646a3d98cd1d99673bbd6b1", size = 59786828, upload-time = "2025-11-18T19:49:19.563Z" }, - { url = "https://files.pythonhosted.org/packages/97/65/e7c625f1fdb54d45ac248d8398a28d6c02528c31feaa6e1c146a08192d77/jaxlib-0.8.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4933298fcfb07a5aa2d1fed21c111d07cea50e6f180dba2cdb5463c13fb98f2f", size = 55835933, upload-time = "2025-11-18T19:49:27.362Z" }, - { url = "https://files.pythonhosted.org/packages/1f/04/e09ff7b5ba0af93501cb196c65103a30e5050083203c1ff581f18718a356/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:f2f11491b077d05249d63813e811401194a41edc8e9cc60af8f4b554057cfad0", size = 74323389, upload-time = "2025-11-18T19:49:30.457Z" }, - { url = "https://files.pythonhosted.org/packages/44/9f/8b7f6ad9eebf8946e73049dae85f86544f5743bc8b2190898415646fa7ec/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:7a5d381fad89622750fae29fab83c0847e2931ad8d6a34dc13b28fc4d67f75a3", size = 80358249, upload-time = "2025-11-18T19:49:33.682Z" }, - { url = "https://files.pythonhosted.org/packages/47/6d/75943de28285afcc8d62e89c3e0efc0abdb7e7a72a9e967c3555fc9a35af/jaxlib-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:90e48973f8dbded7edc8728be84c01ae00412190187fb06622abfa4edd42c0a8", size = 55729587, upload-time = "2025-11-18T19:49:36.952Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ce/9e68ca9f646039d687a94066a5e3e195fc70cebdfbe44945b3c53ceed321/jaxlib-0.8.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:1a4001ed3ba9ed5a812da1b16f52eebb5d473a4480c1523828c7bd3dae8d1375", size = 74222294, upload-time = "2025-11-18T19:49:40.418Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/0f/988a413cbf610610cb14783a6e0964a854d0f388ccafe9b4e61c2c188b88/jaxlib-0.8.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:fdbbf2336c08bbf8f30548e204c8c9d77f8b2a3a5b7fc7985749246feb8852b0", size = 80268801, upload-time = "2025-11-18T19:49:44.943Z" }, - { url = "https://files.pythonhosted.org/packages/22/60/044c7591777971345e4778d006cfa1c603ece98fc06ac29ad1bc8f2e18e7/jaxlib-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:63fc25c4b5d03256798796a024125e29bcf254acc3eae5dc3239d1c30b86b866", size = 62064205, upload-time = "2025-11-18T19:49:49.435Z" }, - { url = "https://files.pythonhosted.org/packages/07/9b/f6f01d79f519b0cbd09a6c751844b1e0294fc53ea0b09882466b21169ea5/jaxlib-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:92c41c9b9862c08521eb90515a7c5bcc840c6d30f86230cebf94aea2d6a0af81", size = 55834325, upload-time = "2025-11-18T19:49:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/61/c7/13d13a6f0b0d2e91431d6a031129d51ea4b23af23bb947882234ed003f09/jaxlib-0.8.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:1bc76edec2bc74a7adb5e29329ece51a67c57cd011a06d55d07da62fbabe3389", size = 74320131, upload-time = "2025-11-18T19:49:56.208Z" }, - { url = "https://files.pythonhosted.org/packages/cd/8a/6cad418c0f11ce0cffa2b74b81fb76e6cf30247288fea75a372b6b163f2e/jaxlib-0.8.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:117f2fe2c19479e560ad85a3ef2fcc0b1d24816456f0d039f865c2acbab63b5a", size = 80360481, upload-time = "2025-11-18T19:50:00.065Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9d/59b36e2f348e599d5812743f263ca54aa03be1a4c9dfc11504d19864b72d/jaxlib-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88bde0f535eeea6689e0cd57d40b7660d5206ac95c7d42e09562a109b963a49f", size = 55728156 }, + { url = "https://files.pythonhosted.org/packages/7e/73/2aa891de9f5f4c60ba3c63bda97ec4ace50ffb900ff3bf750ce42c514a3b/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = 
"sha256:bed1e94ae8c7c16bca4476d8d7f582f0d1a102a4e69c3a9bd2069a0dc42274a9", size = 74209108 }, + { url = "https://files.pythonhosted.org/packages/eb/4b/3c7e373d81219ee7493c1581c85a926c413ddeb3794cff87a37023a337e4/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:af4924189fc53b69237715b56ebcbfc71bb91ca16184143dcef0d430c8173de6", size = 80256943 }, + { url = "https://files.pythonhosted.org/packages/07/6c/a6f449a7d1c7f91d73c3b8e00ceba92dff9dfd642508bbe1ddba9cb9ea57/jaxlib-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:24ec3f3a9c45d6de060020dc94c444d69e18099fab927ea3979ff8cedf0ed2c9", size = 59787068 }, + { url = "https://files.pythonhosted.org/packages/f8/67/97c62849b5d8fc075f902201ff136ad224a2ef113d1fa655ece0ffe8b2a4/jaxlib-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a0349f6e8179dc897d33aeb90ec66b4a8041330fbbba8d071dc6167cd2271539", size = 55726611 }, + { url = "https://files.pythonhosted.org/packages/fd/2a/9fb7599e43d66958b6a9859e045b605afea31f7fd96cfa35a7a8e978b0f8/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:bd697c171ace1e2e9d6ed910a78f385b3c4095cee290b0255aa58848f2acdeab", size = 74207596 }, + { url = "https://files.pythonhosted.org/packages/7d/61/ab5c98641e15f9844dd49efbf6f22c6a9c5d17304319e5be8c51a1dfd088/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:d245bd6a279c72ca5f796df84cdd64d7c9c8abc4b8d89adf4acf45898dab958b", size = 80254560 }, + { url = "https://files.pythonhosted.org/packages/1c/71/82a04ce93baeca5b3d10340f574e0668d327b7d0d18e32d9080917c507f6/jaxlib-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:8e118e1fbe714f37a94ba26777c17faab7dca4a33646a3d98cd1d99673bbd6b1", size = 59786828 }, + { url = "https://files.pythonhosted.org/packages/97/65/e7c625f1fdb54d45ac248d8398a28d6c02528c31feaa6e1c146a08192d77/jaxlib-0.8.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4933298fcfb07a5aa2d1fed21c111d07cea50e6f180dba2cdb5463c13fb98f2f", size = 55835933 }, + { url = 
"https://files.pythonhosted.org/packages/1f/04/e09ff7b5ba0af93501cb196c65103a30e5050083203c1ff581f18718a356/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:f2f11491b077d05249d63813e811401194a41edc8e9cc60af8f4b554057cfad0", size = 74323389 }, + { url = "https://files.pythonhosted.org/packages/44/9f/8b7f6ad9eebf8946e73049dae85f86544f5743bc8b2190898415646fa7ec/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:7a5d381fad89622750fae29fab83c0847e2931ad8d6a34dc13b28fc4d67f75a3", size = 80358249 }, + { url = "https://files.pythonhosted.org/packages/47/6d/75943de28285afcc8d62e89c3e0efc0abdb7e7a72a9e967c3555fc9a35af/jaxlib-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:90e48973f8dbded7edc8728be84c01ae00412190187fb06622abfa4edd42c0a8", size = 55729587 }, + { url = "https://files.pythonhosted.org/packages/2c/ce/9e68ca9f646039d687a94066a5e3e195fc70cebdfbe44945b3c53ceed321/jaxlib-0.8.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:1a4001ed3ba9ed5a812da1b16f52eebb5d473a4480c1523828c7bd3dae8d1375", size = 74222294 }, + { url = "https://files.pythonhosted.org/packages/3c/0f/988a413cbf610610cb14783a6e0964a854d0f388ccafe9b4e61c2c188b88/jaxlib-0.8.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:fdbbf2336c08bbf8f30548e204c8c9d77f8b2a3a5b7fc7985749246feb8852b0", size = 80268801 }, + { url = "https://files.pythonhosted.org/packages/22/60/044c7591777971345e4778d006cfa1c603ece98fc06ac29ad1bc8f2e18e7/jaxlib-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:63fc25c4b5d03256798796a024125e29bcf254acc3eae5dc3239d1c30b86b866", size = 62064205 }, + { url = "https://files.pythonhosted.org/packages/07/9b/f6f01d79f519b0cbd09a6c751844b1e0294fc53ea0b09882466b21169ea5/jaxlib-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:92c41c9b9862c08521eb90515a7c5bcc840c6d30f86230cebf94aea2d6a0af81", size = 55834325 }, + { url = 
"https://files.pythonhosted.org/packages/61/c7/13d13a6f0b0d2e91431d6a031129d51ea4b23af23bb947882234ed003f09/jaxlib-0.8.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:1bc76edec2bc74a7adb5e29329ece51a67c57cd011a06d55d07da62fbabe3389", size = 74320131 }, + { url = "https://files.pythonhosted.org/packages/cd/8a/6cad418c0f11ce0cffa2b74b81fb76e6cf30247288fea75a372b6b163f2e/jaxlib-0.8.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:117f2fe2c19479e560ad85a3ef2fcc0b1d24816456f0d039f865c2acbab63b5a", size = 80360481 }, ] [[package]] @@ -1478,9 +1477,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wadler-lindig" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/1e/827f9e17b26e21c7d4d934fd1a214284ad05663afedd37c21ed105db366b/jaxtyping-0.3.3.tar.gz", hash = "sha256:8003cfd16ba2ad9b47fdda1d982a575299a81ddfc7997ad0e917c87a0897ea86", size = 45484, upload-time = "2025-10-01T13:46:51.933Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/1e/827f9e17b26e21c7d4d934fd1a214284ad05663afedd37c21ed105db366b/jaxtyping-0.3.3.tar.gz", hash = "sha256:8003cfd16ba2ad9b47fdda1d982a575299a81ddfc7997ad0e917c87a0897ea86", size = 45484 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/97/88264b1af140f66ba7ca6eb2f3a108be233ee278bb3f1d5c750243e7458a/jaxtyping-0.3.3-py3-none-any.whl", hash = "sha256:a1c2f0f4351a8deda84b0e3b5c5a50894a1cdae2b82d841279fce4393aff4a7c", size = 55926, upload-time = "2025-10-01T13:46:50.621Z" }, + { url = "https://files.pythonhosted.org/packages/b8/97/88264b1af140f66ba7ca6eb2f3a108be233ee278bb3f1d5c750243e7458a/jaxtyping-0.3.3-py3-none-any.whl", hash = "sha256:a1c2f0f4351a8deda84b0e3b5c5a50894a1cdae2b82d841279fce4393aff4a7c", size = 55926 }, ] [[package]] @@ -1490,99 +1489,99 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, ] [[package]] name = "jmespath" version = "1.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" 
}, + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, ] [[package]] name = "joblib" version = "1.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077, upload-time = "2025-08-27T12:15:46.575Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396, upload-time = "2025-08-27T12:15:45.188Z" }, + { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396 }, ] [[package]] name = "kiwisolver" version = "1.4.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, - { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, - { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, - { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" }, - { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, - { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, - { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, - { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" }, - { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" }, - { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, - { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" }, - { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" }, - { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" }, - { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" }, - { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" }, - { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" }, - { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" }, - { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" }, - { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" }, - { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" }, - { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" }, - { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" }, - { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" }, - { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" }, - { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" }, - { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" }, - { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" }, - { url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" }, - { url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" }, - { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" }, - { url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" }, - { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" }, - { url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = "2025-08-10T21:27:08.452Z" }, - { url = "https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" }, - { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" }, - { url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" }, - { url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" }, - { url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" }, - { url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" }, - { url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" }, - { url = "https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" }, - { url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" }, - { url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" }, - { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" }, - { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, - { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, - { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686 }, + { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460 }, + { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952 }, + { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756 }, + { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404 }, + { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410 }, + { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631 }, + { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963 }, + { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", 
hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295 }, + { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987 }, + { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817 }, + { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895 }, + { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992 }, + { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681 }, + { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464 }, + { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961 }, + { url = 
"https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607 }, + { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546 }, + { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482 }, + { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720 }, + { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907 }, + { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334 }, + { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313 
}, + { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970 }, + { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894 }, + { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995 }, + { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510 }, + { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903 }, + { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402 }, + { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135 }, + { url = 
"https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409 }, + { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763 }, + { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643 }, + { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818 }, + { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963 }, + { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639 }, + { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741 }, + { url = 
"https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646 }, + { url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806 }, + { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605 }, + { url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925 }, + { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414 }, + { url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272 }, + { url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578 }, + { url = 
"https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607 }, + { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150 }, + { url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979 }, + { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456 }, + { url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621 }, + { url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417 }, + { url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582 }, + { url = 
"https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514 }, + { url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905 }, + { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399 }, + { url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197 }, + { url = "https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125 }, + { url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612 }, + { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990 
}, + { url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601 }, + { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041 }, + { url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897 }, + { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835 }, + { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988 }, + { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260 }, ] [[package]] @@ -1592,9 +1591,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 
392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509 }, ] [[package]] @@ -1604,72 +1603,72 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = 
"sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 }, ] [[package]] name = "markupsafe" version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, - { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, - { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, - { url = 
"https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, - { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, - { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, - { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, - { url = 
"https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, - { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, - { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, - { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, - { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, - { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, - { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, - { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, - { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, - { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, - { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, - { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, - { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, - { 
url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, - { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, - { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, - { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, - { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, - { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, - { url = 
"https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, - { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, - { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, - { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, - { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, - { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, - { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, - { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, - { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, - { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, - { url = 
"https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, - { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, - { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, - { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, - { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, - 
{ url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, - { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, - { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615 }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020 }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332 }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947 }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962 }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760 }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529 }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015 }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540 }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105 }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906 }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622 }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029 }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374 }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980 }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990 }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", 
hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784 }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588 }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041 }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543 }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113 }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911 }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658 }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066 }, + { url = 
"https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639 }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569 }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284 }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801 }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769 }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642 }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 
14612 }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200 }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973 }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619 }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029 }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408 }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005 }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 
22048 }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821 }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606 }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043 }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747 }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341 }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073 }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661 }, + { url = 
"https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069 }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670 }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598 }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261 }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835 }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733 }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", 
size = 23672 }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819 }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426 }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146 }, ] [[package]] @@ -1687,61 +1686,61 @@ dependencies = [ { name = "pyparsing" }, { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/e2/d2d5295be2f44c678ebaf3544ba32d20c1f9ef08c49fe47f496180e1db15/matplotlib-3.10.7.tar.gz", hash = "sha256:a06ba7e2a2ef9131c79c49e63dad355d2d878413a0376c1727c8b9335ff731c7", size = 34804865, upload-time = "2025-10-09T00:28:00.669Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/be/b3/09eb0f7796932826ec20c25b517d568627754f6c6462fca19e12c02f2e12/matplotlib-3.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a0edb7209e21840e8361e91ea84ea676658aa93edd5f8762793dec77a4a6748", size = 8272389, upload-time = "2025-10-09T00:26:42.474Z" }, - { url = "https://files.pythonhosted.org/packages/11/0b/1ae80ddafb8652fd8046cb5c8460ecc8d4afccb89e2c6d6bec61e04e1eaf/matplotlib-3.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c380371d3c23e0eadf8ebff114445b9f970aff2010198d498d4ab4c3b41eea4f", size = 8128247, upload-time = "2025-10-09T00:26:44.77Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/18/95ae2e242d4a5c98bd6e90e36e128d71cf1c7e39b0874feaed3ef782e789/matplotlib-3.10.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d5f256d49fea31f40f166a5e3131235a5d2f4b7f44520b1cf0baf1ce568ccff0", size = 8696996, upload-time = "2025-10-09T00:26:46.792Z" }, - { url = "https://files.pythonhosted.org/packages/7e/3d/5b559efc800bd05cb2033aa85f7e13af51958136a48327f7c261801ff90a/matplotlib-3.10.7-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11ae579ac83cdf3fb72573bb89f70e0534de05266728740d478f0f818983c695", size = 9530153, upload-time = "2025-10-09T00:26:49.07Z" }, - { url = "https://files.pythonhosted.org/packages/88/57/eab4a719fd110312d3c220595d63a3c85ec2a39723f0f4e7fa7e6e3f74ba/matplotlib-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4c14b6acd16cddc3569a2d515cfdd81c7a68ac5639b76548cfc1a9e48b20eb65", size = 9593093, upload-time = "2025-10-09T00:26:51.067Z" }, - { url = "https://files.pythonhosted.org/packages/31/3c/80816f027b3a4a28cd2a0a6ef7f89a2db22310e945cd886ec25bfb399221/matplotlib-3.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:0d8c32b7ea6fb80b1aeff5a2ceb3fb9778e2759e899d9beff75584714afcc5ee", size = 8122771, upload-time = "2025-10-09T00:26:53.296Z" }, - { url = "https://files.pythonhosted.org/packages/de/77/ef1fc78bfe99999b2675435cc52120887191c566b25017d78beaabef7f2d/matplotlib-3.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:5f3f6d315dcc176ba7ca6e74c7768fb7e4cf566c49cb143f6bc257b62e634ed8", size = 7992812, upload-time = "2025-10-09T00:26:54.882Z" }, - { url = "https://files.pythonhosted.org/packages/02/9c/207547916a02c78f6bdd83448d9b21afbc42f6379ed887ecf610984f3b4e/matplotlib-3.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1d9d3713a237970569156cfb4de7533b7c4eacdd61789726f444f96a0d28f57f", size = 8273212, upload-time = "2025-10-09T00:26:56.752Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/d0/b3d3338d467d3fc937f0bb7f256711395cae6f78e22cef0656159950adf0/matplotlib-3.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37a1fea41153dd6ee061d21ab69c9cf2cf543160b1b85d89cd3d2e2a7902ca4c", size = 8128713, upload-time = "2025-10-09T00:26:59.001Z" }, - { url = "https://files.pythonhosted.org/packages/22/ff/6425bf5c20d79aa5b959d1ce9e65f599632345391381c9a104133fe0b171/matplotlib-3.10.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b3c4ea4948d93c9c29dc01c0c23eef66f2101bf75158c291b88de6525c55c3d1", size = 8698527, upload-time = "2025-10-09T00:27:00.69Z" }, - { url = "https://files.pythonhosted.org/packages/d0/7f/ccdca06f4c2e6c7989270ed7829b8679466682f4cfc0f8c9986241c023b6/matplotlib-3.10.7-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22df30ffaa89f6643206cf13877191c63a50e8f800b038bc39bee9d2d4957632", size = 9529690, upload-time = "2025-10-09T00:27:02.664Z" }, - { url = "https://files.pythonhosted.org/packages/b8/95/b80fc2c1f269f21ff3d193ca697358e24408c33ce2b106a7438a45407b63/matplotlib-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b69676845a0a66f9da30e87f48be36734d6748024b525ec4710be40194282c84", size = 9593732, upload-time = "2025-10-09T00:27:04.653Z" }, - { url = "https://files.pythonhosted.org/packages/e1/b6/23064a96308b9aeceeffa65e96bcde459a2ea4934d311dee20afde7407a0/matplotlib-3.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:744991e0cc863dd669c8dc9136ca4e6e0082be2070b9d793cbd64bec872a6815", size = 8122727, upload-time = "2025-10-09T00:27:06.814Z" }, - { url = "https://files.pythonhosted.org/packages/b3/a6/2faaf48133b82cf3607759027f82b5c702aa99cdfcefb7f93d6ccf26a424/matplotlib-3.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:fba2974df0bf8ce3c995fa84b79cde38326e0f7b5409e7a3a481c1141340bcf7", size = 7992958, upload-time = "2025-10-09T00:27:08.567Z" }, - { url = 
"https://files.pythonhosted.org/packages/4a/f0/b018fed0b599bd48d84c08794cb242227fe3341952da102ee9d9682db574/matplotlib-3.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:932c55d1fa7af4423422cb6a492a31cbcbdbe68fd1a9a3f545aa5e7a143b5355", size = 8316849, upload-time = "2025-10-09T00:27:10.254Z" }, - { url = "https://files.pythonhosted.org/packages/b0/b7/bb4f23856197659f275e11a2a164e36e65e9b48ea3e93c4ec25b4f163198/matplotlib-3.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e38c2d581d62ee729a6e144c47a71b3f42fb4187508dbbf4fe71d5612c3433b", size = 8178225, upload-time = "2025-10-09T00:27:12.241Z" }, - { url = "https://files.pythonhosted.org/packages/62/56/0600609893ff277e6f3ab3c0cef4eafa6e61006c058e84286c467223d4d5/matplotlib-3.10.7-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:786656bb13c237bbcebcd402f65f44dd61ead60ee3deb045af429d889c8dbc67", size = 8711708, upload-time = "2025-10-09T00:27:13.879Z" }, - { url = "https://files.pythonhosted.org/packages/d8/1a/6bfecb0cafe94d6658f2f1af22c43b76cf7a1c2f0dc34ef84cbb6809617e/matplotlib-3.10.7-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09d7945a70ea43bf9248f4b6582734c2fe726723204a76eca233f24cffc7ef67", size = 9541409, upload-time = "2025-10-09T00:27:15.684Z" }, - { url = "https://files.pythonhosted.org/packages/08/50/95122a407d7f2e446fd865e2388a232a23f2b81934960ea802f3171518e4/matplotlib-3.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d0b181e9fa8daf1d9f2d4c547527b167cb8838fc587deabca7b5c01f97199e84", size = 9594054, upload-time = "2025-10-09T00:27:17.547Z" }, - { url = "https://files.pythonhosted.org/packages/13/76/75b194a43b81583478a81e78a07da8d9ca6ddf50dd0a2ccabf258059481d/matplotlib-3.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:31963603041634ce1a96053047b40961f7a29eb8f9a62e80cc2c0427aa1d22a2", size = 8200100, upload-time = "2025-10-09T00:27:20.039Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/9e/6aefebdc9f8235c12bdeeda44cc0383d89c1e41da2c400caf3ee2073a3ce/matplotlib-3.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:aebed7b50aa6ac698c90f60f854b47e48cd2252b30510e7a1feddaf5a3f72cbf", size = 8042131, upload-time = "2025-10-09T00:27:21.608Z" }, - { url = "https://files.pythonhosted.org/packages/0d/4b/e5bc2c321b6a7e3a75638d937d19ea267c34bd5a90e12bee76c4d7c7a0d9/matplotlib-3.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d883460c43e8c6b173fef244a2341f7f7c0e9725c7fe68306e8e44ed9c8fb100", size = 8273787, upload-time = "2025-10-09T00:27:23.27Z" }, - { url = "https://files.pythonhosted.org/packages/86/ad/6efae459c56c2fbc404da154e13e3a6039129f3c942b0152624f1c621f05/matplotlib-3.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07124afcf7a6504eafcb8ce94091c5898bbdd351519a1beb5c45f7a38c67e77f", size = 8131348, upload-time = "2025-10-09T00:27:24.926Z" }, - { url = "https://files.pythonhosted.org/packages/a6/5a/a4284d2958dee4116359cc05d7e19c057e64ece1b4ac986ab0f2f4d52d5a/matplotlib-3.10.7-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c17398b709a6cce3d9fdb1595c33e356d91c098cd9486cb2cc21ea2ea418e715", size = 9533949, upload-time = "2025-10-09T00:27:26.704Z" }, - { url = "https://files.pythonhosted.org/packages/de/ff/f3781b5057fa3786623ad8976fc9f7b0d02b2f28534751fd5a44240de4cf/matplotlib-3.10.7-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7146d64f561498764561e9cd0ed64fcf582e570fc519e6f521e2d0cfd43365e1", size = 9804247, upload-time = "2025-10-09T00:27:28.514Z" }, - { url = "https://files.pythonhosted.org/packages/47/5a/993a59facb8444efb0e197bf55f545ee449902dcee86a4dfc580c3b61314/matplotlib-3.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:90ad854c0a435da3104c01e2c6f0028d7e719b690998a2333d7218db80950722", size = 9595497, upload-time = "2025-10-09T00:27:30.418Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/a5/77c95aaa9bb32c345cbb49626ad8eb15550cba2e6d4c88081a6c2ac7b08d/matplotlib-3.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:4645fc5d9d20ffa3a39361fcdbcec731382763b623b72627806bf251b6388866", size = 8252732, upload-time = "2025-10-09T00:27:32.332Z" }, - { url = "https://files.pythonhosted.org/packages/74/04/45d269b4268d222390d7817dae77b159651909669a34ee9fdee336db5883/matplotlib-3.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:9257be2f2a03415f9105c486d304a321168e61ad450f6153d77c69504ad764bb", size = 8124240, upload-time = "2025-10-09T00:27:33.94Z" }, - { url = "https://files.pythonhosted.org/packages/4b/c7/ca01c607bb827158b439208c153d6f14ddb9fb640768f06f7ca3488ae67b/matplotlib-3.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1e4bbad66c177a8fdfa53972e5ef8be72a5f27e6a607cec0d8579abd0f3102b1", size = 8316938, upload-time = "2025-10-09T00:27:35.534Z" }, - { url = "https://files.pythonhosted.org/packages/84/d2/5539e66e9f56d2fdec94bb8436f5e449683b4e199bcc897c44fbe3c99e28/matplotlib-3.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d8eb7194b084b12feb19142262165832fc6ee879b945491d1c3d4660748020c4", size = 8178245, upload-time = "2025-10-09T00:27:37.334Z" }, - { url = "https://files.pythonhosted.org/packages/77/b5/e6ca22901fd3e4fe433a82e583436dd872f6c966fca7e63cf806b40356f8/matplotlib-3.10.7-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d41379b05528091f00e1728004f9a8d7191260f3862178b88e8fd770206318", size = 9541411, upload-time = "2025-10-09T00:27:39.387Z" }, - { url = "https://files.pythonhosted.org/packages/9e/99/a4524db57cad8fee54b7237239a8f8360bfcfa3170d37c9e71c090c0f409/matplotlib-3.10.7-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4a74f79fafb2e177f240579bc83f0b60f82cc47d2f1d260f422a0627207008ca", size = 9803664, upload-time = "2025-10-09T00:27:41.492Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/a5/85e2edf76ea0ad4288d174926d9454ea85f3ce5390cc4e6fab196cbf250b/matplotlib-3.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:702590829c30aada1e8cef0568ddbffa77ca747b4d6e36c6d173f66e301f89cc", size = 9594066, upload-time = "2025-10-09T00:27:43.694Z" }, - { url = "https://files.pythonhosted.org/packages/39/69/9684368a314f6d83fe5c5ad2a4121a3a8e03723d2e5c8ea17b66c1bad0e7/matplotlib-3.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:f79d5de970fc90cd5591f60053aecfce1fcd736e0303d9f0bf86be649fa68fb8", size = 8342832, upload-time = "2025-10-09T00:27:45.543Z" }, - { url = "https://files.pythonhosted.org/packages/04/5f/e22e08da14bc1a0894184640d47819d2338b792732e20d292bf86e5ab785/matplotlib-3.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:cb783436e47fcf82064baca52ce748af71725d0352e1d31564cbe9c95df92b9c", size = 8172585, upload-time = "2025-10-09T00:27:47.185Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/ae/e2/d2d5295be2f44c678ebaf3544ba32d20c1f9ef08c49fe47f496180e1db15/matplotlib-3.10.7.tar.gz", hash = "sha256:a06ba7e2a2ef9131c79c49e63dad355d2d878413a0376c1727c8b9335ff731c7", size = 34804865 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/b3/09eb0f7796932826ec20c25b517d568627754f6c6462fca19e12c02f2e12/matplotlib-3.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a0edb7209e21840e8361e91ea84ea676658aa93edd5f8762793dec77a4a6748", size = 8272389 }, + { url = "https://files.pythonhosted.org/packages/11/0b/1ae80ddafb8652fd8046cb5c8460ecc8d4afccb89e2c6d6bec61e04e1eaf/matplotlib-3.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c380371d3c23e0eadf8ebff114445b9f970aff2010198d498d4ab4c3b41eea4f", size = 8128247 }, + { url = "https://files.pythonhosted.org/packages/7d/18/95ae2e242d4a5c98bd6e90e36e128d71cf1c7e39b0874feaed3ef782e789/matplotlib-3.10.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:d5f256d49fea31f40f166a5e3131235a5d2f4b7f44520b1cf0baf1ce568ccff0", size = 8696996 }, + { url = "https://files.pythonhosted.org/packages/7e/3d/5b559efc800bd05cb2033aa85f7e13af51958136a48327f7c261801ff90a/matplotlib-3.10.7-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11ae579ac83cdf3fb72573bb89f70e0534de05266728740d478f0f818983c695", size = 9530153 }, + { url = "https://files.pythonhosted.org/packages/88/57/eab4a719fd110312d3c220595d63a3c85ec2a39723f0f4e7fa7e6e3f74ba/matplotlib-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4c14b6acd16cddc3569a2d515cfdd81c7a68ac5639b76548cfc1a9e48b20eb65", size = 9593093 }, + { url = "https://files.pythonhosted.org/packages/31/3c/80816f027b3a4a28cd2a0a6ef7f89a2db22310e945cd886ec25bfb399221/matplotlib-3.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:0d8c32b7ea6fb80b1aeff5a2ceb3fb9778e2759e899d9beff75584714afcc5ee", size = 8122771 }, + { url = "https://files.pythonhosted.org/packages/de/77/ef1fc78bfe99999b2675435cc52120887191c566b25017d78beaabef7f2d/matplotlib-3.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:5f3f6d315dcc176ba7ca6e74c7768fb7e4cf566c49cb143f6bc257b62e634ed8", size = 7992812 }, + { url = "https://files.pythonhosted.org/packages/02/9c/207547916a02c78f6bdd83448d9b21afbc42f6379ed887ecf610984f3b4e/matplotlib-3.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1d9d3713a237970569156cfb4de7533b7c4eacdd61789726f444f96a0d28f57f", size = 8273212 }, + { url = "https://files.pythonhosted.org/packages/bc/d0/b3d3338d467d3fc937f0bb7f256711395cae6f78e22cef0656159950adf0/matplotlib-3.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37a1fea41153dd6ee061d21ab69c9cf2cf543160b1b85d89cd3d2e2a7902ca4c", size = 8128713 }, + { url = "https://files.pythonhosted.org/packages/22/ff/6425bf5c20d79aa5b959d1ce9e65f599632345391381c9a104133fe0b171/matplotlib-3.10.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:b3c4ea4948d93c9c29dc01c0c23eef66f2101bf75158c291b88de6525c55c3d1", size = 8698527 }, + { url = "https://files.pythonhosted.org/packages/d0/7f/ccdca06f4c2e6c7989270ed7829b8679466682f4cfc0f8c9986241c023b6/matplotlib-3.10.7-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22df30ffaa89f6643206cf13877191c63a50e8f800b038bc39bee9d2d4957632", size = 9529690 }, + { url = "https://files.pythonhosted.org/packages/b8/95/b80fc2c1f269f21ff3d193ca697358e24408c33ce2b106a7438a45407b63/matplotlib-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b69676845a0a66f9da30e87f48be36734d6748024b525ec4710be40194282c84", size = 9593732 }, + { url = "https://files.pythonhosted.org/packages/e1/b6/23064a96308b9aeceeffa65e96bcde459a2ea4934d311dee20afde7407a0/matplotlib-3.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:744991e0cc863dd669c8dc9136ca4e6e0082be2070b9d793cbd64bec872a6815", size = 8122727 }, + { url = "https://files.pythonhosted.org/packages/b3/a6/2faaf48133b82cf3607759027f82b5c702aa99cdfcefb7f93d6ccf26a424/matplotlib-3.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:fba2974df0bf8ce3c995fa84b79cde38326e0f7b5409e7a3a481c1141340bcf7", size = 7992958 }, + { url = "https://files.pythonhosted.org/packages/4a/f0/b018fed0b599bd48d84c08794cb242227fe3341952da102ee9d9682db574/matplotlib-3.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:932c55d1fa7af4423422cb6a492a31cbcbdbe68fd1a9a3f545aa5e7a143b5355", size = 8316849 }, + { url = "https://files.pythonhosted.org/packages/b0/b7/bb4f23856197659f275e11a2a164e36e65e9b48ea3e93c4ec25b4f163198/matplotlib-3.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e38c2d581d62ee729a6e144c47a71b3f42fb4187508dbbf4fe71d5612c3433b", size = 8178225 }, + { url = "https://files.pythonhosted.org/packages/62/56/0600609893ff277e6f3ab3c0cef4eafa6e61006c058e84286c467223d4d5/matplotlib-3.10.7-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:786656bb13c237bbcebcd402f65f44dd61ead60ee3deb045af429d889c8dbc67", size = 8711708 }, + { url = "https://files.pythonhosted.org/packages/d8/1a/6bfecb0cafe94d6658f2f1af22c43b76cf7a1c2f0dc34ef84cbb6809617e/matplotlib-3.10.7-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09d7945a70ea43bf9248f4b6582734c2fe726723204a76eca233f24cffc7ef67", size = 9541409 }, + { url = "https://files.pythonhosted.org/packages/08/50/95122a407d7f2e446fd865e2388a232a23f2b81934960ea802f3171518e4/matplotlib-3.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d0b181e9fa8daf1d9f2d4c547527b167cb8838fc587deabca7b5c01f97199e84", size = 9594054 }, + { url = "https://files.pythonhosted.org/packages/13/76/75b194a43b81583478a81e78a07da8d9ca6ddf50dd0a2ccabf258059481d/matplotlib-3.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:31963603041634ce1a96053047b40961f7a29eb8f9a62e80cc2c0427aa1d22a2", size = 8200100 }, + { url = "https://files.pythonhosted.org/packages/f5/9e/6aefebdc9f8235c12bdeeda44cc0383d89c1e41da2c400caf3ee2073a3ce/matplotlib-3.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:aebed7b50aa6ac698c90f60f854b47e48cd2252b30510e7a1feddaf5a3f72cbf", size = 8042131 }, + { url = "https://files.pythonhosted.org/packages/0d/4b/e5bc2c321b6a7e3a75638d937d19ea267c34bd5a90e12bee76c4d7c7a0d9/matplotlib-3.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d883460c43e8c6b173fef244a2341f7f7c0e9725c7fe68306e8e44ed9c8fb100", size = 8273787 }, + { url = "https://files.pythonhosted.org/packages/86/ad/6efae459c56c2fbc404da154e13e3a6039129f3c942b0152624f1c621f05/matplotlib-3.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07124afcf7a6504eafcb8ce94091c5898bbdd351519a1beb5c45f7a38c67e77f", size = 8131348 }, + { url = "https://files.pythonhosted.org/packages/a6/5a/a4284d2958dee4116359cc05d7e19c057e64ece1b4ac986ab0f2f4d52d5a/matplotlib-3.10.7-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c17398b709a6cce3d9fdb1595c33e356d91c098cd9486cb2cc21ea2ea418e715", size = 9533949 }, + { url = "https://files.pythonhosted.org/packages/de/ff/f3781b5057fa3786623ad8976fc9f7b0d02b2f28534751fd5a44240de4cf/matplotlib-3.10.7-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7146d64f561498764561e9cd0ed64fcf582e570fc519e6f521e2d0cfd43365e1", size = 9804247 }, + { url = "https://files.pythonhosted.org/packages/47/5a/993a59facb8444efb0e197bf55f545ee449902dcee86a4dfc580c3b61314/matplotlib-3.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:90ad854c0a435da3104c01e2c6f0028d7e719b690998a2333d7218db80950722", size = 9595497 }, + { url = "https://files.pythonhosted.org/packages/0d/a5/77c95aaa9bb32c345cbb49626ad8eb15550cba2e6d4c88081a6c2ac7b08d/matplotlib-3.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:4645fc5d9d20ffa3a39361fcdbcec731382763b623b72627806bf251b6388866", size = 8252732 }, + { url = "https://files.pythonhosted.org/packages/74/04/45d269b4268d222390d7817dae77b159651909669a34ee9fdee336db5883/matplotlib-3.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:9257be2f2a03415f9105c486d304a321168e61ad450f6153d77c69504ad764bb", size = 8124240 }, + { url = "https://files.pythonhosted.org/packages/4b/c7/ca01c607bb827158b439208c153d6f14ddb9fb640768f06f7ca3488ae67b/matplotlib-3.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1e4bbad66c177a8fdfa53972e5ef8be72a5f27e6a607cec0d8579abd0f3102b1", size = 8316938 }, + { url = "https://files.pythonhosted.org/packages/84/d2/5539e66e9f56d2fdec94bb8436f5e449683b4e199bcc897c44fbe3c99e28/matplotlib-3.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d8eb7194b084b12feb19142262165832fc6ee879b945491d1c3d4660748020c4", size = 8178245 }, + { url = "https://files.pythonhosted.org/packages/77/b5/e6ca22901fd3e4fe433a82e583436dd872f6c966fca7e63cf806b40356f8/matplotlib-3.10.7-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b4d41379b05528091f00e1728004f9a8d7191260f3862178b88e8fd770206318", size = 9541411 }, + { url = "https://files.pythonhosted.org/packages/9e/99/a4524db57cad8fee54b7237239a8f8360bfcfa3170d37c9e71c090c0f409/matplotlib-3.10.7-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4a74f79fafb2e177f240579bc83f0b60f82cc47d2f1d260f422a0627207008ca", size = 9803664 }, + { url = "https://files.pythonhosted.org/packages/e6/a5/85e2edf76ea0ad4288d174926d9454ea85f3ce5390cc4e6fab196cbf250b/matplotlib-3.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:702590829c30aada1e8cef0568ddbffa77ca747b4d6e36c6d173f66e301f89cc", size = 9594066 }, + { url = "https://files.pythonhosted.org/packages/39/69/9684368a314f6d83fe5c5ad2a4121a3a8e03723d2e5c8ea17b66c1bad0e7/matplotlib-3.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:f79d5de970fc90cd5591f60053aecfce1fcd736e0303d9f0bf86be649fa68fb8", size = 8342832 }, + { url = "https://files.pythonhosted.org/packages/04/5f/e22e08da14bc1a0894184640d47819d2338b792732e20d292bf86e5ab785/matplotlib-3.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:cb783436e47fcf82064baca52ce748af71725d0352e1d31564cbe9c95df92b9c", size = 8172585 }, ] [[package]] name = "mccabe" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658 } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350 }, ] [[package]] name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, ] [[package]] @@ -1751,38 +1750,38 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927, upload-time = "2025-11-17T22:31:48.182Z" }, - { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464, upload-time = "2025-11-17T22:31:50.135Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002, upload-time = "2025-11-17T22:31:52.001Z" }, - { url = "https://files.pythonhosted.org/packages/f5/f0/0cfadd537c5470378b1b32bd859cf2824972174b51b873c9d95cfd7475a5/ml_dtypes-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:c1a953995cccb9e25a4ae19e34316671e4e2edaebe4cf538229b1fc7109087b7", size = 212222, upload-time = "2025-11-17T22:31:53.742Z" }, - { url = "https://files.pythonhosted.org/packages/16/2e/9acc86985bfad8f2c2d30291b27cd2bb4c74cea08695bd540906ed744249/ml_dtypes-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:9bad06436568442575beb2d03389aa7456c690a5b05892c471215bfd8cf39460", size = 160793, upload-time = "2025-11-17T22:31:55.358Z" }, - { url = "https://files.pythonhosted.org/packages/d9/a1/4008f14bbc616cfb1ac5b39ea485f9c63031c4634ab3f4cf72e7541f816a/ml_dtypes-0.5.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c760d85a2f82e2bed75867079188c9d18dae2ee77c25a54d60e9cc79be1bc48", size = 676888, upload-time = "2025-11-17T22:31:56.907Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/b7/dff378afc2b0d5a7d6cd9d3209b60474d9819d1189d347521e1688a60a53/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce756d3a10d0c4067172804c9cc276ba9cc0ff47af9078ad439b075d1abdc29b", size = 5036993, upload-time = "2025-11-17T22:31:58.497Z" }, - { url = "https://files.pythonhosted.org/packages/eb/33/40cd74219417e78b97c47802037cf2d87b91973e18bb968a7da48a96ea44/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:533ce891ba774eabf607172254f2e7260ba5f57bdd64030c9a4fcfbd99815d0d", size = 5010956, upload-time = "2025-11-17T22:31:59.931Z" }, - { url = "https://files.pythonhosted.org/packages/e1/8b/200088c6859d8221454825959df35b5244fa9bdf263fd0249ac5fb75e281/ml_dtypes-0.5.4-cp313-cp313-win_amd64.whl", hash = "sha256:f21c9219ef48ca5ee78402d5cc831bd58ea27ce89beda894428bc67a52da5328", size = 212224, upload-time = "2025-11-17T22:32:01.349Z" }, - { url = "https://files.pythonhosted.org/packages/8f/75/dfc3775cb36367816e678f69a7843f6f03bd4e2bcd79941e01ea960a068e/ml_dtypes-0.5.4-cp313-cp313-win_arm64.whl", hash = "sha256:35f29491a3e478407f7047b8a4834e4640a77d2737e0b294d049746507af5175", size = 160798, upload-time = "2025-11-17T22:32:02.864Z" }, - { url = "https://files.pythonhosted.org/packages/4f/74/e9ddb35fd1dd43b1106c20ced3f53c2e8e7fc7598c15638e9f80677f81d4/ml_dtypes-0.5.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:304ad47faa395415b9ccbcc06a0350800bc50eda70f0e45326796e27c62f18b6", size = 702083, upload-time = "2025-11-17T22:32:04.08Z" }, - { url = "https://files.pythonhosted.org/packages/74/f5/667060b0aed1aa63166b22897fdf16dca9eb704e6b4bbf86848d5a181aa7/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a0df4223b514d799b8a1629c65ddc351b3efa833ccf7f8ea0cf654a61d1e35d", size = 5354111, upload-time = "2025-11-17T22:32:05.546Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/49/0f8c498a28c0efa5f5c95a9e374c83ec1385ca41d0e85e7cf40e5d519a21/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531eff30e4d368cb6255bc2328d070e35836aa4f282a0fb5f3a0cd7260257298", size = 5366453, upload-time = "2025-11-17T22:32:07.115Z" }, - { url = "https://files.pythonhosted.org/packages/8c/27/12607423d0a9c6bbbcc780ad19f1f6baa2b68b18ce4bddcdc122c4c68dc9/ml_dtypes-0.5.4-cp313-cp313t-win_amd64.whl", hash = "sha256:cb73dccfc991691c444acc8c0012bee8f2470da826a92e3a20bb333b1a7894e6", size = 225612, upload-time = "2025-11-17T22:32:08.615Z" }, - { url = "https://files.pythonhosted.org/packages/e5/80/5a5929e92c72936d5b19872c5fb8fc09327c1da67b3b68c6a13139e77e20/ml_dtypes-0.5.4-cp313-cp313t-win_arm64.whl", hash = "sha256:3bbbe120b915090d9dd1375e4684dd17a20a2491ef25d640a908281da85e73f1", size = 164145, upload-time = "2025-11-17T22:32:09.782Z" }, - { url = "https://files.pythonhosted.org/packages/72/4e/1339dc6e2557a344f5ba5590872e80346f76f6cb2ac3dd16e4666e88818c/ml_dtypes-0.5.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2b857d3af6ac0d39db1de7c706e69c7f9791627209c3d6dedbfca8c7e5faec22", size = 673781, upload-time = "2025-11-17T22:32:11.364Z" }, - { url = "https://files.pythonhosted.org/packages/04/f9/067b84365c7e83bda15bba2b06c6ca250ce27b20630b1128c435fb7a09aa/ml_dtypes-0.5.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:805cef3a38f4eafae3a5bf9ebdcdb741d0bcfd9e1bd90eb54abd24f928cd2465", size = 5036145, upload-time = "2025-11-17T22:32:12.783Z" }, - { url = "https://files.pythonhosted.org/packages/c6/bb/82c7dcf38070b46172a517e2334e665c5bf374a262f99a283ea454bece7c/ml_dtypes-0.5.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14a4fd3228af936461db66faccef6e4f41c1d82fcc30e9f8d58a08916b1d811f", size = 5010230, upload-time = "2025-11-17T22:32:14.38Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/93/2bfed22d2498c468f6bcd0d9f56b033eaa19f33320389314c19ef6766413/ml_dtypes-0.5.4-cp314-cp314-win_amd64.whl", hash = "sha256:8c6a2dcebd6f3903e05d51960a8058d6e131fe69f952a5397e5dbabc841b6d56", size = 221032, upload-time = "2025-11-17T22:32:15.763Z" }, - { url = "https://files.pythonhosted.org/packages/76/a3/9c912fe6ea747bb10fe2f8f54d027eb265db05dfb0c6335e3e063e74e6e8/ml_dtypes-0.5.4-cp314-cp314-win_arm64.whl", hash = "sha256:5a0f68ca8fd8d16583dfa7793973feb86f2fbb56ce3966daf9c9f748f52a2049", size = 163353, upload-time = "2025-11-17T22:32:16.932Z" }, - { url = "https://files.pythonhosted.org/packages/cd/02/48aa7d84cc30ab4ee37624a2fd98c56c02326785750cd212bc0826c2f15b/ml_dtypes-0.5.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:bfc534409c5d4b0bf945af29e5d0ab075eae9eecbb549ff8a29280db822f34f9", size = 702085, upload-time = "2025-11-17T22:32:18.175Z" }, - { url = "https://files.pythonhosted.org/packages/5a/e7/85cb99fe80a7a5513253ec7faa88a65306be071163485e9a626fce1b6e84/ml_dtypes-0.5.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2314892cdc3fcf05e373d76d72aaa15fda9fb98625effa73c1d646f331fcecb7", size = 5355358, upload-time = "2025-11-17T22:32:19.7Z" }, - { url = "https://files.pythonhosted.org/packages/79/2b/a826ba18d2179a56e144aef69e57fb2ab7c464ef0b2111940ee8a3a223a2/ml_dtypes-0.5.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d2ffd05a2575b1519dc928c0b93c06339eb67173ff53acb00724502cda231cf", size = 5366332, upload-time = "2025-11-17T22:32:21.193Z" }, - { url = "https://files.pythonhosted.org/packages/84/44/f4d18446eacb20ea11e82f133ea8f86e2bf2891785b67d9da8d0ab0ef525/ml_dtypes-0.5.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4381fe2f2452a2d7589689693d3162e876b3ddb0a832cde7a414f8e1adf7eab1", size = 236612, upload-time = "2025-11-17T22:32:22.579Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/3f/3d42e9a78fe5edf792a83c074b13b9b770092a4fbf3462872f4303135f09/ml_dtypes-0.5.4-cp314-cp314t-win_arm64.whl", hash = "sha256:11942cbf2cf92157db91e5022633c0d9474d4dfd813a909383bd23ce828a4b7d", size = 168825, upload-time = "2025-11-17T22:32:23.766Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927 }, + { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464 }, + { url = "https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002 }, + { url = "https://files.pythonhosted.org/packages/f5/f0/0cfadd537c5470378b1b32bd859cf2824972174b51b873c9d95cfd7475a5/ml_dtypes-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:c1a953995cccb9e25a4ae19e34316671e4e2edaebe4cf538229b1fc7109087b7", size = 212222 }, + { url = "https://files.pythonhosted.org/packages/16/2e/9acc86985bfad8f2c2d30291b27cd2bb4c74cea08695bd540906ed744249/ml_dtypes-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:9bad06436568442575beb2d03389aa7456c690a5b05892c471215bfd8cf39460", size = 160793 }, + { url = 
"https://files.pythonhosted.org/packages/d9/a1/4008f14bbc616cfb1ac5b39ea485f9c63031c4634ab3f4cf72e7541f816a/ml_dtypes-0.5.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c760d85a2f82e2bed75867079188c9d18dae2ee77c25a54d60e9cc79be1bc48", size = 676888 }, + { url = "https://files.pythonhosted.org/packages/d3/b7/dff378afc2b0d5a7d6cd9d3209b60474d9819d1189d347521e1688a60a53/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce756d3a10d0c4067172804c9cc276ba9cc0ff47af9078ad439b075d1abdc29b", size = 5036993 }, + { url = "https://files.pythonhosted.org/packages/eb/33/40cd74219417e78b97c47802037cf2d87b91973e18bb968a7da48a96ea44/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:533ce891ba774eabf607172254f2e7260ba5f57bdd64030c9a4fcfbd99815d0d", size = 5010956 }, + { url = "https://files.pythonhosted.org/packages/e1/8b/200088c6859d8221454825959df35b5244fa9bdf263fd0249ac5fb75e281/ml_dtypes-0.5.4-cp313-cp313-win_amd64.whl", hash = "sha256:f21c9219ef48ca5ee78402d5cc831bd58ea27ce89beda894428bc67a52da5328", size = 212224 }, + { url = "https://files.pythonhosted.org/packages/8f/75/dfc3775cb36367816e678f69a7843f6f03bd4e2bcd79941e01ea960a068e/ml_dtypes-0.5.4-cp313-cp313-win_arm64.whl", hash = "sha256:35f29491a3e478407f7047b8a4834e4640a77d2737e0b294d049746507af5175", size = 160798 }, + { url = "https://files.pythonhosted.org/packages/4f/74/e9ddb35fd1dd43b1106c20ced3f53c2e8e7fc7598c15638e9f80677f81d4/ml_dtypes-0.5.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:304ad47faa395415b9ccbcc06a0350800bc50eda70f0e45326796e27c62f18b6", size = 702083 }, + { url = "https://files.pythonhosted.org/packages/74/f5/667060b0aed1aa63166b22897fdf16dca9eb704e6b4bbf86848d5a181aa7/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a0df4223b514d799b8a1629c65ddc351b3efa833ccf7f8ea0cf654a61d1e35d", size = 5354111 }, + { url = 
"https://files.pythonhosted.org/packages/40/49/0f8c498a28c0efa5f5c95a9e374c83ec1385ca41d0e85e7cf40e5d519a21/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531eff30e4d368cb6255bc2328d070e35836aa4f282a0fb5f3a0cd7260257298", size = 5366453 }, + { url = "https://files.pythonhosted.org/packages/8c/27/12607423d0a9c6bbbcc780ad19f1f6baa2b68b18ce4bddcdc122c4c68dc9/ml_dtypes-0.5.4-cp313-cp313t-win_amd64.whl", hash = "sha256:cb73dccfc991691c444acc8c0012bee8f2470da826a92e3a20bb333b1a7894e6", size = 225612 }, + { url = "https://files.pythonhosted.org/packages/e5/80/5a5929e92c72936d5b19872c5fb8fc09327c1da67b3b68c6a13139e77e20/ml_dtypes-0.5.4-cp313-cp313t-win_arm64.whl", hash = "sha256:3bbbe120b915090d9dd1375e4684dd17a20a2491ef25d640a908281da85e73f1", size = 164145 }, + { url = "https://files.pythonhosted.org/packages/72/4e/1339dc6e2557a344f5ba5590872e80346f76f6cb2ac3dd16e4666e88818c/ml_dtypes-0.5.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2b857d3af6ac0d39db1de7c706e69c7f9791627209c3d6dedbfca8c7e5faec22", size = 673781 }, + { url = "https://files.pythonhosted.org/packages/04/f9/067b84365c7e83bda15bba2b06c6ca250ce27b20630b1128c435fb7a09aa/ml_dtypes-0.5.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:805cef3a38f4eafae3a5bf9ebdcdb741d0bcfd9e1bd90eb54abd24f928cd2465", size = 5036145 }, + { url = "https://files.pythonhosted.org/packages/c6/bb/82c7dcf38070b46172a517e2334e665c5bf374a262f99a283ea454bece7c/ml_dtypes-0.5.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14a4fd3228af936461db66faccef6e4f41c1d82fcc30e9f8d58a08916b1d811f", size = 5010230 }, + { url = "https://files.pythonhosted.org/packages/e9/93/2bfed22d2498c468f6bcd0d9f56b033eaa19f33320389314c19ef6766413/ml_dtypes-0.5.4-cp314-cp314-win_amd64.whl", hash = "sha256:8c6a2dcebd6f3903e05d51960a8058d6e131fe69f952a5397e5dbabc841b6d56", size = 221032 }, + { url = 
"https://files.pythonhosted.org/packages/76/a3/9c912fe6ea747bb10fe2f8f54d027eb265db05dfb0c6335e3e063e74e6e8/ml_dtypes-0.5.4-cp314-cp314-win_arm64.whl", hash = "sha256:5a0f68ca8fd8d16583dfa7793973feb86f2fbb56ce3966daf9c9f748f52a2049", size = 163353 }, + { url = "https://files.pythonhosted.org/packages/cd/02/48aa7d84cc30ab4ee37624a2fd98c56c02326785750cd212bc0826c2f15b/ml_dtypes-0.5.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:bfc534409c5d4b0bf945af29e5d0ab075eae9eecbb549ff8a29280db822f34f9", size = 702085 }, + { url = "https://files.pythonhosted.org/packages/5a/e7/85cb99fe80a7a5513253ec7faa88a65306be071163485e9a626fce1b6e84/ml_dtypes-0.5.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2314892cdc3fcf05e373d76d72aaa15fda9fb98625effa73c1d646f331fcecb7", size = 5355358 }, + { url = "https://files.pythonhosted.org/packages/79/2b/a826ba18d2179a56e144aef69e57fb2ab7c464ef0b2111940ee8a3a223a2/ml_dtypes-0.5.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d2ffd05a2575b1519dc928c0b93c06339eb67173ff53acb00724502cda231cf", size = 5366332 }, + { url = "https://files.pythonhosted.org/packages/84/44/f4d18446eacb20ea11e82f133ea8f86e2bf2891785b67d9da8d0ab0ef525/ml_dtypes-0.5.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4381fe2f2452a2d7589689693d3162e876b3ddb0a832cde7a414f8e1adf7eab1", size = 236612 }, + { url = "https://files.pythonhosted.org/packages/ad/3f/3d42e9a78fe5edf792a83c074b13b9b770092a4fbf3462872f4303135f09/ml_dtypes-0.5.4-cp314-cp314t-win_arm64.whl", hash = "sha256:11942cbf2cf92157db91e5022633c0d9474d4dfd813a909383bd23ce828a4b7d", size = 168825 }, ] [[package]] name = "mlflow" -version = "3.6.0" +version = "3.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alembic" }, @@ -1804,14 +1803,14 @@ dependencies = [ { name = "sqlalchemy" }, { name = "waitress", marker = "sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/9b/25/930b5312952b2645f066ffacca5bee8e36577c35e327545da225440cbb6a/mlflow-3.6.0.tar.gz", hash = "sha256:d945d259b5c6b551a9f26846db8979fd84c78114a027b77ada3298f821a9b0e1", size = 8371484, upload-time = "2025-11-07T19:00:30.312Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/42/b553f3f65b663001ed4b987339b380934c5f414d3f6c004440445d3c9028/mlflow-3.7.0.tar.gz", hash = "sha256:391951abe33596497faaad2c8baf902c745472111b06e72130d5b44756bae74a", size = 8434396 } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/69/5b018518b2fbd02481b58f7ca14f4a489b51e3c2d95cdc1b973135e8d456/mlflow-3.6.0-py3-none-any.whl", hash = "sha256:04d1691facd412be8e61b963fad859286cfeb2dbcafaea294e6aa0b83a15fc04", size = 8860293, upload-time = "2025-11-07T19:00:27.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/d9/84a3e931bb691109a2193c88842f6e55cbdb219953c25fdc771219e8ae68/mlflow-3.7.0-py3-none-any.whl", hash = "sha256:da7dd2744c4b1ae8d7986ef36edc35d5250d742f47cfb2637070366ed9404092", size = 8924265 }, ] [[package]] name = "mlflow-skinny" -version = "3.6.0" +version = "3.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, @@ -1834,14 +1833,14 @@ dependencies = [ { name = "typing-extensions" }, { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/8e/2a2d0cd5b1b985c5278202805f48aae6f2adc3ddc0fce3385ec50e07e258/mlflow_skinny-3.6.0.tar.gz", hash = "sha256:cc04706b5b6faace9faf95302a6e04119485e1bfe98ddc9b85b81984e80944b6", size = 1963286, upload-time = "2025-11-07T18:33:52.596Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/f2/bdf4779885a4705a84a9b2a41d2a334b44e78fbb0af463c16298547dc268/mlflow_skinny-3.7.0.tar.gz", hash = "sha256:5f04343ec2101fa39f798351b4f5c0e6664dffd0cd76ad8a68a087b1a8a5e702", size = 1982275 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0e/78/e8fdc3e1708bdfd1eba64f41ce96b461cae1b505aa08b69352ac99b4caa4/mlflow_skinny-3.6.0-py3-none-any.whl", hash = "sha256:c83b34fce592acb2cc6bddcb507587a6d9ef3f590d9e7a8658c85e0980596d78", size = 2364629, upload-time = "2025-11-07T18:33:50.744Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/04bcf3eb7d6c01fcf46a4a68875ffef20a7e40976a88e749d3036d3bbc42/mlflow_skinny-3.7.0-py3-none-any.whl", hash = "sha256:0fb37de3c8e1787dfcf1b04919b43328c133d9045ca54dfd3f359860670e5f0e", size = 2385928 }, ] [[package]] name = "mlflow-tracing" -version = "3.6.0" +version = "3.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, @@ -1853,161 +1852,161 @@ dependencies = [ { name = "protobuf" }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/4e/a1b2f977a50ed3860e2848548a9173b9018806628d46d5bdafa8b45bc0c7/mlflow_tracing-3.6.0.tar.gz", hash = "sha256:ccff80b3aad6caa18233c98ba69922a91a6f914e0a13d12e1977af7523523d4c", size = 1061879, upload-time = "2025-11-07T18:36:24.818Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/75/d3083f5ad3ea8f9c55ba285b082049f65c5da82d1c50de3260c38b313220/mlflow_tracing-3.7.0.tar.gz", hash = "sha256:d5404f737441d86149e27ab9e758db26b141ec4fbb35572e2e27b608df87ab6b", size = 1073245 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/ec/ba3f513152cf5404e36263604d484728d47e61678c39228c36eb769199af/mlflow_tracing-3.6.0-py3-none-any.whl", hash = "sha256:a68ff03ba5129c67dc98e6871e0d5ef512dd3ee66d01e1c1a0c946c08a6d4755", size = 1281617, upload-time = "2025-11-07T18:36:23.299Z" }, + { url = "https://files.pythonhosted.org/packages/09/3f/6783d9d485c3a6656864fe8f7f6fa6f55a311bf88558b3bb7809096a74f0/mlflow_tracing-3.7.0-py3-none-any.whl", hash = "sha256:3bbe534bae95e5162a086df3f4722952ac1b7950f31907fb6ddd84affdac5c9f", size = 1294619 }, ] [[package]] name = "mpmath" version = "1.3.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, ] [[package]] name = "msgpack" version = "1.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, - { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, - { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, - { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, - { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, - { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, - { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, - { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, - { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, - { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, - { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, - { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, - { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, - { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, - { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, - { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, - { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, - { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, - { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, - { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, - { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, - { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, - { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, - { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, - { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, - { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, - { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, - { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, - { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, - { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, - { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939 }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064 }, + { 
url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131 }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556 }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920 }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013 }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096 }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708 }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119 }, + { url = 
"https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212 }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315 }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721 }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657 }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668 }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040 }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037 }, + { url = 
"https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631 }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118 }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127 }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981 }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885 }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658 }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290 }, + { url = 
"https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234 }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391 }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787 }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453 }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264 }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076 }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242 }, + { url = 
"https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509 }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957 }, + { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910 }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197 }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772 }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868 }, ] [[package]] name = "multidict" version = "6.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = 
"2025-10-06T14:52:30.657Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, - { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, - { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, - { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, - { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, - { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 
242363, upload-time = "2025-10-06T14:49:28.562Z" }, - { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, - { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, - { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, - { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, - { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, - { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, - { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, - { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, - { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, - { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, - { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, - { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, - { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, - { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, - { url 
= "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, - { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, - { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, - { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, - { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, - { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = 
"2025-10-06T14:50:10.714Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, - { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, - { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, - { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, - { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, - { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, - { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, - { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, - { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, - { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, - { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, 
- { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, - { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, - { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, - { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, - { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = 
"2025-10-06T14:50:39.574Z" }, - { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, - { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, - { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, - { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, - { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, - { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, - { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, - { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, - { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, - { 
url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, - { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, - { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, - { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, - { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" }, - { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = 
"2025-10-06T14:51:10.365Z" }, - { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, - { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, - { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, - { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, - { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, - { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, - { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, - { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, - { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, - { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, - { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, 
- { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, - { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, - { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, - { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, - { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, - { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = 
"2025-10-06T14:51:45.265Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, - { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, - { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, - { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877 }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467 }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834 }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545 }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305 }, + { url = 
"https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363 }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375 }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346 }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107 }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592 }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024 }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484 }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579 }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654 }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511 }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895 }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073 }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226 }, + { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135 }, + { url = 
"https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117 }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472 }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342 }, + { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082 }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704 }, + { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355 }, + { url = 
"https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259 }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903 }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365 }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062 }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683 }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254 }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967 }, + { url = 
"https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085 }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713 }, + { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915 }, + { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077 }, + { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114 }, + { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442 }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885 }, + { url = 
"https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588 }, + { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966 }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618 }, + { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539 }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345 }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934 }, + { url = 
"https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243 }, + { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878 }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452 }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312 }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935 }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385 }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777 }, + { url = 
"https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104 }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503 }, + { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128 }, + { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410 }, + { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205 }, + { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084 }, + { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667 }, + { url = 
"https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590 }, + { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112 }, + { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194 }, + { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510 }, + { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395 }, + { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520 }, + { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = 
"sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479 }, + { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903 }, + { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333 }, + { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411 }, + { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940 }, + { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087 }, + { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368 }, + { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326 }, + { url = 
"https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065 }, + { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475 }, + { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324 }, + { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877 }, + { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824 }, + { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558 }, + { url = 
"https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339 }, + { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895 }, + { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862 }, + { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376 }, + { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272 }, + { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774 }, + { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731 }, + { url = 
"https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193 }, + { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023 }, + { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507 }, + { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804 }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317 }, ] [[package]] @@ -2017,113 +2016,113 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dill" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/fd/2ae3826f5be24c6ed87266bc4e59c46ea5b059a103f3d7e7eb76a52aeecb/multiprocess-0.70.18.tar.gz", hash = "sha256:f9597128e6b3e67b23956da07cf3d2e5cba79e2f4e0fba8d7903636663ec6d0d", size = 1798503, upload-time = "2025-04-17T03:11:27.742Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/fd/2ae3826f5be24c6ed87266bc4e59c46ea5b059a103f3d7e7eb76a52aeecb/multiprocess-0.70.18.tar.gz", hash = "sha256:f9597128e6b3e67b23956da07cf3d2e5cba79e2f4e0fba8d7903636663ec6d0d", size = 1798503 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ba/d8/0cba6cf51a1a31f20471fbc823a716170c73012ddc4fb85d706630ed6e8f/multiprocess-0.70.18-py310-none-any.whl", hash = "sha256:60c194974c31784019c1f459d984e8f33ee48f10fcf42c309ba97b30d9bd53ea", size = 134948, upload-time = "2025-04-17T03:11:20.223Z" }, - { url = "https://files.pythonhosted.org/packages/4b/88/9039f2fed1012ef584751d4ceff9ab4a51e5ae264898f0b7cbf44340a859/multiprocess-0.70.18-py311-none-any.whl", hash = "sha256:5aa6eef98e691281b3ad923be2832bf1c55dd2c859acd73e5ec53a66aae06a1d", size = 144462, upload-time = "2025-04-17T03:11:21.657Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b6/5f922792be93b82ec6b5f270bbb1ef031fd0622847070bbcf9da816502cc/multiprocess-0.70.18-py312-none-any.whl", hash = "sha256:9b78f8e5024b573730bfb654783a13800c2c0f2dfc0c25e70b40d184d64adaa2", size = 150287, upload-time = "2025-04-17T03:11:22.69Z" }, - { url = "https://files.pythonhosted.org/packages/ee/25/7d7e78e750bc1aecfaf0efbf826c69a791d2eeaf29cf20cba93ff4cced78/multiprocess-0.70.18-py313-none-any.whl", hash = "sha256:871743755f43ef57d7910a38433cfe41319e72be1bbd90b79c7a5ac523eb9334", size = 151917, upload-time = "2025-04-17T03:11:24.044Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c3/ca84c19bd14cdfc21c388fdcebf08b86a7a470ebc9f5c3c084fc2dbc50f7/multiprocess-0.70.18-py38-none-any.whl", hash = "sha256:dbf705e52a154fe5e90fb17b38f02556169557c2dd8bb084f2e06c2784d8279b", size = 132636, upload-time = "2025-04-17T03:11:24.936Z" }, - { url = "https://files.pythonhosted.org/packages/6c/28/dd72947e59a6a8c856448a5e74da6201cb5502ddff644fbc790e4bd40b9a/multiprocess-0.70.18-py39-none-any.whl", hash = "sha256:e78ca805a72b1b810c690b6b4cc32579eba34f403094bbbae962b7b5bf9dfcb8", size = 133478, upload-time = "2025-04-17T03:11:26.253Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/0cba6cf51a1a31f20471fbc823a716170c73012ddc4fb85d706630ed6e8f/multiprocess-0.70.18-py310-none-any.whl", hash = 
"sha256:60c194974c31784019c1f459d984e8f33ee48f10fcf42c309ba97b30d9bd53ea", size = 134948 }, + { url = "https://files.pythonhosted.org/packages/4b/88/9039f2fed1012ef584751d4ceff9ab4a51e5ae264898f0b7cbf44340a859/multiprocess-0.70.18-py311-none-any.whl", hash = "sha256:5aa6eef98e691281b3ad923be2832bf1c55dd2c859acd73e5ec53a66aae06a1d", size = 144462 }, + { url = "https://files.pythonhosted.org/packages/bf/b6/5f922792be93b82ec6b5f270bbb1ef031fd0622847070bbcf9da816502cc/multiprocess-0.70.18-py312-none-any.whl", hash = "sha256:9b78f8e5024b573730bfb654783a13800c2c0f2dfc0c25e70b40d184d64adaa2", size = 150287 }, + { url = "https://files.pythonhosted.org/packages/ee/25/7d7e78e750bc1aecfaf0efbf826c69a791d2eeaf29cf20cba93ff4cced78/multiprocess-0.70.18-py313-none-any.whl", hash = "sha256:871743755f43ef57d7910a38433cfe41319e72be1bbd90b79c7a5ac523eb9334", size = 151917 }, + { url = "https://files.pythonhosted.org/packages/3b/c3/ca84c19bd14cdfc21c388fdcebf08b86a7a470ebc9f5c3c084fc2dbc50f7/multiprocess-0.70.18-py38-none-any.whl", hash = "sha256:dbf705e52a154fe5e90fb17b38f02556169557c2dd8bb084f2e06c2784d8279b", size = 132636 }, + { url = "https://files.pythonhosted.org/packages/6c/28/dd72947e59a6a8c856448a5e74da6201cb5502ddff644fbc790e4bd40b9a/multiprocess-0.70.18-py39-none-any.whl", hash = "sha256:e78ca805a72b1b810c690b6b4cc32579eba34f403094bbbae962b7b5bf9dfcb8", size = 133478 }, ] [[package]] name = "narwhals" version = "2.13.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/ea/f82ef99ced4d03c33bb314c9b84a08a0a86c448aaa11ffd6256b99538aa5/narwhals-2.13.0.tar.gz", hash = "sha256:ee94c97f4cf7cfeebbeca8d274784df8b3d7fd3f955ce418af998d405576fdd9", size = 594555, upload-time = "2025-12-01T13:54:05.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/ea/f82ef99ced4d03c33bb314c9b84a08a0a86c448aaa11ffd6256b99538aa5/narwhals-2.13.0.tar.gz", hash = 
"sha256:ee94c97f4cf7cfeebbeca8d274784df8b3d7fd3f955ce418af998d405576fdd9", size = 594555 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/0d/1861d1599571974b15b025e12b142d8e6b42ad66c8a07a89cb0fc21f1e03/narwhals-2.13.0-py3-none-any.whl", hash = "sha256:9b795523c179ca78204e3be53726da374168f906e38de2ff174c2363baaaf481", size = 426407, upload-time = "2025-12-01T13:54:03.861Z" }, + { url = "https://files.pythonhosted.org/packages/87/0d/1861d1599571974b15b025e12b142d8e6b42ad66c8a07a89cb0fc21f1e03/narwhals-2.13.0-py3-none-any.whl", hash = "sha256:9b795523c179ca78204e3be53726da374168f906e38de2ff174c2363baaaf481", size = 426407 }, ] [[package]] name = "nest-asyncio" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, ] [[package]] name = "networkx" -version = "3.6" +version = "3.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/e8/fc/7b6fd4d22c8c4dc5704430140d8b3f520531d4fe7328b8f8d03f5a7950e8/networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad", size = 2511464, upload-time = "2025-11-24T03:03:47.158Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c7/d64168da60332c17d24c0d2f08bdf3987e8d1ae9d84b5bbd0eec2eb26a55/networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f", size = 2063713, upload-time = "2025-11-24T03:03:45.21Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504 }, ] [[package]] name = "nodeenv" version = "1.9.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = 
"2024-06-04T18:44:08.352Z" }, + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, ] [[package]] name = "numpy" version = "2.3.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" }, - { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" }, - { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" }, - { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" }, - { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086, upload-time = "2025-11-16T22:50:02.127Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" }, - { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" }, - { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" }, - { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = "2025-11-16T22:50:12.811Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" }, - { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" }, - { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" }, - { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" }, - { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" }, - { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" }, - { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" }, - { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" }, - { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" }, - { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" }, - { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" }, - { url = 
"https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" }, - { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" }, - { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" }, - { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" }, - { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" }, - { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" }, - { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587, upload-time = "2025-11-16T22:51:08.585Z" }, - { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, upload-time = "2025-11-16T22:51:11.541Z" }, - { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" }, - { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" }, - { url = "https://files.pythonhosted.org/packages/ba/97/1a914559c19e32d6b2e233cf9a6a114e67c856d35b1d6babca571a3e880f/numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82", size = 16735706, upload-time = "2025-11-16T22:51:19.558Z" }, - { url = "https://files.pythonhosted.org/packages/57/d4/51233b1c1b13ecd796311216ae417796b88b0616cfd8a33ae4536330748a/numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash 
= "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0", size = 12264507, upload-time = "2025-11-16T22:51:22.492Z" }, - { url = "https://files.pythonhosted.org/packages/45/98/2fe46c5c2675b8306d0b4a3ec3494273e93e1226a490f766e84298576956/numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63", size = 5093049, upload-time = "2025-11-16T22:51:25.171Z" }, - { url = "https://files.pythonhosted.org/packages/ce/0e/0698378989bb0ac5f1660c81c78ab1fe5476c1a521ca9ee9d0710ce54099/numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9", size = 6626603, upload-time = "2025-11-16T22:51:27Z" }, - { url = "https://files.pythonhosted.org/packages/5e/a6/9ca0eecc489640615642a6cbc0ca9e10df70df38c4d43f5a928ff18d8827/numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b", size = 14262696, upload-time = "2025-11-16T22:51:29.402Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f6/07ec185b90ec9d7217a00eeeed7383b73d7e709dae2a9a021b051542a708/numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520", size = 16597350, upload-time = "2025-11-16T22:51:32.167Z" }, - { url = "https://files.pythonhosted.org/packages/75/37/164071d1dde6a1a84c9b8e5b414fa127981bad47adf3a6b7e23917e52190/numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c", size = 16040190, upload-time = "2025-11-16T22:51:35.403Z" }, - { url = "https://files.pythonhosted.org/packages/08/3c/f18b82a406b04859eb026d204e4e1773eb41c5be58410f41ffa511d114ae/numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8", size = 
18536749, upload-time = "2025-11-16T22:51:39.698Z" }, - { url = "https://files.pythonhosted.org/packages/40/79/f82f572bf44cf0023a2fe8588768e23e1592585020d638999f15158609e1/numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248", size = 6335432, upload-time = "2025-11-16T22:51:42.476Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2e/235b4d96619931192c91660805e5e49242389742a7a82c27665021db690c/numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e", size = 12919388, upload-time = "2025-11-16T22:51:45.275Z" }, - { url = "https://files.pythonhosted.org/packages/07/2b/29fd75ce45d22a39c61aad74f3d718e7ab67ccf839ca8b60866054eb15f8/numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2", size = 10476651, upload-time = "2025-11-16T22:51:47.749Z" }, - { url = "https://files.pythonhosted.org/packages/17/e1/f6a721234ebd4d87084cfa68d081bcba2f5cfe1974f7de4e0e8b9b2a2ba1/numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41", size = 16834503, upload-time = "2025-11-16T22:51:50.443Z" }, - { url = "https://files.pythonhosted.org/packages/5c/1c/baf7ffdc3af9c356e1c135e57ab7cf8d247931b9554f55c467efe2c69eff/numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad", size = 12381612, upload-time = "2025-11-16T22:51:53.609Z" }, - { url = "https://files.pythonhosted.org/packages/74/91/f7f0295151407ddc9ba34e699013c32c3c91944f9b35fcf9281163dc1468/numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39", size = 5210042, upload-time = "2025-11-16T22:51:56.213Z" }, - { url = 
"https://files.pythonhosted.org/packages/2e/3b/78aebf345104ec50dd50a4d06ddeb46a9ff5261c33bcc58b1c4f12f85ec2/numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20", size = 6724502, upload-time = "2025-11-16T22:51:58.584Z" }, - { url = "https://files.pythonhosted.org/packages/02/c6/7c34b528740512e57ef1b7c8337ab0b4f0bddf34c723b8996c675bc2bc91/numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52", size = 14308962, upload-time = "2025-11-16T22:52:01.698Z" }, - { url = "https://files.pythonhosted.org/packages/80/35/09d433c5262bc32d725bafc619e095b6a6651caf94027a03da624146f655/numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b", size = 16655054, upload-time = "2025-11-16T22:52:04.267Z" }, - { url = "https://files.pythonhosted.org/packages/7a/ab/6a7b259703c09a88804fa2430b43d6457b692378f6b74b356155283566ac/numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3", size = 16091613, upload-time = "2025-11-16T22:52:08.651Z" }, - { url = "https://files.pythonhosted.org/packages/c2/88/330da2071e8771e60d1038166ff9d73f29da37b01ec3eb43cb1427464e10/numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227", size = 18591147, upload-time = "2025-11-16T22:52:11.453Z" }, - { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806, upload-time = "2025-11-16T22:52:14.641Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760, upload-time = "2025-11-16T22:52:17.975Z" }, - { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459, upload-time = "2025-11-16T22:52:20.55Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873 }, + { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838 }, + { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378 }, + { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559 }, + { url = 
"https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702 }, + { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086 }, + { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985 }, + { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976 }, + { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274 }, + { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922 }, + { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667 }, + { url = 
"https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251 }, + { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652 }, + { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172 }, + { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990 }, + { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902 }, + { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430 }, + { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551 }, + { url = 
"https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275 }, + { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637 }, + { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090 }, + { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710 }, + { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292 }, + { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897 }, + { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391 }, + { url = 
"https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275 }, + { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855 }, + { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359 }, + { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374 }, + { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587 }, + { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940 }, + { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341 }, + { url = 
"https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507 }, + { url = "https://files.pythonhosted.org/packages/ba/97/1a914559c19e32d6b2e233cf9a6a114e67c856d35b1d6babca571a3e880f/numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82", size = 16735706 }, + { url = "https://files.pythonhosted.org/packages/57/d4/51233b1c1b13ecd796311216ae417796b88b0616cfd8a33ae4536330748a/numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0", size = 12264507 }, + { url = "https://files.pythonhosted.org/packages/45/98/2fe46c5c2675b8306d0b4a3ec3494273e93e1226a490f766e84298576956/numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63", size = 5093049 }, + { url = "https://files.pythonhosted.org/packages/ce/0e/0698378989bb0ac5f1660c81c78ab1fe5476c1a521ca9ee9d0710ce54099/numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9", size = 6626603 }, + { url = "https://files.pythonhosted.org/packages/5e/a6/9ca0eecc489640615642a6cbc0ca9e10df70df38c4d43f5a928ff18d8827/numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b", size = 14262696 }, + { url = "https://files.pythonhosted.org/packages/c8/f6/07ec185b90ec9d7217a00eeeed7383b73d7e709dae2a9a021b051542a708/numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520", size = 16597350 }, + { url = 
"https://files.pythonhosted.org/packages/75/37/164071d1dde6a1a84c9b8e5b414fa127981bad47adf3a6b7e23917e52190/numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c", size = 16040190 }, + { url = "https://files.pythonhosted.org/packages/08/3c/f18b82a406b04859eb026d204e4e1773eb41c5be58410f41ffa511d114ae/numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8", size = 18536749 }, + { url = "https://files.pythonhosted.org/packages/40/79/f82f572bf44cf0023a2fe8588768e23e1592585020d638999f15158609e1/numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248", size = 6335432 }, + { url = "https://files.pythonhosted.org/packages/a3/2e/235b4d96619931192c91660805e5e49242389742a7a82c27665021db690c/numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e", size = 12919388 }, + { url = "https://files.pythonhosted.org/packages/07/2b/29fd75ce45d22a39c61aad74f3d718e7ab67ccf839ca8b60866054eb15f8/numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2", size = 10476651 }, + { url = "https://files.pythonhosted.org/packages/17/e1/f6a721234ebd4d87084cfa68d081bcba2f5cfe1974f7de4e0e8b9b2a2ba1/numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41", size = 16834503 }, + { url = "https://files.pythonhosted.org/packages/5c/1c/baf7ffdc3af9c356e1c135e57ab7cf8d247931b9554f55c467efe2c69eff/numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad", size = 12381612 }, + { url = 
"https://files.pythonhosted.org/packages/74/91/f7f0295151407ddc9ba34e699013c32c3c91944f9b35fcf9281163dc1468/numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39", size = 5210042 }, + { url = "https://files.pythonhosted.org/packages/2e/3b/78aebf345104ec50dd50a4d06ddeb46a9ff5261c33bcc58b1c4f12f85ec2/numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20", size = 6724502 }, + { url = "https://files.pythonhosted.org/packages/02/c6/7c34b528740512e57ef1b7c8337ab0b4f0bddf34c723b8996c675bc2bc91/numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52", size = 14308962 }, + { url = "https://files.pythonhosted.org/packages/80/35/09d433c5262bc32d725bafc619e095b6a6651caf94027a03da624146f655/numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b", size = 16655054 }, + { url = "https://files.pythonhosted.org/packages/7a/ab/6a7b259703c09a88804fa2430b43d6457b692378f6b74b356155283566ac/numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3", size = 16091613 }, + { url = "https://files.pythonhosted.org/packages/c2/88/330da2071e8771e60d1038166ff9d73f29da37b01ec3eb43cb1427464e10/numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227", size = 18591147 }, + { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806 }, + { url = 
"https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760 }, + { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459 }, ] [[package]] @@ -2131,9 +2130,9 @@ name = "nvidia-cublas-cu12" version = "12.8.4.1" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/99/db44d685f0e257ff0e213ade1964fc459b4a690a73293220e98feb3307cf/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0", size = 590537124, upload-time = "2025-03-07T01:43:53.556Z" }, - { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, - { url = "https://files.pythonhosted.org/packages/70/61/7d7b3c70186fb651d0fbd35b01dbfc8e755f69fd58f817f3d0f642df20c3/nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af", size = 567544208, upload-time = "2025-03-07T01:53:30.535Z" }, + { url = "https://files.pythonhosted.org/packages/29/99/db44d685f0e257ff0e213ade1964fc459b4a690a73293220e98feb3307cf/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:b86f6dd8935884615a0683b663891d43781b819ac4f2ba2b0c9604676af346d0", size = 590537124 }, + { url = 
"https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921 }, + { url = "https://files.pythonhosted.org/packages/70/61/7d7b3c70186fb651d0fbd35b01dbfc8e755f69fd58f817f3d0f642df20c3/nvidia_cublas_cu12-12.8.4.1-py3-none-win_amd64.whl", hash = "sha256:47e9b82132fa8d2b4944e708049229601448aaad7e6f296f630f2d1a32de35af", size = 567544208 }, ] [[package]] @@ -2141,9 +2140,9 @@ name = "nvidia-cuda-cupti-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/1f/b3bd73445e5cb342727fd24fe1f7b748f690b460acadc27ea22f904502c8/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4412396548808ddfed3f17a467b104ba7751e6b58678a4b840675c56d21cf7ed", size = 9533318, upload-time = "2025-03-07T01:40:10.421Z" }, - { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, - { url = "https://files.pythonhosted.org/packages/41/bc/83f5426095d93694ae39fe1311431b5d5a9bb82e48bf0dd8e19be2765942/nvidia_cuda_cupti_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:bb479dcdf7e6d4f8b0b01b115260399bf34154a1a2e9fe11c85c517d87efd98e", size = 7015759, upload-time = "2025-03-07T01:51:11.355Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1f/b3bd73445e5cb342727fd24fe1f7b748f690b460acadc27ea22f904502c8/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:4412396548808ddfed3f17a467b104ba7751e6b58678a4b840675c56d21cf7ed", size = 9533318 }, + { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621 }, + { url = "https://files.pythonhosted.org/packages/41/bc/83f5426095d93694ae39fe1311431b5d5a9bb82e48bf0dd8e19be2765942/nvidia_cuda_cupti_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:bb479dcdf7e6d4f8b0b01b115260399bf34154a1a2e9fe11c85c517d87efd98e", size = 7015759 }, ] [[package]] @@ -2151,9 +2150,9 @@ name = "nvidia-cuda-nvcc-cu12" version = "12.9.86" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/48/b54a06168a2190572a312bfe4ce443687773eb61367ced31e064953dd2f7/nvidia_cuda_nvcc_cu12-12.9.86-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:5d6a0d32fdc7ea39917c20065614ae93add6f577d840233237ff08e9a38f58f0", size = 40546229, upload-time = "2025-06-05T20:01:53.357Z" }, - { url = "https://files.pythonhosted.org/packages/d6/5c/8cc072436787104bbbcbde1f76ab4a0d89e68f7cebc758dd2ad7913a43d0/nvidia_cuda_nvcc_cu12-12.9.86-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44e1eca4d08926193a558d2434b1bf83d57b4d5743e0c431c0c83d51da1df62b", size = 39411138, upload-time = "2025-06-05T20:01:43.182Z" }, - { url = "https://files.pythonhosted.org/packages/d2/9e/c71c53655a65d7531c89421c282359e2f626838762f1ce6180ea0bbebd29/nvidia_cuda_nvcc_cu12-12.9.86-py3-none-win_amd64.whl", hash = "sha256:8ed7f0b17dea662755395be029376db3b94fed5cbb17c2d35cc866c5b1b84099", size = 34669845, upload-time = "2025-06-05T20:11:56.308Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/48/b54a06168a2190572a312bfe4ce443687773eb61367ced31e064953dd2f7/nvidia_cuda_nvcc_cu12-12.9.86-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:5d6a0d32fdc7ea39917c20065614ae93add6f577d840233237ff08e9a38f58f0", size = 40546229 }, + { url = "https://files.pythonhosted.org/packages/d6/5c/8cc072436787104bbbcbde1f76ab4a0d89e68f7cebc758dd2ad7913a43d0/nvidia_cuda_nvcc_cu12-12.9.86-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44e1eca4d08926193a558d2434b1bf83d57b4d5743e0c431c0c83d51da1df62b", size = 39411138 }, + { url = "https://files.pythonhosted.org/packages/d2/9e/c71c53655a65d7531c89421c282359e2f626838762f1ce6180ea0bbebd29/nvidia_cuda_nvcc_cu12-12.9.86-py3-none-win_amd64.whl", hash = "sha256:8ed7f0b17dea662755395be029376db3b94fed5cbb17c2d35cc866c5b1b84099", size = 34669845 }, ] [[package]] @@ -2161,7 +2160,7 @@ name = "nvidia-cuda-nvrtc-cu12" version = "12.8.93" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = "2025-03-07T01:42:13.562Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029 }, ] [[package]] @@ -2169,9 +2168,9 @@ name = "nvidia-cuda-runtime-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7c/75/f865a3b236e4647605ea34cc450900854ba123834a5f1598e160b9530c3a/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d", size = 965265, upload-time = "2025-03-07T01:39:43.533Z" }, - { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, - { url = "https://files.pythonhosted.org/packages/30/a5/a515b7600ad361ea14bfa13fb4d6687abf500adc270f19e89849c0590492/nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8", size = 944318, upload-time = "2025-03-07T01:51:01.794Z" }, + { url = "https://files.pythonhosted.org/packages/7c/75/f865a3b236e4647605ea34cc450900854ba123834a5f1598e160b9530c3a/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:52bf7bbee900262ffefe5e9d5a2a69a30d97e2bc5bb6cc866688caa976966e3d", size = 965265 }, + { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765 }, + { url = "https://files.pythonhosted.org/packages/30/a5/a515b7600ad361ea14bfa13fb4d6687abf500adc270f19e89849c0590492/nvidia_cuda_runtime_cu12-12.8.90-py3-none-win_amd64.whl", hash = "sha256:c0c6027f01505bfed6c3b21ec546f69c687689aad5f1a377554bc6ca4aa993a8", size = 944318 }, ] [[package]] @@ -2182,9 +2181,9 @@ dependencies = [ { name = "nvidia-cublas-cu12" }, ] wheels = 
[ - { url = "https://files.pythonhosted.org/packages/fa/41/e79269ce215c857c935fd86bcfe91a451a584dfc27f1e068f568b9ad1ab7/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8", size = 705026878, upload-time = "2025-06-06T21:52:51.348Z" }, - { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, - { url = "https://files.pythonhosted.org/packages/3d/90/0bd6e586701b3a890fd38aa71c387dab4883d619d6e5ad912ccbd05bfd67/nvidia_cudnn_cu12-9.10.2.21-py3-none-win_amd64.whl", hash = "sha256:c6288de7d63e6cf62988f0923f96dc339cea362decb1bf5b3141883392a7d65e", size = 692992268, upload-time = "2025-06-06T21:55:18.114Z" }, + { url = "https://files.pythonhosted.org/packages/fa/41/e79269ce215c857c935fd86bcfe91a451a584dfc27f1e068f568b9ad1ab7/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8", size = 705026878 }, + { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467 }, + { url = "https://files.pythonhosted.org/packages/3d/90/0bd6e586701b3a890fd38aa71c387dab4883d619d6e5ad912ccbd05bfd67/nvidia_cudnn_cu12-9.10.2.21-py3-none-win_amd64.whl", hash = "sha256:c6288de7d63e6cf62988f0923f96dc339cea362decb1bf5b3141883392a7d65e", size = 692992268 }, ] [[package]] @@ -2195,9 +2194,9 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ - { url = 
"https://files.pythonhosted.org/packages/60/bc/7771846d3a0272026c416fbb7e5f4c1f146d6d80704534d0b187dd6f4800/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a", size = 193109211, upload-time = "2025-03-07T01:44:56.873Z" }, - { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, - { url = "https://files.pythonhosted.org/packages/7d/ec/ce1629f1e478bb5ccd208986b5f9e0316a78538dd6ab1d0484f012f8e2a1/nvidia_cufft_cu12-11.3.3.83-py3-none-win_amd64.whl", hash = "sha256:7a64a98ef2a7c47f905aaf8931b69a3a43f27c55530c698bb2ed7c75c0b42cb7", size = 192216559, upload-time = "2025-03-07T01:53:57.106Z" }, + { url = "https://files.pythonhosted.org/packages/60/bc/7771846d3a0272026c416fbb7e5f4c1f146d6d80704534d0b187dd6f4800/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a", size = 193109211 }, + { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695 }, + { url = "https://files.pythonhosted.org/packages/7d/ec/ce1629f1e478bb5ccd208986b5f9e0316a78538dd6ab1d0484f012f8e2a1/nvidia_cufft_cu12-11.3.3.83-py3-none-win_amd64.whl", hash = "sha256:7a64a98ef2a7c47f905aaf8931b69a3a43f27c55530c698bb2ed7c75c0b42cb7", size = 192216559 }, ] [[package]] @@ -2205,7 +2204,7 @@ name = "nvidia-cufile-cu12" version = "1.13.1.3" source = { registry = 
"https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, + { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834 }, ] [[package]] @@ -2213,9 +2212,9 @@ name = "nvidia-curand-cu12" version = "10.3.9.90" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/5e/92aa15eca622a388b80fbf8375d4760738df6285b1e92c43d37390a33a9a/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:dfab99248034673b779bc6decafdc3404a8a6f502462201f2f31f11354204acd", size = 63625754, upload-time = "2025-03-07T01:46:10.735Z" }, - { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, - { url = "https://files.pythonhosted.org/packages/b9/75/70c05b2f3ed5be3bb30b7102b6eb78e100da4bbf6944fd6725c012831cab/nvidia_curand_cu12-10.3.9.90-py3-none-win_amd64.whl", hash = "sha256:f149a8ca457277da854f89cf282d6ef43176861926c7ac85b2a0fbd237c587ec", size = 62765309, upload-time = "2025-03-07T01:54:20.478Z" }, + { url = "https://files.pythonhosted.org/packages/45/5e/92aa15eca622a388b80fbf8375d4760738df6285b1e92c43d37390a33a9a/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_aarch64.whl", hash = 
"sha256:dfab99248034673b779bc6decafdc3404a8a6f502462201f2f31f11354204acd", size = 63625754 }, + { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976 }, + { url = "https://files.pythonhosted.org/packages/b9/75/70c05b2f3ed5be3bb30b7102b6eb78e100da4bbf6944fd6725c012831cab/nvidia_curand_cu12-10.3.9.90-py3-none-win_amd64.whl", hash = "sha256:f149a8ca457277da854f89cf282d6ef43176861926c7ac85b2a0fbd237c587ec", size = 62765309 }, ] [[package]] @@ -2228,9 +2227,9 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/32/f7cd6ce8a7690544d084ea21c26e910a97e077c9b7f07bf5de623ee19981/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0", size = 267229841, upload-time = "2025-03-07T01:46:54.356Z" }, - { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, - { url = "https://files.pythonhosted.org/packages/13/c0/76ca8551b8a84146ffa189fec81c26d04adba4bc0dbe09cd6e6fd9b7de04/nvidia_cusolver_cu12-11.7.3.90-py3-none-win_amd64.whl", hash = "sha256:4a550db115fcabc4d495eb7d39ac8b58d4ab5d8e63274d3754df1c0ad6a22d34", size = 256720438, upload-time = "2025-03-07T01:54:39.898Z" }, + { url = "https://files.pythonhosted.org/packages/c8/32/f7cd6ce8a7690544d084ea21c26e910a97e077c9b7f07bf5de623ee19981/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = 
"sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0", size = 267229841 }, + { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905 }, + { url = "https://files.pythonhosted.org/packages/13/c0/76ca8551b8a84146ffa189fec81c26d04adba4bc0dbe09cd6e6fd9b7de04/nvidia_cusolver_cu12-11.7.3.90-py3-none-win_amd64.whl", hash = "sha256:4a550db115fcabc4d495eb7d39ac8b58d4ab5d8e63274d3754df1c0ad6a22d34", size = 256720438 }, ] [[package]] @@ -2241,9 +2240,9 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/f7/cd777c4109681367721b00a106f491e0d0d15cfa1fd59672ce580ce42a97/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc", size = 288117129, upload-time = "2025-03-07T01:47:40.407Z" }, - { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, - { url = "https://files.pythonhosted.org/packages/62/07/f3b2ad63f8e3d257a599f422ae34eb565e70c41031aecefa3d18b62cabd1/nvidia_cusparse_cu12-12.5.8.93-py3-none-win_amd64.whl", hash = "sha256:9a33604331cb2cac199f2e7f5104dfbb8a5a898c367a53dfda9ff2acb6b6b4dd", size = 284937404, upload-time = "2025-03-07T01:55:07.742Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f7/cd777c4109681367721b00a106f491e0d0d15cfa1fd59672ce580ce42a97/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc", size = 288117129 }, + { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466 }, + { url = "https://files.pythonhosted.org/packages/62/07/f3b2ad63f8e3d257a599f422ae34eb565e70c41031aecefa3d18b62cabd1/nvidia_cusparse_cu12-12.5.8.93-py3-none-win_amd64.whl", hash = "sha256:9a33604331cb2cac199f2e7f5104dfbb8a5a898c367a53dfda9ff2acb6b6b4dd", size = 284937404 }, ] [[package]] @@ -2251,7 +2250,7 @@ name = "nvidia-cusparselt-cu12" version = "0.7.1" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, + { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691 }, ] [[package]] @@ -2259,7 +2258,7 @@ name = "nvidia-nccl-cu12" version = "2.27.5" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229, upload-time = "2025-06-26T04:11:28.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229 }, ] [[package]] @@ -2267,9 +2266,9 @@ name = "nvidia-nvjitlink-cu12" version = "12.8.93" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, - { url = "https://files.pythonhosted.org/packages/2a/a2/8cee5da30d13430e87bf99bb33455d2724d0a4a9cb5d7926d80ccb96d008/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:adccd7161ace7261e01bb91e44e88da350895c270d23f744f0820c818b7229e7", size = 38386204, upload-time = "2025-03-07T01:49:43.612Z" }, - { url = "https://files.pythonhosted.org/packages/ed/d7/34f02dad2e30c31b10a51f6b04e025e5dd60e5f936af9045a9b858a05383/nvidia_nvjitlink_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:bd93fbeeee850917903583587f4fc3a4eafa022e34572251368238ab5e6bd67f", size = 268553710, upload-time = "2025-03-07T01:56:24.13Z" }, + { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836 }, + { url = "https://files.pythonhosted.org/packages/2a/a2/8cee5da30d13430e87bf99bb33455d2724d0a4a9cb5d7926d80ccb96d008/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:adccd7161ace7261e01bb91e44e88da350895c270d23f744f0820c818b7229e7", size = 38386204 }, + { url = "https://files.pythonhosted.org/packages/ed/d7/34f02dad2e30c31b10a51f6b04e025e5dd60e5f936af9045a9b858a05383/nvidia_nvjitlink_cu12-12.8.93-py3-none-win_amd64.whl", hash = "sha256:bd93fbeeee850917903583587f4fc3a4eafa022e34572251368238ab5e6bd67f", size = 268553710 }, ] [[package]] @@ -2277,7 +2276,7 @@ name = "nvidia-nvshmem-cu12" version = "3.3.20" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, + { url = "https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145 }, ] [[package]] @@ -2285,7 +2284,7 @@ name = "nvidia-nvtx-cu12" version = "12.8.90" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, + { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954 }, ] [[package]] @@ -2296,9 +2295,9 @@ dependencies = 
[ { name = "antlr4-python3-runtime" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120, upload-time = "2022-12-08T20:59:22.753Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500, upload-time = "2022-12-08T20:59:19.686Z" }, + { url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500 }, ] [[package]] @@ -2309,9 +2308,9 @@ dependencies = [ { name = "importlib-metadata" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/0b/e5428c009d4d9af0515b0a8371a8aaae695371af291f45e702f7969dce6b/opentelemetry_api-1.39.0.tar.gz", hash = "sha256:6130644268c5ac6bdffaf660ce878f10906b3e789f7e2daa5e169b047a2933b9", size = 65763, upload-time = "2025-12-03T13:19:56.378Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/0b/e5428c009d4d9af0515b0a8371a8aaae695371af291f45e702f7969dce6b/opentelemetry_api-1.39.0.tar.gz", hash = "sha256:6130644268c5ac6bdffaf660ce878f10906b3e789f7e2daa5e169b047a2933b9", size = 65763 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/05/85/d831a9bc0a9e0e1a304ff3d12c1489a5fbc9bf6690a15dcbdae372bbca45/opentelemetry_api-1.39.0-py3-none-any.whl", hash = "sha256:3c3b3ca5c5687b1b5b37e5c5027ff68eacea8675241b29f13110a8ffbb8f0459", size = 66357, upload-time = "2025-12-03T13:19:33.043Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/d831a9bc0a9e0e1a304ff3d12c1489a5fbc9bf6690a15dcbdae372bbca45/opentelemetry_api-1.39.0-py3-none-any.whl", hash = "sha256:3c3b3ca5c5687b1b5b37e5c5027ff68eacea8675241b29f13110a8ffbb8f0459", size = 66357 }, ] [[package]] @@ -2321,9 +2320,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/b5/64d2f8c3393cd13ea2092106118f7b98461ba09333d40179a31444c6f176/opentelemetry_proto-1.39.0.tar.gz", hash = "sha256:c1fa48678ad1a1624258698e59be73f990b7fc1f39e73e16a9d08eef65dd838c", size = 46153, upload-time = "2025-12-03T13:20:08.729Z" } +sdist = { url = "https://files.pythonhosted.org/packages/48/b5/64d2f8c3393cd13ea2092106118f7b98461ba09333d40179a31444c6f176/opentelemetry_proto-1.39.0.tar.gz", hash = "sha256:c1fa48678ad1a1624258698e59be73f990b7fc1f39e73e16a9d08eef65dd838c", size = 46153 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/4d/d500e1862beed68318705732d1976c390f4a72ca8009c4983ff627acff20/opentelemetry_proto-1.39.0-py3-none-any.whl", hash = "sha256:1e086552ac79acb501485ff0ce75533f70f3382d43d0a30728eeee594f7bf818", size = 72534, upload-time = "2025-12-03T13:19:50.251Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4d/d500e1862beed68318705732d1976c390f4a72ca8009c4983ff627acff20/opentelemetry_proto-1.39.0-py3-none-any.whl", hash = "sha256:1e086552ac79acb501485ff0ce75533f70f3382d43d0a30728eeee594f7bf818", size = 72534 }, ] [[package]] @@ -2335,9 +2334,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/51/e3/7cd989003e7cde72e0becfe830abff0df55c69d237ee7961a541e0167833/opentelemetry_sdk-1.39.0.tar.gz", hash = "sha256:c22204f12a0529e07aa4d985f1bca9d6b0e7b29fe7f03e923548ae52e0e15dde", size = 171322, upload-time = "2025-12-03T13:20:09.651Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/e3/7cd989003e7cde72e0becfe830abff0df55c69d237ee7961a541e0167833/opentelemetry_sdk-1.39.0.tar.gz", hash = "sha256:c22204f12a0529e07aa4d985f1bca9d6b0e7b29fe7f03e923548ae52e0e15dde", size = 171322 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/b4/2adc8bc83eb1055ecb592708efb6f0c520cc2eb68970b02b0f6ecda149cf/opentelemetry_sdk-1.39.0-py3-none-any.whl", hash = "sha256:90cfb07600dfc0d2de26120cebc0c8f27e69bf77cd80ef96645232372709a514", size = 132413, upload-time = "2025-12-03T13:19:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b4/2adc8bc83eb1055ecb592708efb6f0c520cc2eb68970b02b0f6ecda149cf/opentelemetry_sdk-1.39.0-py3-none-any.whl", hash = "sha256:90cfb07600dfc0d2de26120cebc0c8f27e69bf77cd80ef96645232372709a514", size = 132413 }, ] [[package]] @@ -2348,18 +2347,18 @@ dependencies = [ { name = "opentelemetry-api" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/0e/176a7844fe4e3cb5de604212094dffaed4e18b32f1c56b5258bcbcba85c2/opentelemetry_semantic_conventions-0.60b0.tar.gz", hash = "sha256:227d7aa73cbb8a2e418029d6b6465553aa01cf7e78ec9d0bc3255c7b3ac5bf8f", size = 137935, upload-time = "2025-12-03T13:20:12.395Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/0e/176a7844fe4e3cb5de604212094dffaed4e18b32f1c56b5258bcbcba85c2/opentelemetry_semantic_conventions-0.60b0.tar.gz", hash = "sha256:227d7aa73cbb8a2e418029d6b6465553aa01cf7e78ec9d0bc3255c7b3ac5bf8f", size = 137935 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d0/56/af0306666f91bae47db14d620775604688361f0f76a872e0005277311131/opentelemetry_semantic_conventions-0.60b0-py3-none-any.whl", hash = "sha256:069530852691136018087b52688857d97bba61cd641d0f8628d2d92788c4f78a", size = 219981, upload-time = "2025-12-03T13:19:53.585Z" }, + { url = "https://files.pythonhosted.org/packages/d0/56/af0306666f91bae47db14d620775604688361f0f76a872e0005277311131/opentelemetry_semantic_conventions-0.60b0-py3-none-any.whl", hash = "sha256:069530852691136018087b52688857d97bba61cd641d0f8628d2d92788c4f78a", size = 219981 }, ] [[package]] name = "opt-einsum" version = "3.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/b9/2ac072041e899a52f20cf9510850ff58295003aa75525e58343591b0cbfb/opt_einsum-3.4.0.tar.gz", hash = "sha256:96ca72f1b886d148241348783498194c577fa30a8faac108586b14f1ba4473ac", size = 63004, upload-time = "2024-09-26T14:33:24.483Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/b9/2ac072041e899a52f20cf9510850ff58295003aa75525e58343591b0cbfb/opt_einsum-3.4.0.tar.gz", hash = "sha256:96ca72f1b886d148241348783498194c577fa30a8faac108586b14f1ba4473ac", size = 63004 } wheels = [ - { url = "https://files.pythonhosted.org/packages/23/cd/066e86230ae37ed0be70aae89aabf03ca8d9f39c8aea0dec8029455b5540/opt_einsum-3.4.0-py3-none-any.whl", hash = "sha256:69bb92469f86a1565195ece4ac0323943e83477171b91d24c35afe028a90d7cd", size = 71932, upload-time = "2024-09-26T14:33:23.039Z" }, + { url = "https://files.pythonhosted.org/packages/23/cd/066e86230ae37ed0be70aae89aabf03ca8d9f39c8aea0dec8029455b5540/opt_einsum-3.4.0-py3-none-any.whl", hash = "sha256:69bb92469f86a1565195ece4ac0323943e83477171b91d24c35afe028a90d7cd", size = 71932 }, ] [[package]] @@ -2378,9 +2377,9 @@ dependencies = [ { name = "sqlalchemy" }, { name = "tqdm" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/c6/6a/5204699dd9c2b4b3062e3c77469058d7d96e59946e67353a80f0103ff353/optuna-2.10.1.tar.gz", hash = "sha256:8a12009b57757c1070b3bff2261c24824d6430c22926dd1e2ace33b3deff555f", size = 224093, upload-time = "2022-06-13T05:39:22.48Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/6a/5204699dd9c2b4b3062e3c77469058d7d96e59946e67353a80f0103ff353/optuna-2.10.1.tar.gz", hash = "sha256:8a12009b57757c1070b3bff2261c24824d6430c22926dd1e2ace33b3deff555f", size = 224093 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/6c/c6fab7d673d9d12144b8fe7155e15aeb06798b029ecfece4b193ffc859e6/optuna-2.10.1-py3-none-any.whl", hash = "sha256:d57dc1fd911d20697098bdef83836f8078387a3be2a5400ab26db15f8341e1f4", size = 308237, upload-time = "2022-06-13T05:39:20.176Z" }, + { url = "https://files.pythonhosted.org/packages/9f/6c/c6fab7d673d9d12144b8fe7155e15aeb06798b029ecfece4b193ffc859e6/optuna-2.10.1-py3-none-any.whl", hash = "sha256:d57dc1fd911d20697098bdef83836f8078387a3be2a5400ab26db15f8341e1f4", size = 308237 }, ] [[package]] @@ -2403,27 +2402,27 @@ dependencies = [ { name = "tensorstore" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/0a/6cda861d9a9df6681126128c4f8417b692c4e8fbff2d9f643fc99c716679/orbax_checkpoint-0.11.30.tar.gz", hash = "sha256:5395e9fc80b750ee3644ee19f969923c7e3c83369133da5ea256a86d9bb838a6", size = 402806, upload-time = "2025-11-27T07:58:52.736Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/0a/6cda861d9a9df6681126128c4f8417b692c4e8fbff2d9f643fc99c716679/orbax_checkpoint-0.11.30.tar.gz", hash = "sha256:5395e9fc80b750ee3644ee19f969923c7e3c83369133da5ea256a86d9bb838a6", size = 402806 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/2c/cbfb3e42fbfc2c6b5615d07b922ecb6113f1b0ec8fc4ffd0f370d8cbd82b/orbax_checkpoint-0.11.30-py3-none-any.whl", hash = "sha256:56b15d07af7a4ff655f18d219de850d86944b1552e5143e81f5b15480f240a46", size 
= 598455, upload-time = "2025-11-27T07:58:51.548Z" }, + { url = "https://files.pythonhosted.org/packages/f5/2c/cbfb3e42fbfc2c6b5615d07b922ecb6113f1b0ec8fc4ffd0f370d8cbd82b/orbax_checkpoint-0.11.30-py3-none-any.whl", hash = "sha256:56b15d07af7a4ff655f18d219de850d86944b1552e5143e81f5b15480f240a46", size = 598455 }, ] [[package]] name = "ordered-set" version = "4.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/ca/bfac8bc689799bcca4157e0e0ced07e70ce125193fc2e166d2e685b7e2fe/ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8", size = 12826, upload-time = "2022-01-26T14:38:56.6Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/ca/bfac8bc689799bcca4157e0e0ced07e70ce125193fc2e166d2e685b7e2fe/ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8", size = 12826 } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/55/af02708f230eb77084a299d7b08175cff006dea4f2721074b92cdb0296c0/ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562", size = 7634, upload-time = "2022-01-26T14:38:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/33/55/af02708f230eb77084a299d7b08175cff006dea4f2721074b92cdb0296c0/ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562", size = 7634 }, ] [[package]] name = "packaging" version = "25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, ] [[package]] @@ -2436,41 +2435,41 @@ dependencies = [ { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, - { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, - { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", 
hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, - { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, - { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, - { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, - { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, - { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = 
"2025-09-29T23:21:15.979Z" }, - { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, - { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, - { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, - { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, - { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, - { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, - { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, - { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, - { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" }, - { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" }, - { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" }, - { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" }, - { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" }, - { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" }, - { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" }, - { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" }, - { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" }, - { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" }, - { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846 }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618 }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212 }, + { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693 }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002 }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971 }, + { url = 
"https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722 }, + { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671 }, + { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807 }, + { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872 }, + { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371 }, + { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333 }, + { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120 }, + { url = 
"https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991 }, + { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227 }, + { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056 }, + { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189 }, + { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912 }, + { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160 }, + { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233 }, + { url = 
"https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635 }, + { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079 }, + { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049 }, + { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638 }, + { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834 }, + { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925 }, + { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071 }, + { url = 
"https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504 }, + { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702 }, + { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535 }, + { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582 }, + { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963 }, + { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175 }, ] [[package]] @@ -2485,87 +2484,87 @@ dependencies = [ { name = "treescope" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/d0/f7112dfe442a9cf2e0739bf8dfdf9c38b9dcc2d8a803d2c5d90913e22da1/penzai-0.2.5.tar.gz", hash = 
"sha256:4966bdd0eb9b34564be22c6197fbb84178e4a885e04a5678f6b6816d7cbc9387", size = 912243, upload-time = "2025-04-08T05:43:59.985Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/d0/f7112dfe442a9cf2e0739bf8dfdf9c38b9dcc2d8a803d2c5d90913e22da1/penzai-0.2.5.tar.gz", hash = "sha256:4966bdd0eb9b34564be22c6197fbb84178e4a885e04a5678f6b6816d7cbc9387", size = 912243 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/22/2ab6b762b58e37e7a4004a504eff9f975d96f755cb1256ab422194312bbc/penzai-0.2.5-py3-none-any.whl", hash = "sha256:3b2823b7403c37cfb033ccb27a1b81d8e05df02fb16f246344e962427b2ac43a", size = 317886, upload-time = "2025-04-08T05:43:58.269Z" }, + { url = "https://files.pythonhosted.org/packages/a2/22/2ab6b762b58e37e7a4004a504eff9f975d96f755cb1256ab422194312bbc/penzai-0.2.5-py3-none-any.whl", hash = "sha256:3b2823b7403c37cfb033ccb27a1b81d8e05df02fb16f246344e962427b2ac43a", size = 317886 }, ] [[package]] name = "pillow" version = "12.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" }, - { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" }, - { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" }, - { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" }, - { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" }, - { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" }, - { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" }, - { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" }, - { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" }, - { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" }, - { url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" }, - { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" }, - { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" }, - { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" }, - { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" }, - { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" }, - { url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" }, - { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" }, - { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" }, - { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" }, - { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" }, - { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" }, - { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" }, - { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" }, - { url = 
"https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" }, - { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" }, - { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" }, - { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" }, - { url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531, upload-time = "2025-10-15T18:23:10.121Z" }, - { url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554, upload-time = "2025-10-15T18:23:12.14Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812, upload-time = "2025-10-15T18:23:13.962Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689, upload-time = "2025-10-15T18:23:15.562Z" }, - { url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186, upload-time = "2025-10-15T18:23:17.379Z" }, - { url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308, upload-time = "2025-10-15T18:23:18.971Z" }, - { url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222, upload-time = "2025-10-15T18:23:20.909Z" }, - { url = "https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657, upload-time = "2025-10-15T18:23:23.077Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482, upload-time = "2025-10-15T18:23:25.005Z" }, - { url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416, upload-time = "2025-10-15T18:23:27.009Z" }, - { url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584, upload-time = "2025-10-15T18:23:29.752Z" }, - { url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621, upload-time = "2025-10-15T18:23:32.06Z" }, - { url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916, upload-time = "2025-10-15T18:23:34.71Z" }, - { url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836, upload-time = "2025-10-15T18:23:36.967Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092, upload-time = "2025-10-15T18:23:38.573Z" }, - { url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158, upload-time = "2025-10-15T18:23:40.238Z" }, - { url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882, upload-time = "2025-10-15T18:23:42.434Z" }, - { url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001, upload-time = "2025-10-15T18:23:44.29Z" }, - { url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146, upload-time = "2025-10-15T18:23:46.065Z" }, - { url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344, upload-time = "2025-10-15T18:23:47.898Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864, upload-time = "2025-10-15T18:23:49.607Z" }, - { url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911, upload-time = "2025-10-15T18:23:51.351Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045, upload-time = "2025-10-15T18:23:53.177Z" }, - { url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282, upload-time = "2025-10-15T18:23:55.316Z" }, - { url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630, upload-time = "2025-10-15T18:23:57.149Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377 }, + { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343 }, + { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981 }, + { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399 }, + { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740 }, + { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201 }, + { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334 }, + { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162 }, + { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769 }, + { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107 }, + { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012 }, + { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493 }, + { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461 }, + { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912 }, + { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132 }, + { url = 
"https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099 }, + { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808 }, + { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804 }, + { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553 }, + { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729 }, + { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789 }, + { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917 }, + { url = 
"https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391 }, + { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477 }, + { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918 }, + { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406 }, + { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218 }, + { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564 }, + { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260 }, + { url = 
"https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248 }, + { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043 }, + { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915 }, + { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998 }, + { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201 }, + { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165 }, + { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834 }, + { url = 
"https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531 }, + { url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554 }, + { url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812 }, + { url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689 }, + { url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186 }, + { url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308 }, + { url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222 }, + { url = 
"https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657 }, + { url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482 }, + { url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416 }, + { url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584 }, + { url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621 }, + { url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916 }, + { url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836 }, + { url = 
"https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092 }, + { url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158 }, + { url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882 }, + { url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001 }, + { url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146 }, + { url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344 }, + { url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864 }, + { url = 
"https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911 }, + { url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045 }, + { url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282 }, + { url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630 }, ] [[package]] name = "platformdirs" -version = "4.5.0" +version = "4.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715 } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = 
"2025-10-08T17:44:47.223Z" }, + { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731 }, ] [[package]] @@ -2576,18 +2575,18 @@ dependencies = [ { name = "narwhals" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/05/1199e2a03ce6637960bc1e951ca0f928209a48cfceb57355806a88f214cf/plotly-6.5.0.tar.gz", hash = "sha256:d5d38224883fd38c1409bef7d6a8dc32b74348d39313f3c52ca998b8e447f5c8", size = 7013624, upload-time = "2025-11-17T18:39:24.523Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/05/1199e2a03ce6637960bc1e951ca0f928209a48cfceb57355806a88f214cf/plotly-6.5.0.tar.gz", hash = "sha256:d5d38224883fd38c1409bef7d6a8dc32b74348d39313f3c52ca998b8e447f5c8", size = 7013624 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/c3/3031c931098de393393e1f93a38dc9ed6805d86bb801acc3cf2d5bd1e6b7/plotly-6.5.0-py3-none-any.whl", hash = "sha256:5ac851e100367735250206788a2b1325412aa4a4917a4fe3e6f0bc5aa6f3d90a", size = 9893174, upload-time = "2025-11-17T18:39:20.351Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c3/3031c931098de393393e1f93a38dc9ed6805d86bb801acc3cf2d5bd1e6b7/plotly-6.5.0-py3-none-any.whl", hash = "sha256:5ac851e100367735250206788a2b1325412aa4a4917a4fe3e6f0bc5aa6f3d90a", size = 9893174 }, ] [[package]] name = "pluggy" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, ] [[package]] @@ -2597,186 +2596,186 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/45/b0847d88d6cfeb4413566738c8bbf1e1995fad3d42515327ff32cc1eb578/prettytable-3.17.0.tar.gz", hash = "sha256:59f2590776527f3c9e8cf9fe7b66dd215837cca96a9c39567414cbc632e8ddb0", size = 67892, upload-time = "2025-11-14T17:33:20.212Z" } +sdist = { url = "https://files.pythonhosted.org/packages/79/45/b0847d88d6cfeb4413566738c8bbf1e1995fad3d42515327ff32cc1eb578/prettytable-3.17.0.tar.gz", hash = "sha256:59f2590776527f3c9e8cf9fe7b66dd215837cca96a9c39567414cbc632e8ddb0", size = 67892 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/8c/83087ebc47ab0396ce092363001fa37c17153119ee282700c0713a195853/prettytable-3.17.0-py3-none-any.whl", hash = "sha256:aad69b294ddbe3e1f95ef8886a060ed1666a0b83018bbf56295f6f226c43d287", size = 34433, upload-time = "2025-11-14T17:33:19.093Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8c/83087ebc47ab0396ce092363001fa37c17153119ee282700c0713a195853/prettytable-3.17.0-py3-none-any.whl", hash = "sha256:aad69b294ddbe3e1f95ef8886a060ed1666a0b83018bbf56295f6f226c43d287", size 
= 34433 }, ] [[package]] name = "propcache" version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, - { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, - { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, - { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, - { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, - { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, - { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, - { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, - { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, - { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, - { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, - { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, - { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, - { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, - { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, - { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, - { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, - { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 
199398, upload-time = "2025-10-08T19:47:17.962Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, - { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, - { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, - { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, - { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, - { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, - { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, - { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, - { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, - { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, - { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, - { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, - { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, - { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, - { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, - { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, - { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, - { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, - { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, - { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 
197566, upload-time = "2025-10-08T19:48:02.604Z" }, - { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, - { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, - { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, - { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, - { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, - { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, - { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, - { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, - { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, - { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, - { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, - { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, - { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, - { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, - { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, - { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, - { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061 }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037 }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324 }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505 }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242 }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474 }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575 }, + { url = 
"https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736 }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019 }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376 }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988 }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615 }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066 }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655 }, + { url = 
"https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789 }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750 }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780 }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308 }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182 }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215 }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", 
size = 218112 }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442 }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398 }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920 }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748 }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877 }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437 }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586 }, + { url = 
"https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790 }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158 }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451 }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374 }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396 }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950 }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856 }, + { url = 
"https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420 }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254 }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205 }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873 }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739 }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514 }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781 }, + { url = 
"https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396 }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897 }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789 }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152 }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869 }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596 }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981 }, + { url = 
"https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490 }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371 }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424 }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566 }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130 }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625 }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209 }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797 }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140 }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257 }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097 }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455 }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372 }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411 }, + { url = 
"https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712 }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557 }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015 }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880 }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938 }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641 }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510 }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161 }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393 }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546 }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259 }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428 }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305 }, ] [[package]] name = "protobuf" -version = "6.33.1" +version = "6.33.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/03/a1440979a3f74f16cab3b75b0da1a1a7f922d56a8ddea96092391998edc0/protobuf-6.33.1.tar.gz", hash = 
"sha256:97f65757e8d09870de6fd973aeddb92f85435607235d20b2dfed93405d00c85b", size = 443432, upload-time = "2025-11-13T16:44:18.895Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296 } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/f1/446a9bbd2c60772ca36556bac8bfde40eceb28d9cc7838755bc41e001d8f/protobuf-6.33.1-cp310-abi3-win32.whl", hash = "sha256:f8d3fdbc966aaab1d05046d0240dd94d40f2a8c62856d41eaa141ff64a79de6b", size = 425593, upload-time = "2025-11-13T16:44:06.275Z" }, - { url = "https://files.pythonhosted.org/packages/a6/79/8780a378c650e3df849b73de8b13cf5412f521ca2ff9b78a45c247029440/protobuf-6.33.1-cp310-abi3-win_amd64.whl", hash = "sha256:923aa6d27a92bf44394f6abf7ea0500f38769d4b07f4be41cb52bd8b1123b9ed", size = 436883, upload-time = "2025-11-13T16:44:09.222Z" }, - { url = "https://files.pythonhosted.org/packages/cd/93/26213ff72b103ae55bb0d73e7fb91ea570ef407c3ab4fd2f1f27cac16044/protobuf-6.33.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:fe34575f2bdde76ac429ec7b570235bf0c788883e70aee90068e9981806f2490", size = 427522, upload-time = "2025-11-13T16:44:10.475Z" }, - { url = "https://files.pythonhosted.org/packages/c2/32/df4a35247923393aa6b887c3b3244a8c941c32a25681775f96e2b418f90e/protobuf-6.33.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:f8adba2e44cde2d7618996b3fc02341f03f5bc3f2748be72dc7b063319276178", size = 324445, upload-time = "2025-11-13T16:44:11.869Z" }, - { url = "https://files.pythonhosted.org/packages/8e/d0/d796e419e2ec93d2f3fa44888861c3f88f722cde02b7c3488fcc6a166820/protobuf-6.33.1-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:0f4cf01222c0d959c2b399142deb526de420be8236f22c71356e2a544e153c53", size = 339161, upload-time = "2025-11-13T16:44:12.778Z" }, - { url = 
"https://files.pythonhosted.org/packages/1d/2a/3c5f05a4af06649547027d288747f68525755de692a26a7720dced3652c0/protobuf-6.33.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:8fd7d5e0eb08cd5b87fd3df49bc193f5cfd778701f47e11d127d0afc6c39f1d1", size = 323171, upload-time = "2025-11-13T16:44:14.035Z" }, - { url = "https://files.pythonhosted.org/packages/08/b4/46310463b4f6ceef310f8348786f3cff181cea671578e3d9743ba61a459e/protobuf-6.33.1-py3-none-any.whl", hash = "sha256:d595a9fd694fdeb061a62fbe10eb039cc1e444df81ec9bb70c7fc59ebcb1eafa", size = 170477, upload-time = "2025-11-13T16:44:17.633Z" }, + { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603 }, + { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930 }, + { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621 }, + { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460 }, + { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168 }, + { url = 
"https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270 }, + { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501 }, ] [[package]] name = "psutil" version = "7.1.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" }, - { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" }, - { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" }, - { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" }, - { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" }, - { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" }, - { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" }, - { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" }, - { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" }, - { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, - { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, - { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751 }, + { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368 }, + { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134 }, + { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904 }, + { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642 }, + { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518 }, + { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843 }, + { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369 }, + { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210 }, + { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash 
= "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182 }, + { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466 }, + { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756 }, + { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359 }, + { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171 }, + { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261 }, + { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635 }, + { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = 
"sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633 }, + { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608 }, ] [[package]] name = "pyarrow" version = "22.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578, upload-time = "2025-10-24T10:05:21.583Z" }, - { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906, upload-time = "2025-10-24T10:05:29.485Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677, upload-time = "2025-10-24T10:05:38.274Z" }, - { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315, upload-time = 
"2025-10-24T10:05:47.314Z" }, - { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906, upload-time = "2025-10-24T10:05:58.254Z" }, - { url = "https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783, upload-time = "2025-10-24T10:06:08.08Z" }, - { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883, upload-time = "2025-10-24T10:06:14.204Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d6/d0fac16a2963002fc22c8fa75180a838737203d558f0ed3b564c4a54eef5/pyarrow-22.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e6e95176209257803a8b3d0394f21604e796dadb643d2f7ca21b66c9c0b30c9a", size = 34204629, upload-time = "2025-10-24T10:06:20.274Z" }, - { url = "https://files.pythonhosted.org/packages/c6/9c/1d6357347fbae062ad3f17082f9ebc29cc733321e892c0d2085f42a2212b/pyarrow-22.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:001ea83a58024818826a9e3f89bf9310a114f7e26dfe404a4c32686f97bd7901", size = 35985783, upload-time = "2025-10-24T10:06:27.301Z" }, - { url = "https://files.pythonhosted.org/packages/ff/c0/782344c2ce58afbea010150df07e3a2f5fdad299cd631697ae7bd3bac6e3/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ce20fe000754f477c8a9125543f1936ea5b8867c5406757c224d745ed033e691", size = 45020999, upload-time = "2025-10-24T10:06:35.387Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/8b/5362443737a5307a7b67c1017c42cd104213189b4970bf607e05faf9c525/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e0a15757fccb38c410947df156f9749ae4a3c89b2393741a50521f39a8cf202a", size = 47724601, upload-time = "2025-10-24T10:06:43.551Z" }, - { url = "https://files.pythonhosted.org/packages/69/4d/76e567a4fc2e190ee6072967cb4672b7d9249ac59ae65af2d7e3047afa3b/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cedb9dd9358e4ea1d9bce3665ce0797f6adf97ff142c8e25b46ba9cdd508e9b6", size = 48001050, upload-time = "2025-10-24T10:06:52.284Z" }, - { url = "https://files.pythonhosted.org/packages/01/5e/5653f0535d2a1aef8223cee9d92944cb6bccfee5cf1cd3f462d7cb022790/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:252be4a05f9d9185bb8c18e83764ebcfea7185076c07a7a662253af3a8c07941", size = 50307877, upload-time = "2025-10-24T10:07:02.405Z" }, - { url = "https://files.pythonhosted.org/packages/2d/f8/1d0bd75bf9328a3b826e24a16e5517cd7f9fbf8d34a3184a4566ef5a7f29/pyarrow-22.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:a4893d31e5ef780b6edcaf63122df0f8d321088bb0dee4c8c06eccb1ca28d145", size = 27977099, upload-time = "2025-10-24T10:08:07.259Z" }, - { url = "https://files.pythonhosted.org/packages/90/81/db56870c997805bf2b0f6eeeb2d68458bf4654652dccdcf1bf7a42d80903/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:f7fe3dbe871294ba70d789be16b6e7e52b418311e166e0e3cba9522f0f437fb1", size = 34336685, upload-time = "2025-10-24T10:07:11.47Z" }, - { url = "https://files.pythonhosted.org/packages/1c/98/0727947f199aba8a120f47dfc229eeb05df15bcd7a6f1b669e9f882afc58/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ba95112d15fd4f1105fb2402c4eab9068f0554435e9b7085924bcfaac2cc306f", size = 36032158, upload-time = "2025-10-24T10:07:18.626Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/b4/9babdef9c01720a0785945c7cf550e4acd0ebcd7bdd2e6f0aa7981fa85e2/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c064e28361c05d72eed8e744c9605cbd6d2bb7481a511c74071fd9b24bc65d7d", size = 44892060, upload-time = "2025-10-24T10:07:26.002Z" }, - { url = "https://files.pythonhosted.org/packages/f8/ca/2f8804edd6279f78a37062d813de3f16f29183874447ef6d1aadbb4efa0f/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6f9762274496c244d951c819348afbcf212714902742225f649cf02823a6a10f", size = 47504395, upload-time = "2025-10-24T10:07:34.09Z" }, - { url = "https://files.pythonhosted.org/packages/b9/f0/77aa5198fd3943682b2e4faaf179a674f0edea0d55d326d83cb2277d9363/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a9d9ffdc2ab696f6b15b4d1f7cec6658e1d788124418cb30030afbae31c64746", size = 48066216, upload-time = "2025-10-24T10:07:43.528Z" }, - { url = "https://files.pythonhosted.org/packages/79/87/a1937b6e78b2aff18b706d738c9e46ade5bfcf11b294e39c87706a0089ac/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ec1a15968a9d80da01e1d30349b2b0d7cc91e96588ee324ce1b5228175043e95", size = 50288552, upload-time = "2025-10-24T10:07:53.519Z" }, - { url = "https://files.pythonhosted.org/packages/60/ae/b5a5811e11f25788ccfdaa8f26b6791c9807119dffcf80514505527c384c/pyarrow-22.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bba208d9c7decf9961998edf5c65e3ea4355d5818dd6cd0f6809bec1afb951cc", size = 28262504, upload-time = "2025-10-24T10:08:00.932Z" }, - { url = "https://files.pythonhosted.org/packages/bd/b0/0fa4d28a8edb42b0a7144edd20befd04173ac79819547216f8a9f36f9e50/pyarrow-22.0.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:9bddc2cade6561f6820d4cd73f99a0243532ad506bc510a75a5a65a522b2d74d", size = 34224062, upload-time = "2025-10-24T10:08:14.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/a8/7a719076b3c1be0acef56a07220c586f25cd24de0e3f3102b438d18ae5df/pyarrow-22.0.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:e70ff90c64419709d38c8932ea9fe1cc98415c4f87ea8da81719e43f02534bc9", size = 35990057, upload-time = "2025-10-24T10:08:21.842Z" }, - { url = "https://files.pythonhosted.org/packages/89/3c/359ed54c93b47fb6fe30ed16cdf50e3f0e8b9ccfb11b86218c3619ae50a8/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:92843c305330aa94a36e706c16209cd4df274693e777ca47112617db7d0ef3d7", size = 45068002, upload-time = "2025-10-24T10:08:29.034Z" }, - { url = "https://files.pythonhosted.org/packages/55/fc/4945896cc8638536ee787a3bd6ce7cec8ec9acf452d78ec39ab328efa0a1/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:6dda1ddac033d27421c20d7a7943eec60be44e0db4e079f33cc5af3b8280ccde", size = 47737765, upload-time = "2025-10-24T10:08:38.559Z" }, - { url = "https://files.pythonhosted.org/packages/cd/5e/7cb7edeb2abfaa1f79b5d5eb89432356155c8426f75d3753cbcb9592c0fd/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:84378110dd9a6c06323b41b56e129c504d157d1a983ce8f5443761eb5256bafc", size = 48048139, upload-time = "2025-10-24T10:08:46.784Z" }, - { url = "https://files.pythonhosted.org/packages/88/c6/546baa7c48185f5e9d6e59277c4b19f30f48c94d9dd938c2a80d4d6b067c/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:854794239111d2b88b40b6ef92aa478024d1e5074f364033e73e21e3f76b25e0", size = 50314244, upload-time = "2025-10-24T10:08:55.771Z" }, - { url = "https://files.pythonhosted.org/packages/3c/79/755ff2d145aafec8d347bf18f95e4e81c00127f06d080135dfc86aea417c/pyarrow-22.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:b883fe6fd85adad7932b3271c38ac289c65b7337c2c132e9569f9d3940620730", size = 28757501, upload-time = "2025-10-24T10:09:59.891Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/d2/237d75ac28ced3147912954e3c1a174df43a95f4f88e467809118a8165e0/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7a820d8ae11facf32585507c11f04e3f38343c1e784c9b5a8b1da5c930547fe2", size = 34355506, upload-time = "2025-10-24T10:09:02.953Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2c/733dfffe6d3069740f98e57ff81007809067d68626c5faef293434d11bd6/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:c6ec3675d98915bf1ec8b3c7986422682f7232ea76cad276f4c8abd5b7319b70", size = 36047312, upload-time = "2025-10-24T10:09:10.334Z" }, - { url = "https://files.pythonhosted.org/packages/7c/2b/29d6e3782dc1f299727462c1543af357a0f2c1d3c160ce199950d9ca51eb/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3e739edd001b04f654b166204fc7a9de896cf6007eaff33409ee9e50ceaff754", size = 45081609, upload-time = "2025-10-24T10:09:18.61Z" }, - { url = "https://files.pythonhosted.org/packages/8d/42/aa9355ecc05997915af1b7b947a7f66c02dcaa927f3203b87871c114ba10/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7388ac685cab5b279a41dfe0a6ccd99e4dbf322edfb63e02fc0443bf24134e91", size = 47703663, upload-time = "2025-10-24T10:09:27.369Z" }, - { url = "https://files.pythonhosted.org/packages/ee/62/45abedde480168e83a1de005b7b7043fd553321c1e8c5a9a114425f64842/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f633074f36dbc33d5c05b5dc75371e5660f1dbf9c8b1d95669def05e5425989c", size = 48066543, upload-time = "2025-10-24T10:09:34.908Z" }, - { url = "https://files.pythonhosted.org/packages/84/e9/7878940a5b072e4f3bf998770acafeae13b267f9893af5f6d4ab3904b67e/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4c19236ae2402a8663a2c8f21f1870a03cc57f0bef7e4b6eb3238cc82944de80", size = 50288838, upload-time = "2025-10-24T10:09:44.394Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/03/f335d6c52b4a4761bcc83499789a1e2e16d9d201a58c327a9b5cc9a41bd9/pyarrow-22.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0c34fe18094686194f204a3b1787a27456897d8a2d62caf84b61e8dfbc0252ae", size = 29185594, upload-time = "2025-10-24T10:09:53.111Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578 }, + { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906 }, + { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677 }, + { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315 }, + { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906 }, + { url = 
"https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783 }, + { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883 }, + { url = "https://files.pythonhosted.org/packages/a6/d6/d0fac16a2963002fc22c8fa75180a838737203d558f0ed3b564c4a54eef5/pyarrow-22.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e6e95176209257803a8b3d0394f21604e796dadb643d2f7ca21b66c9c0b30c9a", size = 34204629 }, + { url = "https://files.pythonhosted.org/packages/c6/9c/1d6357347fbae062ad3f17082f9ebc29cc733321e892c0d2085f42a2212b/pyarrow-22.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:001ea83a58024818826a9e3f89bf9310a114f7e26dfe404a4c32686f97bd7901", size = 35985783 }, + { url = "https://files.pythonhosted.org/packages/ff/c0/782344c2ce58afbea010150df07e3a2f5fdad299cd631697ae7bd3bac6e3/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ce20fe000754f477c8a9125543f1936ea5b8867c5406757c224d745ed033e691", size = 45020999 }, + { url = "https://files.pythonhosted.org/packages/1b/8b/5362443737a5307a7b67c1017c42cd104213189b4970bf607e05faf9c525/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e0a15757fccb38c410947df156f9749ae4a3c89b2393741a50521f39a8cf202a", size = 47724601 }, + { url = "https://files.pythonhosted.org/packages/69/4d/76e567a4fc2e190ee6072967cb4672b7d9249ac59ae65af2d7e3047afa3b/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cedb9dd9358e4ea1d9bce3665ce0797f6adf97ff142c8e25b46ba9cdd508e9b6", size = 48001050 }, + { url = 
"https://files.pythonhosted.org/packages/01/5e/5653f0535d2a1aef8223cee9d92944cb6bccfee5cf1cd3f462d7cb022790/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:252be4a05f9d9185bb8c18e83764ebcfea7185076c07a7a662253af3a8c07941", size = 50307877 }, + { url = "https://files.pythonhosted.org/packages/2d/f8/1d0bd75bf9328a3b826e24a16e5517cd7f9fbf8d34a3184a4566ef5a7f29/pyarrow-22.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:a4893d31e5ef780b6edcaf63122df0f8d321088bb0dee4c8c06eccb1ca28d145", size = 27977099 }, + { url = "https://files.pythonhosted.org/packages/90/81/db56870c997805bf2b0f6eeeb2d68458bf4654652dccdcf1bf7a42d80903/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:f7fe3dbe871294ba70d789be16b6e7e52b418311e166e0e3cba9522f0f437fb1", size = 34336685 }, + { url = "https://files.pythonhosted.org/packages/1c/98/0727947f199aba8a120f47dfc229eeb05df15bcd7a6f1b669e9f882afc58/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ba95112d15fd4f1105fb2402c4eab9068f0554435e9b7085924bcfaac2cc306f", size = 36032158 }, + { url = "https://files.pythonhosted.org/packages/96/b4/9babdef9c01720a0785945c7cf550e4acd0ebcd7bdd2e6f0aa7981fa85e2/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c064e28361c05d72eed8e744c9605cbd6d2bb7481a511c74071fd9b24bc65d7d", size = 44892060 }, + { url = "https://files.pythonhosted.org/packages/f8/ca/2f8804edd6279f78a37062d813de3f16f29183874447ef6d1aadbb4efa0f/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6f9762274496c244d951c819348afbcf212714902742225f649cf02823a6a10f", size = 47504395 }, + { url = "https://files.pythonhosted.org/packages/b9/f0/77aa5198fd3943682b2e4faaf179a674f0edea0d55d326d83cb2277d9363/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a9d9ffdc2ab696f6b15b4d1f7cec6658e1d788124418cb30030afbae31c64746", size = 48066216 }, + { url = 
"https://files.pythonhosted.org/packages/79/87/a1937b6e78b2aff18b706d738c9e46ade5bfcf11b294e39c87706a0089ac/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ec1a15968a9d80da01e1d30349b2b0d7cc91e96588ee324ce1b5228175043e95", size = 50288552 }, + { url = "https://files.pythonhosted.org/packages/60/ae/b5a5811e11f25788ccfdaa8f26b6791c9807119dffcf80514505527c384c/pyarrow-22.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bba208d9c7decf9961998edf5c65e3ea4355d5818dd6cd0f6809bec1afb951cc", size = 28262504 }, + { url = "https://files.pythonhosted.org/packages/bd/b0/0fa4d28a8edb42b0a7144edd20befd04173ac79819547216f8a9f36f9e50/pyarrow-22.0.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:9bddc2cade6561f6820d4cd73f99a0243532ad506bc510a75a5a65a522b2d74d", size = 34224062 }, + { url = "https://files.pythonhosted.org/packages/0f/a8/7a719076b3c1be0acef56a07220c586f25cd24de0e3f3102b438d18ae5df/pyarrow-22.0.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:e70ff90c64419709d38c8932ea9fe1cc98415c4f87ea8da81719e43f02534bc9", size = 35990057 }, + { url = "https://files.pythonhosted.org/packages/89/3c/359ed54c93b47fb6fe30ed16cdf50e3f0e8b9ccfb11b86218c3619ae50a8/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:92843c305330aa94a36e706c16209cd4df274693e777ca47112617db7d0ef3d7", size = 45068002 }, + { url = "https://files.pythonhosted.org/packages/55/fc/4945896cc8638536ee787a3bd6ce7cec8ec9acf452d78ec39ab328efa0a1/pyarrow-22.0.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:6dda1ddac033d27421c20d7a7943eec60be44e0db4e079f33cc5af3b8280ccde", size = 47737765 }, + { url = "https://files.pythonhosted.org/packages/cd/5e/7cb7edeb2abfaa1f79b5d5eb89432356155c8426f75d3753cbcb9592c0fd/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:84378110dd9a6c06323b41b56e129c504d157d1a983ce8f5443761eb5256bafc", size = 48048139 }, + { url = 
"https://files.pythonhosted.org/packages/88/c6/546baa7c48185f5e9d6e59277c4b19f30f48c94d9dd938c2a80d4d6b067c/pyarrow-22.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:854794239111d2b88b40b6ef92aa478024d1e5074f364033e73e21e3f76b25e0", size = 50314244 }, + { url = "https://files.pythonhosted.org/packages/3c/79/755ff2d145aafec8d347bf18f95e4e81c00127f06d080135dfc86aea417c/pyarrow-22.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:b883fe6fd85adad7932b3271c38ac289c65b7337c2c132e9569f9d3940620730", size = 28757501 }, + { url = "https://files.pythonhosted.org/packages/0e/d2/237d75ac28ced3147912954e3c1a174df43a95f4f88e467809118a8165e0/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7a820d8ae11facf32585507c11f04e3f38343c1e784c9b5a8b1da5c930547fe2", size = 34355506 }, + { url = "https://files.pythonhosted.org/packages/1e/2c/733dfffe6d3069740f98e57ff81007809067d68626c5faef293434d11bd6/pyarrow-22.0.0-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:c6ec3675d98915bf1ec8b3c7986422682f7232ea76cad276f4c8abd5b7319b70", size = 36047312 }, + { url = "https://files.pythonhosted.org/packages/7c/2b/29d6e3782dc1f299727462c1543af357a0f2c1d3c160ce199950d9ca51eb/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3e739edd001b04f654b166204fc7a9de896cf6007eaff33409ee9e50ceaff754", size = 45081609 }, + { url = "https://files.pythonhosted.org/packages/8d/42/aa9355ecc05997915af1b7b947a7f66c02dcaa927f3203b87871c114ba10/pyarrow-22.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7388ac685cab5b279a41dfe0a6ccd99e4dbf322edfb63e02fc0443bf24134e91", size = 47703663 }, + { url = "https://files.pythonhosted.org/packages/ee/62/45abedde480168e83a1de005b7b7043fd553321c1e8c5a9a114425f64842/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f633074f36dbc33d5c05b5dc75371e5660f1dbf9c8b1d95669def05e5425989c", size = 48066543 }, + { url = 
"https://files.pythonhosted.org/packages/84/e9/7878940a5b072e4f3bf998770acafeae13b267f9893af5f6d4ab3904b67e/pyarrow-22.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4c19236ae2402a8663a2c8f21f1870a03cc57f0bef7e4b6eb3238cc82944de80", size = 50288838 }, + { url = "https://files.pythonhosted.org/packages/7b/03/f335d6c52b4a4761bcc83499789a1e2e16d9d201a58c327a9b5cc9a41bd9/pyarrow-22.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0c34fe18094686194f204a3b1787a27456897d8a2d62caf84b61e8dfbc0252ae", size = 29185594 }, ] [[package]] name = "pyasn1" version = "0.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, ] [[package]] @@ -2786,18 +2785,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = 
"sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892 } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 }, ] [[package]] name = "pycparser" version = "2.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140 }, ] [[package]] @@ -2810,9 +2809,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580 }, ] [[package]] @@ -2822,77 +2821,73 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { 
url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = 
"https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, 
upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - 
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = 
"2025-11-04T13:42:59.471Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990 }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003 }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200 }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578 }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504 }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816 }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366 }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698 }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603 }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591 }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068 }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908 }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145 }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179 }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403 }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206 }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307 }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258 }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917 }, + { url = 
"https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186 }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164 }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146 }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788 }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133 }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852 }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679 }, + { url = 
"https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766 }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005 }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622 }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725 }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040 }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691 }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897 }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302 }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877 }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680 }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960 }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102 }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039 }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126 }, + { url = 
"https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489 }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288 }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255 }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760 }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092 }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385 }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832 }, + { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585 }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078 }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914 }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560 }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244 }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955 }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906 }, + { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607 }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769 }, ] [[package]] name = "pygments" version = "2.19.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, ] [[package]] @@ -2908,9 +2903,9 @@ dependencies = [ { name = "platformdirs" }, { name = "tomlkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/d2/b081da1a8930d00e3fc06352a1d449aaf815d4982319fab5d8cdb2e9ab35/pylint-4.0.4.tar.gz", hash = 
"sha256:d9b71674e19b1c36d79265b5887bf8e55278cbe236c9e95d22dc82cf044fdbd2", size = 1571735, upload-time = "2025-11-30T13:29:04.315Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/d2/b081da1a8930d00e3fc06352a1d449aaf815d4982319fab5d8cdb2e9ab35/pylint-4.0.4.tar.gz", hash = "sha256:d9b71674e19b1c36d79265b5887bf8e55278cbe236c9e95d22dc82cf044fdbd2", size = 1571735 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/92/d40f5d937517cc489ad848fc4414ecccc7592e4686b9071e09e64f5e378e/pylint-4.0.4-py3-none-any.whl", hash = "sha256:63e06a37d5922555ee2c20963eb42559918c20bd2b21244e4ef426e7c43b92e0", size = 536425, upload-time = "2025-11-30T13:29:02.53Z" }, + { url = "https://files.pythonhosted.org/packages/a6/92/d40f5d937517cc489ad848fc4414ecccc7592e4686b9071e09e64f5e378e/pylint-4.0.4-py3-none-any.whl", hash = "sha256:63e06a37d5922555ee2c20963eb42559918c20bd2b21244e4ef426e7c43b92e0", size = 536425 }, ] [[package]] @@ -2920,36 +2915,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pylint" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/b2/cf916c3c8127282f60927a3fd382ef8e477e6ef090b3d1f1fedd62bff916/pylint_per_file_ignores-3.2.0.tar.gz", hash = "sha256:5eb30b2b64c49ca616b8940346b8b5b4973eeaa15700840c8b81a4b8ba565a02", size = 63854, upload-time = "2025-11-25T14:13:14.577Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/b2/cf916c3c8127282f60927a3fd382ef8e477e6ef090b3d1f1fedd62bff916/pylint_per_file_ignores-3.2.0.tar.gz", hash = "sha256:5eb30b2b64c49ca616b8940346b8b5b4973eeaa15700840c8b81a4b8ba565a02", size = 63854 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/6a/09cbda0032e1040eea8c7daec3994e6d32b3edc26a81d226fd643537886b/pylint_per_file_ignores-3.2.0-py3-none-any.whl", hash = "sha256:8b995b7486f6652f942cf5721e24c29b72735fa911b6d22b65b2f87bad323590", size = 5576, upload-time = "2025-11-25T14:13:13.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/6a/09cbda0032e1040eea8c7daec3994e6d32b3edc26a81d226fd643537886b/pylint_per_file_ignores-3.2.0-py3-none-any.whl", hash = "sha256:8b995b7486f6652f942cf5721e24c29b72735fa911b6d22b65b2f87bad323590", size = 5576 }, ] [[package]] name = "pyparsing" version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274 } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890 }, ] [[package]] name = "pyperclip" version = "1.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/52/d87eba7cb129b81563019d1679026e7a112ef76855d6159d24754dbd2a51/pyperclip-1.11.0.tar.gz", hash = "sha256:244035963e4428530d9e3a6101a1ef97209c6825edab1567beac148ccc1db1b6", size = 12185, upload-time = "2025-09-26T14:40:37.245Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/52/d87eba7cb129b81563019d1679026e7a112ef76855d6159d24754dbd2a51/pyperclip-1.11.0.tar.gz", hash 
= "sha256:244035963e4428530d9e3a6101a1ef97209c6825edab1567beac148ccc1db1b6", size = 12185 } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" }, + { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063 }, ] [[package]] name = "pyreadline3" version = "3.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, ] [[package]] @@ -2960,14 +2955,14 @@ dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/a6/1b/0aa08ee42948b61745ac5b5b5ccaec4669e8884b53d31c8ec20b2fcd6b6f/pyright-1.1.407.tar.gz", hash = "sha256:099674dba5c10489832d4a4b2d302636152a9a42d317986c38474c76fe562262", size = 4122872, upload-time = "2025-10-24T23:17:15.145Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/1b/0aa08ee42948b61745ac5b5b5ccaec4669e8884b53d31c8ec20b2fcd6b6f/pyright-1.1.407.tar.gz", hash = "sha256:099674dba5c10489832d4a4b2d302636152a9a42d317986c38474c76fe562262", size = 4122872 } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/93/b69052907d032b00c40cb656d21438ec00b3a471733de137a3f65a49a0a0/pyright-1.1.407-py3-none-any.whl", hash = "sha256:6dd419f54fcc13f03b52285796d65e639786373f433e243f8b94cf93a7444d21", size = 5997008, upload-time = "2025-10-24T23:17:13.159Z" }, + { url = "https://files.pythonhosted.org/packages/dc/93/b69052907d032b00c40cb656d21438ec00b3a471733de137a3f65a49a0a0/pyright-1.1.407-py3-none-any.whl", hash = "sha256:6dd419f54fcc13f03b52285796d65e639786373f433e243f8b94cf93a7444d21", size = 5997008 }, ] [[package]] name = "pytest" -version = "9.0.1" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -2976,9 +2971,9 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801 }, ] [[package]] @@ -2990,9 +2985,9 @@ dependencies = [ { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424 }, ] [[package]] @@ -3002,9 +2997,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095 }, ] [[package]] @@ -3015,9 +3010,9 @@ dependencies = [ { name = "coverage" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/1d/3e4230cc67cd6205bbe03c3527500c0ccaf7f0c78b436537eac71590ee4a/pytest_testmon-2.2.0.tar.gz", hash = "sha256:01f488e955ed0e0049777bee598bf1f647dd524e06f544c31a24e68f8d775a51", size = 23108, upload-time = "2025-12-01T07:30:24.76Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/1d/3e4230cc67cd6205bbe03c3527500c0ccaf7f0c78b436537eac71590ee4a/pytest_testmon-2.2.0.tar.gz", hash = "sha256:01f488e955ed0e0049777bee598bf1f647dd524e06f544c31a24e68f8d775a51", size = 23108 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/55/ebb3c2f59fb089f08d00f764830d35780fc4e4c41dffcadafa3264682b65/pytest_testmon-2.2.0-py3-none-any.whl", hash = "sha256:2604ca44a54d61a2e830d9ce828b41a837075e4ebc1f81b148add8e90d34815b", size = 25199, 
upload-time = "2025-12-01T07:30:23.623Z" }, + { url = "https://files.pythonhosted.org/packages/61/55/ebb3c2f59fb089f08d00f764830d35780fc4e4c41dffcadafa3264682b65/pytest_testmon-2.2.0-py3-none-any.whl", hash = "sha256:2604ca44a54d61a2e830d9ce828b41a837075e4ebc1f81b148add8e90d34815b", size = 25199 }, ] [[package]] @@ -3027,27 +3022,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] [[package]] name = "python-dotenv" version = "1.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221 } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230 }, ] [[package]] name = "pytz" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, ] [[package]] @@ 
-3055,139 +3050,139 @@ name = "pywin32" version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, - { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, - { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, - { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, - { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, - { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543 }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700 }, + { url = 
"https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700 }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318 }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714 }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800 }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540 }, ] [[package]] name = "pyyaml" version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, - { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, - { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, - { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, - { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, - { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, - { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, - { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, - { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, - { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, - { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, - { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, - { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, - { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, - { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, - { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, - { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, - { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, - { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, - { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, - { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, - { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, - { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, - { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 }, + { url = 
"https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 }, + { url 
= "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 }, + { url = 
"https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, ] [[package]] name = "regex" version = "2025.11.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, - { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, - { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, - { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, - { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" }, - { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, - { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, - { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" }, - { url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" }, - { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" }, - { url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" }, - { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" }, - { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" }, - { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = 
"2025-11-03T21:32:13.906Z" }, - { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" }, - { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" }, - { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" }, - { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" }, - { url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" }, - { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" }, - { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" }, - { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" }, - { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", 
size = 795472, upload-time = "2025-11-03T21:32:36.364Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" }, - { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" }, - { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" }, - { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" }, - { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" }, - { url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" }, - { url = 
"https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" }, - { url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" }, - { url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" }, - { url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" }, - { url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" }, - { url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = 
"2025-11-03T21:32:59.274Z" }, - { url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" }, - { url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958, upload-time = "2025-11-03T21:33:03.379Z" }, - { url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289, upload-time = "2025-11-03T21:33:05.374Z" }, - { url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" }, - { url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" }, - { url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" }, - { url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" }, - { url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" }, - { url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = "2025-11-03T21:33:25.999Z" }, - { url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852, upload-time = "2025-11-03T21:33:27.852Z" }, - { url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" }, - { url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" }, - { url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = 
"2025-11-03T21:33:36.793Z" }, - { url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" }, - { url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" }, - { url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" }, - { url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312 }, + { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256 }, + { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921 }, + { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568 }, + { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165 }, + { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182 }, + { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501 }, + { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842 }, + { url = 
"https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519 }, + { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611 }, + { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759 }, + { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194 }, + { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069 }, + { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330 }, + { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081 }, + { url = 
"https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123 }, + { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814 }, + { url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592 }, + { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122 }, + { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272 }, + { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497 }, + { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892 }, + { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462 }, + { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528 }, + { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866 }, + { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189 }, + { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054 }, + { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325 }, + { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984 }, + { url = 
"https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673 }, + { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029 }, + { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437 }, + { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368 }, + { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921 }, + { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708 }, + { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472 }, + { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341 }, + { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666 }, + { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473 }, + { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792 }, + { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214 }, + { url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469 }, + { url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089 }, + { url = 
"https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059 }, + { url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900 }, + { url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010 }, + { url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893 }, + { url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522 }, + { url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272 }, + { url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958 }, + { url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289 }, + { url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026 }, + { url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499 }, + { url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604 }, + { url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320 }, + { url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372 }, + { url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985 }, + { url = 
"https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669 }, + { url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030 }, + { url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674 }, + { url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451 }, + { url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980 }, + { url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852 }, + { url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566 }, + { url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463 }, + { url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694 }, + { url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691 }, + { url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583 }, + { url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286 }, + { url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741 }, ] [[package]] @@ -3200,9 +3195,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = 
"sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, ] [[package]] @@ -3213,9 +3208,9 @@ dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990 } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393 }, ] [[package]] @@ -3225,9 +3220,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "rich" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/f7/1c65e0245d4c7009a87ac92908294a66e7e7635eccf76a68550f40c6df80/rich_argparse-1.7.2.tar.gz", hash = "sha256:64fd2e948fc96e8a1a06e0e72c111c2ce7f3af74126d75c0f5f63926e7289cd1", size = 38500, upload-time = "2025-11-01T10:35:44.232Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/f7/1c65e0245d4c7009a87ac92908294a66e7e7635eccf76a68550f40c6df80/rich_argparse-1.7.2.tar.gz", hash = "sha256:64fd2e948fc96e8a1a06e0e72c111c2ce7f3af74126d75c0f5f63926e7289cd1", size = 38500 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl", hash = "sha256:0559b1f47a19bbeb82bf15f95a057f99bcbbc98385532f57937f9fc57acc501a", size = 25476, upload-time = "2025-11-01T10:35:42.681Z" }, + { url = "https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl", hash = "sha256:0559b1f47a19bbeb82bf15f95a057f99bcbbc98385532f57937f9fc57acc501a", size = 25476 }, ] [[package]] @@ -3237,35 +3232,35 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034 } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 }, ] [[package]] name = "ruff" version = "0.14.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/d9/f7a0c4b3a2bf2556cd5d99b05372c29980249ef71e8e32669ba77428c82c/ruff-0.14.8.tar.gz", hash = "sha256:774ed0dd87d6ce925e3b8496feb3a00ac564bea52b9feb551ecd17e0a23d1eed", size = 5765385, upload-time = "2025-12-04T15:06:17.669Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/b8/9537b52010134b1d2b72870cc3f92d5fb759394094741b09ceccae183fbe/ruff-0.14.8-py3-none-linux_armv6l.whl", hash = "sha256:ec071e9c82eca417f6111fd39f7043acb53cd3fde9b1f95bbed745962e345afb", size = 13441540, upload-time = "2025-12-04T15:06:14.896Z" }, - { url = "https://files.pythonhosted.org/packages/24/00/99031684efb025829713682012b6dd37279b1f695ed1b01725f85fd94b38/ruff-0.14.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8cdb162a7159f4ca36ce980a18c43d8f036966e7f73f866ac8f493b75e0c27e9", size = 13669384, upload-time = "2025-12-04T15:06:51.809Z" }, - { url = "https://files.pythonhosted.org/packages/72/64/3eb5949169fc19c50c04f28ece2c189d3b6edd57e5b533649dae6ca484fe/ruff-0.14.8-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:2e2fcbefe91f9fad0916850edf0854530c15bd1926b6b779de47e9ab619ea38f", size = 12806917, upload-time = "2025-12-04T15:06:08.925Z" }, - { url = "https://files.pythonhosted.org/packages/c4/08/5250babb0b1b11910f470370ec0cbc67470231f7cdc033cee57d4976f941/ruff-0.14.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d70721066a296f45786ec31916dc287b44040f553da21564de0ab4d45a869b", size = 13256112, upload-time = "2025-12-04T15:06:23.498Z" }, - { url = "https://files.pythonhosted.org/packages/78/4c/6c588e97a8e8c2d4b522c31a579e1df2b4d003eddfbe23d1f262b1a431ff/ruff-0.14.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c87e09b3cd9d126fc67a9ecd3b5b1d3ded2b9c7fce3f16e315346b9d05cfb52", size = 13227559, upload-time = "2025-12-04T15:06:33.432Z" }, - { url = "https://files.pythonhosted.org/packages/23/ce/5f78cea13eda8eceac71b5f6fa6e9223df9b87bb2c1891c166d1f0dce9f1/ruff-0.14.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d62cb310c4fbcb9ee4ac023fe17f984ae1e12b8a4a02e3d21489f9a2a5f730c", size = 13896379, upload-time = "2025-12-04T15:06:02.687Z" }, - { url = "https://files.pythonhosted.org/packages/cf/79/13de4517c4dadce9218a20035b21212a4c180e009507731f0d3b3f5df85a/ruff-0.14.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1af35c2d62633d4da0521178e8a2641c636d2a7153da0bac1b30cfd4ccd91344", size = 15372786, upload-time = "2025-12-04T15:06:29.828Z" }, - { url = "https://files.pythonhosted.org/packages/00/06/33df72b3bb42be8a1c3815fd4fae83fa2945fc725a25d87ba3e42d1cc108/ruff-0.14.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25add4575ffecc53d60eed3f24b1e934493631b48ebbc6ebaf9d8517924aca4b", size = 14990029, upload-time = "2025-12-04T15:06:36.812Z" }, - { url = "https://files.pythonhosted.org/packages/64/61/0f34927bd90925880394de0e081ce1afab66d7b3525336f5771dcf0cb46c/ruff-0.14.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4c943d847b7f02f7db4201a0600ea7d244d8a404fbb639b439e987edcf2baf9a", size = 14407037, upload-time = "2025-12-04T15:06:39.979Z" }, - { url = "https://files.pythonhosted.org/packages/96/bc/058fe0aefc0fbf0d19614cb6d1a3e2c048f7dc77ca64957f33b12cfdc5ef/ruff-0.14.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6e8bf7b4f627548daa1b69283dac5a296bfe9ce856703b03130732e20ddfe2", size = 14102390, upload-time = "2025-12-04T15:06:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/af/a4/e4f77b02b804546f4c17e8b37a524c27012dd6ff05855d2243b49a7d3cb9/ruff-0.14.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:7aaf2974f378e6b01d1e257c6948207aec6a9b5ba53fab23d0182efb887a0e4a", size = 14230793, upload-time = "2025-12-04T15:06:20.497Z" }, - { url = "https://files.pythonhosted.org/packages/3f/52/bb8c02373f79552e8d087cedaffad76b8892033d2876c2498a2582f09dcf/ruff-0.14.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e5758ca513c43ad8a4ef13f0f081f80f08008f410790f3611a21a92421ab045b", size = 13160039, upload-time = "2025-12-04T15:06:49.06Z" }, - { url = "https://files.pythonhosted.org/packages/1f/ad/b69d6962e477842e25c0b11622548df746290cc6d76f9e0f4ed7456c2c31/ruff-0.14.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f74f7ba163b6e85a8d81a590363bf71618847e5078d90827749bfda1d88c9cdf", size = 13205158, upload-time = "2025-12-04T15:06:54.574Z" }, - { url = "https://files.pythonhosted.org/packages/06/63/54f23da1315c0b3dfc1bc03fbc34e10378918a20c0b0f086418734e57e74/ruff-0.14.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eed28f6fafcc9591994c42254f5a5c5ca40e69a30721d2ab18bb0bb3baac3ab6", size = 13469550, upload-time = "2025-12-04T15:05:59.209Z" }, - { url = "https://files.pythonhosted.org/packages/70/7d/a4d7b1961e4903bc37fffb7ddcfaa7beb250f67d97cfd1ee1d5cddb1ec90/ruff-0.14.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:21d48fa744c9d1cb8d71eb0a740c4dd02751a5de9db9a730a8ef75ca34cf138e", size = 14211332, upload-time = 
"2025-12-04T15:06:06.027Z" }, - { url = "https://files.pythonhosted.org/packages/5d/93/2a5063341fa17054e5c86582136e9895db773e3c2ffb770dde50a09f35f0/ruff-0.14.8-py3-none-win32.whl", hash = "sha256:15f04cb45c051159baebb0f0037f404f1dc2f15a927418f29730f411a79bc4e7", size = 13151890, upload-time = "2025-12-04T15:06:11.668Z" }, - { url = "https://files.pythonhosted.org/packages/02/1c/65c61a0859c0add13a3e1cbb6024b42de587456a43006ca2d4fd3d1618fe/ruff-0.14.8-py3-none-win_amd64.whl", hash = "sha256:9eeb0b24242b5bbff3011409a739929f497f3fb5fe3b5698aba5e77e8c833097", size = 14537826, upload-time = "2025-12-04T15:06:26.409Z" }, - { url = "https://files.pythonhosted.org/packages/6d/63/8b41cea3afd7f58eb64ac9251668ee0073789a3bc9ac6f816c8c6fef986d/ruff-0.14.8-py3-none-win_arm64.whl", hash = "sha256:965a582c93c63fe715fd3e3f8aa37c4b776777203d8e1d8aa3cc0c14424a4b99", size = 13634522, upload-time = "2025-12-04T15:06:43.212Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/ed/d9/f7a0c4b3a2bf2556cd5d99b05372c29980249ef71e8e32669ba77428c82c/ruff-0.14.8.tar.gz", hash = "sha256:774ed0dd87d6ce925e3b8496feb3a00ac564bea52b9feb551ecd17e0a23d1eed", size = 5765385 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/b8/9537b52010134b1d2b72870cc3f92d5fb759394094741b09ceccae183fbe/ruff-0.14.8-py3-none-linux_armv6l.whl", hash = "sha256:ec071e9c82eca417f6111fd39f7043acb53cd3fde9b1f95bbed745962e345afb", size = 13441540 }, + { url = "https://files.pythonhosted.org/packages/24/00/99031684efb025829713682012b6dd37279b1f695ed1b01725f85fd94b38/ruff-0.14.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8cdb162a7159f4ca36ce980a18c43d8f036966e7f73f866ac8f493b75e0c27e9", size = 13669384 }, + { url = "https://files.pythonhosted.org/packages/72/64/3eb5949169fc19c50c04f28ece2c189d3b6edd57e5b533649dae6ca484fe/ruff-0.14.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e2fcbefe91f9fad0916850edf0854530c15bd1926b6b779de47e9ab619ea38f", size = 12806917 }, + { url = 
"https://files.pythonhosted.org/packages/c4/08/5250babb0b1b11910f470370ec0cbc67470231f7cdc033cee57d4976f941/ruff-0.14.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d70721066a296f45786ec31916dc287b44040f553da21564de0ab4d45a869b", size = 13256112 }, + { url = "https://files.pythonhosted.org/packages/78/4c/6c588e97a8e8c2d4b522c31a579e1df2b4d003eddfbe23d1f262b1a431ff/ruff-0.14.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c87e09b3cd9d126fc67a9ecd3b5b1d3ded2b9c7fce3f16e315346b9d05cfb52", size = 13227559 }, + { url = "https://files.pythonhosted.org/packages/23/ce/5f78cea13eda8eceac71b5f6fa6e9223df9b87bb2c1891c166d1f0dce9f1/ruff-0.14.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d62cb310c4fbcb9ee4ac023fe17f984ae1e12b8a4a02e3d21489f9a2a5f730c", size = 13896379 }, + { url = "https://files.pythonhosted.org/packages/cf/79/13de4517c4dadce9218a20035b21212a4c180e009507731f0d3b3f5df85a/ruff-0.14.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1af35c2d62633d4da0521178e8a2641c636d2a7153da0bac1b30cfd4ccd91344", size = 15372786 }, + { url = "https://files.pythonhosted.org/packages/00/06/33df72b3bb42be8a1c3815fd4fae83fa2945fc725a25d87ba3e42d1cc108/ruff-0.14.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25add4575ffecc53d60eed3f24b1e934493631b48ebbc6ebaf9d8517924aca4b", size = 14990029 }, + { url = "https://files.pythonhosted.org/packages/64/61/0f34927bd90925880394de0e081ce1afab66d7b3525336f5771dcf0cb46c/ruff-0.14.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c943d847b7f02f7db4201a0600ea7d244d8a404fbb639b439e987edcf2baf9a", size = 14407037 }, + { url = "https://files.pythonhosted.org/packages/96/bc/058fe0aefc0fbf0d19614cb6d1a3e2c048f7dc77ca64957f33b12cfdc5ef/ruff-0.14.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6e8bf7b4f627548daa1b69283dac5a296bfe9ce856703b03130732e20ddfe2", size = 
14102390 }, + { url = "https://files.pythonhosted.org/packages/af/a4/e4f77b02b804546f4c17e8b37a524c27012dd6ff05855d2243b49a7d3cb9/ruff-0.14.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:7aaf2974f378e6b01d1e257c6948207aec6a9b5ba53fab23d0182efb887a0e4a", size = 14230793 }, + { url = "https://files.pythonhosted.org/packages/3f/52/bb8c02373f79552e8d087cedaffad76b8892033d2876c2498a2582f09dcf/ruff-0.14.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e5758ca513c43ad8a4ef13f0f081f80f08008f410790f3611a21a92421ab045b", size = 13160039 }, + { url = "https://files.pythonhosted.org/packages/1f/ad/b69d6962e477842e25c0b11622548df746290cc6d76f9e0f4ed7456c2c31/ruff-0.14.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f74f7ba163b6e85a8d81a590363bf71618847e5078d90827749bfda1d88c9cdf", size = 13205158 }, + { url = "https://files.pythonhosted.org/packages/06/63/54f23da1315c0b3dfc1bc03fbc34e10378918a20c0b0f086418734e57e74/ruff-0.14.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eed28f6fafcc9591994c42254f5a5c5ca40e69a30721d2ab18bb0bb3baac3ab6", size = 13469550 }, + { url = "https://files.pythonhosted.org/packages/70/7d/a4d7b1961e4903bc37fffb7ddcfaa7beb250f67d97cfd1ee1d5cddb1ec90/ruff-0.14.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:21d48fa744c9d1cb8d71eb0a740c4dd02751a5de9db9a730a8ef75ca34cf138e", size = 14211332 }, + { url = "https://files.pythonhosted.org/packages/5d/93/2a5063341fa17054e5c86582136e9895db773e3c2ffb770dde50a09f35f0/ruff-0.14.8-py3-none-win32.whl", hash = "sha256:15f04cb45c051159baebb0f0037f404f1dc2f15a927418f29730f411a79bc4e7", size = 13151890 }, + { url = "https://files.pythonhosted.org/packages/02/1c/65c61a0859c0add13a3e1cbb6024b42de587456a43006ca2d4fd3d1618fe/ruff-0.14.8-py3-none-win_amd64.whl", hash = "sha256:9eeb0b24242b5bbff3011409a739929f497f3fb5fe3b5698aba5e77e8c833097", size = 14537826 }, + { url = 
"https://files.pythonhosted.org/packages/6d/63/8b41cea3afd7f58eb64ac9251668ee0073789a3bc9ac6f816c8c6fef986d/ruff-0.14.8-py3-none-win_arm64.whl", hash = "sha256:965a582c93c63fe715fd3e3f8aa37c4b776777203d8e1d8aa3cc0c14424a4b99", size = 13634522 }, ] [[package]] @@ -3275,31 +3270,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830 }, ] [[package]] name = "safetensors" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878, upload-time = "2025-11-19T15:18:43.199Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781, upload-time = "2025-11-19T15:18:35.84Z" }, - { url = "https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058, upload-time = "2025-11-19T15:18:34.416Z" }, - { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748, upload-time = "2025-11-19T15:18:09.79Z" }, - { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881, upload-time = "2025-11-19T15:18:16.145Z" }, - { url = "https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463, upload-time = "2025-11-19T15:18:21.11Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855, upload-time = "2025-11-19T15:18:25.719Z" }, - { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152, upload-time = "2025-11-19T15:18:33.023Z" }, - { url = "https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856, upload-time = "2025-11-19T15:18:31.075Z" }, - { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060, upload-time = "2025-11-19T15:18:37.211Z" }, - { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715, upload-time = "2025-11-19T15:18:38.689Z" }, - { url = "https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377, upload-time = "2025-11-19T15:18:40.162Z" }, - { url = 
"https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368, upload-time = "2025-11-19T15:18:41.627Z" }, - { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423, upload-time = "2025-11-19T15:18:45.74Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380, upload-time = "2025-11-19T15:18:44.427Z" }, + { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781 }, + { url = "https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058 }, + { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748 }, + { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881 }, + { url = "https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463 }, + { url = "https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855 }, + { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152 }, + { url = "https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856 }, + { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060 }, + { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715 }, + { url = "https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377 }, + { url = "https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368 }, + { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423 }, + { url = "https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380 }, ] [[package]] @@ -3312,28 +3307,28 @@ dependencies = [ { name = "scipy" }, { name = "threadpoolctl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96", size = 9259818, upload-time = "2025-09-09T08:20:43.19Z" }, - { url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476", size = 8636997, upload-time = "2025-09-09T08:20:45.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b", size = 9478381, upload-time = "2025-09-09T08:20:47.982Z" }, - { url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44", size = 9300296, upload-time = "2025-09-09T08:20:50.366Z" }, - { url = "https://files.pythonhosted.org/packages/c6/99/ed35197a158f1fdc2fe7c3680e9c70d0128f662e1fee4ed495f4b5e13db0/scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290", size = 8731256, upload-time = "2025-09-09T08:20:52.627Z" }, - { url = "https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7", size = 9212382, upload-time = "2025-09-09T08:20:54.731Z" }, - { url = "https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe", size = 8592042, upload-time = "2025-09-09T08:20:57.313Z" }, - { url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f", size = 9434180, upload-time = "2025-09-09T08:20:59.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0", size = 9283660, upload-time = "2025-09-09T08:21:01.71Z" }, - { url = "https://files.pythonhosted.org/packages/a7/c7/03000262759d7b6f38c836ff9d512f438a70d8a8ddae68ee80de72dcfb63/scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c", size = 8702057, upload-time = "2025-09-09T08:21:04.234Z" }, - { url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8", size = 9558731, upload-time = "2025-09-09T08:21:06.381Z" }, - { url = "https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a", size = 9038852, upload-time = "2025-09-09T08:21:08.628Z" }, - { url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c", size = 9527094, upload-time = "2025-09-09T08:21:11.486Z" }, - { url = "https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c", size = 9367436, upload-time = "2025-09-09T08:21:13.602Z" }, - { url = 
"https://files.pythonhosted.org/packages/b5/aa/8444be3cfb10451617ff9d177b3c190288f4563e6c50ff02728be67ad094/scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973", size = 9275749, upload-time = "2025-09-09T08:21:15.96Z" }, - { url = "https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33", size = 9208906, upload-time = "2025-09-09T08:21:18.557Z" }, - { url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615", size = 8627836, upload-time = "2025-09-09T08:21:20.695Z" }, - { url = "https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106", size = 9426236, upload-time = "2025-09-09T08:21:22.645Z" }, - { url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61", size = 9312593, upload-time = "2025-09-09T08:21:24.65Z" }, - { url = "https://files.pythonhosted.org/packages/8e/87/24f541b6d62b1794939ae6422f8023703bbf6900378b2b34e0b4384dfefd/scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8", size = 8820007, upload-time = "2025-09-09T08:21:26.713Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96", size = 9259818 }, + { url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476", size = 8636997 }, + { url = "https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b", size = 9478381 }, + { url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44", size = 9300296 }, + { url = "https://files.pythonhosted.org/packages/c6/99/ed35197a158f1fdc2fe7c3680e9c70d0128f662e1fee4ed495f4b5e13db0/scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290", size = 8731256 }, + { url = "https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7", size = 9212382 }, + { url = 
"https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe", size = 8592042 }, + { url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f", size = 9434180 }, + { url = "https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0", size = 9283660 }, + { url = "https://files.pythonhosted.org/packages/a7/c7/03000262759d7b6f38c836ff9d512f438a70d8a8ddae68ee80de72dcfb63/scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c", size = 8702057 }, + { url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8", size = 9558731 }, + { url = "https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a", size = 9038852 }, + { url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c", size = 9527094 }, + { url = 
"https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c", size = 9367436 }, + { url = "https://files.pythonhosted.org/packages/b5/aa/8444be3cfb10451617ff9d177b3c190288f4563e6c50ff02728be67ad094/scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973", size = 9275749 }, + { url = "https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33", size = 9208906 }, + { url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615", size = 8627836 }, + { url = "https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106", size = 9426236 }, + { url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61", size = 9312593 }, + { url = "https://files.pythonhosted.org/packages/8e/87/24f541b6d62b1794939ae6422f8023703bbf6900378b2b34e0b4384dfefd/scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8", size = 8820007 }, ] [[package]] @@ -3343,106 
+3338,106 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, - { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, - { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, - { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, - { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, - { url = "https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" }, - { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, - { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, - { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, - { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, - { url = "https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, - { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, - { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, - { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" }, - { url = "https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, - { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, - { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, - { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, - { url = "https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, - { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, - { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, - { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, - { url = "https://files.pythonhosted.org/packages/99/f6/99b10fd70f2d864c1e29a28bbcaa0c6340f9d8518396542d9ea3b4aaae15/scipy-1.16.3-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:875555ce62743e1d54f06cdf22c1e0bc47b91130ac40fe5d783b6dfa114beeb6", size = 36606469, upload-time = "2025-10-28T17:36:08.741Z" }, - { url = "https://files.pythonhosted.org/packages/4d/74/043b54f2319f48ea940dd025779fa28ee360e6b95acb7cd188fad4391c6b/scipy-1.16.3-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:bb61878c18a470021fb515a843dc7a76961a8daceaaaa8bad1332f1bf4b54657", size = 28872043, upload-time = "2025-10-28T17:36:16.599Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e1/24b7e50cc1c4ee6ffbcb1f27fe9f4c8b40e7911675f6d2d20955f41c6348/scipy-1.16.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f2622206f5559784fa5c4b53a950c3c7c1cf3e84ca1b9c4b6c03f062f289ca26", size = 20862952, upload-time = "2025-10-28T17:36:22.966Z" }, - { url = "https://files.pythonhosted.org/packages/dd/3a/3e8c01a4d742b730df368e063787c6808597ccb38636ed821d10b39ca51b/scipy-1.16.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7f68154688c515cdb541a31ef8eb66d8cd1050605be9dcd74199cbd22ac739bc", size = 23508512, upload-time = "2025-10-28T17:36:29.731Z" }, - { url = "https://files.pythonhosted.org/packages/1f/60/c45a12b98ad591536bfe5330cb3cfe1850d7570259303563b1721564d458/scipy-1.16.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3c820ddb80029fe9f43d61b81d8b488d3ef8ca010d15122b152db77dc94c22", size = 33413639, upload-time = "2025-10-28T17:36:37.982Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/bc/35957d88645476307e4839712642896689df442f3e53b0fa016ecf8a3357/scipy-1.16.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3837938ae715fc0fe3c39c0202de3a8853aff22ca66781ddc2ade7554b7e2cc", size = 35704729, upload-time = "2025-10-28T17:36:46.547Z" }, - { url = "https://files.pythonhosted.org/packages/3b/15/89105e659041b1ca11c386e9995aefacd513a78493656e57789f9d9eab61/scipy-1.16.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aadd23f98f9cb069b3bd64ddc900c4d277778242e961751f77a8cb5c4b946fb0", size = 36086251, upload-time = "2025-10-28T17:36:55.161Z" }, - { url = "https://files.pythonhosted.org/packages/1a/87/c0ea673ac9c6cc50b3da2196d860273bc7389aa69b64efa8493bdd25b093/scipy-1.16.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b7c5f1bda1354d6a19bc6af73a649f8285ca63ac6b52e64e658a5a11d4d69800", size = 38716681, upload-time = "2025-10-28T17:37:04.1Z" }, - { url = "https://files.pythonhosted.org/packages/91/06/837893227b043fb9b0d13e4bd7586982d8136cb249ffb3492930dab905b8/scipy-1.16.3-cp314-cp314-win_amd64.whl", hash = "sha256:e5d42a9472e7579e473879a1990327830493a7047506d58d73fc429b84c1d49d", size = 39358423, upload-time = "2025-10-28T17:38:20.005Z" }, - { url = "https://files.pythonhosted.org/packages/95/03/28bce0355e4d34a7c034727505a02d19548549e190bedd13a721e35380b7/scipy-1.16.3-cp314-cp314-win_arm64.whl", hash = "sha256:6020470b9d00245926f2d5bb93b119ca0340f0d564eb6fbaad843eaebf9d690f", size = 26135027, upload-time = "2025-10-28T17:38:24.966Z" }, - { url = "https://files.pythonhosted.org/packages/b2/6f/69f1e2b682efe9de8fe9f91040f0cd32f13cfccba690512ba4c582b0bc29/scipy-1.16.3-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:e1d27cbcb4602680a49d787d90664fa4974063ac9d4134813332a8c53dbe667c", size = 37028379, upload-time = "2025-10-28T17:37:14.061Z" }, - { url = 
"https://files.pythonhosted.org/packages/7c/2d/e826f31624a5ebbab1cd93d30fd74349914753076ed0593e1d56a98c4fb4/scipy-1.16.3-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9b9c9c07b6d56a35777a1b4cc8966118fb16cfd8daf6743867d17d36cfad2d40", size = 29400052, upload-time = "2025-10-28T17:37:21.709Z" }, - { url = "https://files.pythonhosted.org/packages/69/27/d24feb80155f41fd1f156bf144e7e049b4e2b9dd06261a242905e3bc7a03/scipy-1.16.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:3a4c460301fb2cffb7f88528f30b3127742cff583603aa7dc964a52c463b385d", size = 21391183, upload-time = "2025-10-28T17:37:29.559Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d3/1b229e433074c5738a24277eca520a2319aac7465eea7310ea6ae0e98ae2/scipy-1.16.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:f667a4542cc8917af1db06366d3f78a5c8e83badd56409f94d1eac8d8d9133fa", size = 23930174, upload-time = "2025-10-28T17:37:36.306Z" }, - { url = "https://files.pythonhosted.org/packages/16/9d/d9e148b0ec680c0f042581a2be79a28a7ab66c0c4946697f9e7553ead337/scipy-1.16.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f379b54b77a597aa7ee5e697df0d66903e41b9c85a6dd7946159e356319158e8", size = 33497852, upload-time = "2025-10-28T17:37:42.228Z" }, - { url = "https://files.pythonhosted.org/packages/2f/22/4e5f7561e4f98b7bea63cf3fd7934bff1e3182e9f1626b089a679914d5c8/scipy-1.16.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4aff59800a3b7f786b70bfd6ab551001cb553244988d7d6b8299cb1ea653b353", size = 35798595, upload-time = "2025-10-28T17:37:48.102Z" }, - { url = "https://files.pythonhosted.org/packages/83/42/6644d714c179429fc7196857866f219fef25238319b650bb32dde7bf7a48/scipy-1.16.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:da7763f55885045036fabcebd80144b757d3db06ab0861415d1c3b7c69042146", size = 36186269, upload-time = "2025-10-28T17:37:53.72Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/70/64b4d7ca92f9cf2e6fc6aaa2eecf80bb9b6b985043a9583f32f8177ea122/scipy-1.16.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ffa6eea95283b2b8079b821dc11f50a17d0571c92b43e2b5b12764dc5f9b285d", size = 38802779, upload-time = "2025-10-28T17:37:59.393Z" }, - { url = "https://files.pythonhosted.org/packages/61/82/8d0e39f62764cce5ffd5284131e109f07cf8955aef9ab8ed4e3aa5e30539/scipy-1.16.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d9f48cafc7ce94cf9b15c6bffdc443a81a27bf7075cf2dcd5c8b40f85d10c4e7", size = 39471128, upload-time = "2025-10-28T17:38:05.259Z" }, - { url = "https://files.pythonhosted.org/packages/64/47/a494741db7280eae6dc033510c319e34d42dd41b7ac0c7ead39354d1a2b5/scipy-1.16.3-cp314-cp314t-win_arm64.whl", hash = "sha256:21d9d6b197227a12dcbf9633320a4e34c6b0e51c57268df255a0942983bac562", size = 26464127, upload-time = "2025-10-28T17:38:11.34Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043 }, + { url = "https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986 }, + { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814 }, + { url = 
"https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795 }, + { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476 }, + { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692 }, + { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345 }, + { url = "https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975 }, + { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926 }, + { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014 }, + { url = 
"https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856 }, + { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306 }, + { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371 }, + { url = "https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877 }, + { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103 }, + { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297 }, + { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756 }, + { url = 
"https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566 }, + { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877 }, + { url = "https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366 }, + { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931 }, + { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081 }, + { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244 }, + { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753 }, + { url = 
"https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912 }, + { url = "https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371 }, + { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477 }, + { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678 }, + { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178 }, + { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246 }, + { url = "https://files.pythonhosted.org/packages/99/f6/99b10fd70f2d864c1e29a28bbcaa0c6340f9d8518396542d9ea3b4aaae15/scipy-1.16.3-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:875555ce62743e1d54f06cdf22c1e0bc47b91130ac40fe5d783b6dfa114beeb6", size = 36606469 }, + { url = 
"https://files.pythonhosted.org/packages/4d/74/043b54f2319f48ea940dd025779fa28ee360e6b95acb7cd188fad4391c6b/scipy-1.16.3-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:bb61878c18a470021fb515a843dc7a76961a8daceaaaa8bad1332f1bf4b54657", size = 28872043 }, + { url = "https://files.pythonhosted.org/packages/4d/e1/24b7e50cc1c4ee6ffbcb1f27fe9f4c8b40e7911675f6d2d20955f41c6348/scipy-1.16.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f2622206f5559784fa5c4b53a950c3c7c1cf3e84ca1b9c4b6c03f062f289ca26", size = 20862952 }, + { url = "https://files.pythonhosted.org/packages/dd/3a/3e8c01a4d742b730df368e063787c6808597ccb38636ed821d10b39ca51b/scipy-1.16.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7f68154688c515cdb541a31ef8eb66d8cd1050605be9dcd74199cbd22ac739bc", size = 23508512 }, + { url = "https://files.pythonhosted.org/packages/1f/60/c45a12b98ad591536bfe5330cb3cfe1850d7570259303563b1721564d458/scipy-1.16.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3c820ddb80029fe9f43d61b81d8b488d3ef8ca010d15122b152db77dc94c22", size = 33413639 }, + { url = "https://files.pythonhosted.org/packages/71/bc/35957d88645476307e4839712642896689df442f3e53b0fa016ecf8a3357/scipy-1.16.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3837938ae715fc0fe3c39c0202de3a8853aff22ca66781ddc2ade7554b7e2cc", size = 35704729 }, + { url = "https://files.pythonhosted.org/packages/3b/15/89105e659041b1ca11c386e9995aefacd513a78493656e57789f9d9eab61/scipy-1.16.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aadd23f98f9cb069b3bd64ddc900c4d277778242e961751f77a8cb5c4b946fb0", size = 36086251 }, + { url = "https://files.pythonhosted.org/packages/1a/87/c0ea673ac9c6cc50b3da2196d860273bc7389aa69b64efa8493bdd25b093/scipy-1.16.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b7c5f1bda1354d6a19bc6af73a649f8285ca63ac6b52e64e658a5a11d4d69800", size = 38716681 }, + { url = 
"https://files.pythonhosted.org/packages/91/06/837893227b043fb9b0d13e4bd7586982d8136cb249ffb3492930dab905b8/scipy-1.16.3-cp314-cp314-win_amd64.whl", hash = "sha256:e5d42a9472e7579e473879a1990327830493a7047506d58d73fc429b84c1d49d", size = 39358423 }, + { url = "https://files.pythonhosted.org/packages/95/03/28bce0355e4d34a7c034727505a02d19548549e190bedd13a721e35380b7/scipy-1.16.3-cp314-cp314-win_arm64.whl", hash = "sha256:6020470b9d00245926f2d5bb93b119ca0340f0d564eb6fbaad843eaebf9d690f", size = 26135027 }, + { url = "https://files.pythonhosted.org/packages/b2/6f/69f1e2b682efe9de8fe9f91040f0cd32f13cfccba690512ba4c582b0bc29/scipy-1.16.3-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:e1d27cbcb4602680a49d787d90664fa4974063ac9d4134813332a8c53dbe667c", size = 37028379 }, + { url = "https://files.pythonhosted.org/packages/7c/2d/e826f31624a5ebbab1cd93d30fd74349914753076ed0593e1d56a98c4fb4/scipy-1.16.3-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9b9c9c07b6d56a35777a1b4cc8966118fb16cfd8daf6743867d17d36cfad2d40", size = 29400052 }, + { url = "https://files.pythonhosted.org/packages/69/27/d24feb80155f41fd1f156bf144e7e049b4e2b9dd06261a242905e3bc7a03/scipy-1.16.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:3a4c460301fb2cffb7f88528f30b3127742cff583603aa7dc964a52c463b385d", size = 21391183 }, + { url = "https://files.pythonhosted.org/packages/f8/d3/1b229e433074c5738a24277eca520a2319aac7465eea7310ea6ae0e98ae2/scipy-1.16.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:f667a4542cc8917af1db06366d3f78a5c8e83badd56409f94d1eac8d8d9133fa", size = 23930174 }, + { url = "https://files.pythonhosted.org/packages/16/9d/d9e148b0ec680c0f042581a2be79a28a7ab66c0c4946697f9e7553ead337/scipy-1.16.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f379b54b77a597aa7ee5e697df0d66903e41b9c85a6dd7946159e356319158e8", size = 33497852 }, + { url = 
"https://files.pythonhosted.org/packages/2f/22/4e5f7561e4f98b7bea63cf3fd7934bff1e3182e9f1626b089a679914d5c8/scipy-1.16.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4aff59800a3b7f786b70bfd6ab551001cb553244988d7d6b8299cb1ea653b353", size = 35798595 }, + { url = "https://files.pythonhosted.org/packages/83/42/6644d714c179429fc7196857866f219fef25238319b650bb32dde7bf7a48/scipy-1.16.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:da7763f55885045036fabcebd80144b757d3db06ab0861415d1c3b7c69042146", size = 36186269 }, + { url = "https://files.pythonhosted.org/packages/ac/70/64b4d7ca92f9cf2e6fc6aaa2eecf80bb9b6b985043a9583f32f8177ea122/scipy-1.16.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ffa6eea95283b2b8079b821dc11f50a17d0571c92b43e2b5b12764dc5f9b285d", size = 38802779 }, + { url = "https://files.pythonhosted.org/packages/61/82/8d0e39f62764cce5ffd5284131e109f07cf8955aef9ab8ed4e3aa5e30539/scipy-1.16.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d9f48cafc7ce94cf9b15c6bffdc443a81a27bf7075cf2dcd5c8b40f85d10c4e7", size = 39471128 }, + { url = "https://files.pythonhosted.org/packages/64/47/a494741db7280eae6dc033510c319e34d42dd41b7ac0c7ead39354d1a2b5/scipy-1.16.3-cp314-cp314t-win_arm64.whl", hash = "sha256:21d9d6b197227a12dcbf9633320a4e34c6b0e51c57268df255a0942983bac562", size = 26464127 }, ] [[package]] name = "sentencepiece" version = "0.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/15/2e7a025fc62d764b151ae6d0f2a92f8081755ebe8d4a64099accc6f77ba6/sentencepiece-0.2.1.tar.gz", hash = "sha256:8138cec27c2f2282f4a34d9a016e3374cd40e5c6e9cb335063db66a0a3b71fad", size = 3228515, upload-time = "2025-08-12T07:00:51.718Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/be/32ce495aa1d0e0c323dcb1ba87096037358edee539cac5baf8755a6bd396/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:57cae326c8727de58c85977b175af132a7138d84c764635d7e71bbee7e774133", size = 1943152, upload-time = "2025-08-12T06:59:40.048Z" }, - { url = "https://files.pythonhosted.org/packages/88/7e/ff23008899a58678e98c6ff592bf4d368eee5a71af96d0df6b38a039dd4f/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:56dd39a3c4d6493db3cdca7e8cc68c6b633f0d4195495cbadfcf5af8a22d05a6", size = 1325651, upload-time = "2025-08-12T06:59:41.536Z" }, - { url = "https://files.pythonhosted.org/packages/19/84/42eb3ce4796777a1b5d3699dfd4dca85113e68b637f194a6c8d786f16a04/sentencepiece-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9381351182ff9888cc80e41c632e7e274b106f450de33d67a9e8f6043da6f76", size = 1253645, upload-time = "2025-08-12T06:59:42.903Z" }, - { url = "https://files.pythonhosted.org/packages/89/fa/d3d5ebcba3cb9e6d3775a096251860c41a6bc53a1b9461151df83fe93255/sentencepiece-0.2.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99f955df238021bf11f0fc37cdb54fd5e5b5f7fd30ecc3d93fb48b6815437167", size = 1316273, upload-time = "2025-08-12T06:59:44.476Z" }, - { url = "https://files.pythonhosted.org/packages/04/88/14f2f4a2b922d8b39be45bf63d79e6cd3a9b2f248b2fcb98a69b12af12f5/sentencepiece-0.2.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cdfecef430d985f1c2bcbfff3defd1d95dae876fbd0173376012d2d7d24044b", size = 1387881, upload-time = "2025-08-12T06:59:46.09Z" }, - { url = "https://files.pythonhosted.org/packages/fd/b8/903e5ccb77b4ef140605d5d71b4f9e0ad95d456d6184688073ed11712809/sentencepiece-0.2.1-cp312-cp312-win32.whl", hash = "sha256:a483fd29a34c3e34c39ac5556b0a90942bec253d260235729e50976f5dba1068", size = 999540, upload-time = "2025-08-12T06:59:48.023Z" }, - { url = "https://files.pythonhosted.org/packages/2d/81/92df5673c067148c2545b1bfe49adfd775bcc3a169a047f5a0e6575ddaca/sentencepiece-0.2.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:4cdc7c36234fda305e85c32949c5211faaf8dd886096c7cea289ddc12a2d02de", size = 1054671, upload-time = "2025-08-12T06:59:49.895Z" }, - { url = "https://files.pythonhosted.org/packages/fe/02/c5e3bc518655d714622bec87d83db9cdba1cd0619a4a04e2109751c4f47f/sentencepiece-0.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:daeb5e9e9fcad012324807856113708614d534f596d5008638eb9b40112cd9e4", size = 1033923, upload-time = "2025-08-12T06:59:51.952Z" }, - { url = "https://files.pythonhosted.org/packages/ba/4a/85fbe1706d4d04a7e826b53f327c4b80f849cf1c7b7c5e31a20a97d8f28b/sentencepiece-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dcd8161eee7b41aae57ded06272905dbd680a0a04b91edd0f64790c796b2f706", size = 1943150, upload-time = "2025-08-12T06:59:53.588Z" }, - { url = "https://files.pythonhosted.org/packages/c2/83/4cfb393e287509fc2155480b9d184706ef8d9fa8cbf5505d02a5792bf220/sentencepiece-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c6c8f42949f419ff8c7e9960dbadcfbc982d7b5efc2f6748210d3dd53a7de062", size = 1325651, upload-time = "2025-08-12T06:59:55.073Z" }, - { url = "https://files.pythonhosted.org/packages/8d/de/5a007fb53b1ab0aafc69d11a5a3dd72a289d5a3e78dcf2c3a3d9b14ffe93/sentencepiece-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:097f3394e99456e9e4efba1737c3749d7e23563dd1588ce71a3d007f25475fff", size = 1253641, upload-time = "2025-08-12T06:59:56.562Z" }, - { url = "https://files.pythonhosted.org/packages/2c/d2/f552be5928105588f4f4d66ee37dd4c61460d8097e62d0e2e0eec41bc61d/sentencepiece-0.2.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7b670879c370d350557edabadbad1f6561a9e6968126e6debca4029e5547820", size = 1316271, upload-time = "2025-08-12T06:59:58.109Z" }, - { url = "https://files.pythonhosted.org/packages/96/df/0cfe748ace5485be740fed9476dee7877f109da32ed0d280312c94ec259f/sentencepiece-0.2.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:c7f0fd2f2693309e6628aeeb2e2faf6edd221134dfccac3308ca0de01f8dab47", size = 1387882, upload-time = "2025-08-12T07:00:00.701Z" }, - { url = "https://files.pythonhosted.org/packages/ac/dd/f7774d42a881ced8e1739f393ab1e82ece39fc9abd4779e28050c2e975b5/sentencepiece-0.2.1-cp313-cp313-win32.whl", hash = "sha256:92b3816aa2339355fda2c8c4e021a5de92180b00aaccaf5e2808972e77a4b22f", size = 999541, upload-time = "2025-08-12T07:00:02.709Z" }, - { url = "https://files.pythonhosted.org/packages/dd/e9/932b9eae6fd7019548321eee1ab8d5e3b3d1294df9d9a0c9ac517c7b636d/sentencepiece-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:10ed3dab2044c47f7a2e7b4969b0c430420cdd45735d78c8f853191fa0e3148b", size = 1054669, upload-time = "2025-08-12T07:00:04.915Z" }, - { url = "https://files.pythonhosted.org/packages/c9/3a/76488a00ea7d6931689cda28726a1447d66bf1a4837943489314593d5596/sentencepiece-0.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac650534e2251083c5f75dde4ff28896ce7c8904133dc8fef42780f4d5588fcd", size = 1033922, upload-time = "2025-08-12T07:00:06.496Z" }, - { url = "https://files.pythonhosted.org/packages/4a/b6/08fe2ce819e02ccb0296f4843e3f195764ce9829cbda61b7513f29b95718/sentencepiece-0.2.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:8dd4b477a7b069648d19363aad0cab9bad2f4e83b2d179be668efa672500dc94", size = 1946052, upload-time = "2025-08-12T07:00:08.136Z" }, - { url = "https://files.pythonhosted.org/packages/ab/d9/1ea0e740591ff4c6fc2b6eb1d7510d02f3fb885093f19b2f3abd1363b402/sentencepiece-0.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0c0f672da370cc490e4c59d89e12289778310a0e71d176c541e4834759e1ae07", size = 1327408, upload-time = "2025-08-12T07:00:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/99/7e/1fb26e8a21613f6200e1ab88824d5d203714162cf2883248b517deb500b7/sentencepiece-0.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad8493bea8432dae8d6830365352350f3b4144415a1d09c4c8cb8d30cf3b6c3c", size = 1254857, upload-time = 
"2025-08-12T07:00:11.021Z" }, - { url = "https://files.pythonhosted.org/packages/bc/85/c72fd1f3c7a6010544d6ae07f8ddb38b5e2a7e33bd4318f87266c0bbafbf/sentencepiece-0.2.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b81a24733726e3678d2db63619acc5a8dccd074f7aa7a54ecd5ca33ca6d2d596", size = 1315722, upload-time = "2025-08-12T07:00:12.989Z" }, - { url = "https://files.pythonhosted.org/packages/4a/e8/661e5bd82a8aa641fd6c1020bd0e890ef73230a2b7215ddf9c8cd8e941c2/sentencepiece-0.2.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0a81799d0a68d618e89063fb423c3001a034c893069135ffe51fee439ae474d6", size = 1387452, upload-time = "2025-08-12T07:00:15.088Z" }, - { url = "https://files.pythonhosted.org/packages/99/5e/ae66c361023a470afcbc1fbb8da722c72ea678a2fcd9a18f1a12598c7501/sentencepiece-0.2.1-cp313-cp313t-win32.whl", hash = "sha256:89a3ea015517c42c0341d0d962f3e6aaf2cf10d71b1932d475c44ba48d00aa2b", size = 1002501, upload-time = "2025-08-12T07:00:16.966Z" }, - { url = "https://files.pythonhosted.org/packages/c1/03/d332828c4ff764e16c1b56c2c8f9a33488bbe796b53fb6b9c4205ddbf167/sentencepiece-0.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:33f068c9382dc2e7c228eedfd8163b52baa86bb92f50d0488bf2b7da7032e484", size = 1057555, upload-time = "2025-08-12T07:00:18.573Z" }, - { url = "https://files.pythonhosted.org/packages/88/14/5aee0bf0864df9bd82bd59e7711362908e4935e3f9cdc1f57246b5d5c9b9/sentencepiece-0.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:b3616ad246f360e52c85781e47682d31abfb6554c779e42b65333d4b5f44ecc0", size = 1036042, upload-time = "2025-08-12T07:00:20.209Z" }, - { url = "https://files.pythonhosted.org/packages/24/9c/89eb8b2052f720a612478baf11c8227dcf1dc28cd4ea4c0c19506b5af2a2/sentencepiece-0.2.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5d0350b686c320068702116276cfb26c066dc7e65cfef173980b11bb4d606719", size = 1943147, upload-time = "2025-08-12T07:00:21.809Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/0b/a1432bc87f97c2ace36386ca23e8bd3b91fb40581b5e6148d24b24186419/sentencepiece-0.2.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c7f54a31cde6fa5cb030370566f68152a742f433f8d2be458463d06c208aef33", size = 1325624, upload-time = "2025-08-12T07:00:23.289Z" }, - { url = "https://files.pythonhosted.org/packages/ea/99/bbe054ebb5a5039457c590e0a4156ed073fb0fe9ce4f7523404dd5b37463/sentencepiece-0.2.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c83b85ab2d6576607f31df77ff86f28182be4a8de6d175d2c33ca609925f5da1", size = 1253670, upload-time = "2025-08-12T07:00:24.69Z" }, - { url = "https://files.pythonhosted.org/packages/19/ad/d5c7075f701bd97971d7c2ac2904f227566f51ef0838dfbdfdccb58cd212/sentencepiece-0.2.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1855f57db07b51fb51ed6c9c452f570624d2b169b36f0f79ef71a6e6c618cd8b", size = 1316247, upload-time = "2025-08-12T07:00:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/fb/03/35fbe5f3d9a7435eebd0b473e09584bd3cc354ce118b960445b060d33781/sentencepiece-0.2.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01e6912125cb45d3792f530a4d38f8e21bf884d6b4d4ade1b2de5cf7a8d2a52b", size = 1387894, upload-time = "2025-08-12T07:00:28.339Z" }, - { url = "https://files.pythonhosted.org/packages/dc/aa/956ef729aafb6c8f9c443104c9636489093bb5c61d6b90fc27aa1a865574/sentencepiece-0.2.1-cp314-cp314-win32.whl", hash = "sha256:c415c9de1447e0a74ae3fdb2e52f967cb544113a3a5ce3a194df185cbc1f962f", size = 1096698, upload-time = "2025-08-12T07:00:29.764Z" }, - { url = "https://files.pythonhosted.org/packages/b8/cb/fe400d8836952cc535c81a0ce47dc6875160e5fedb71d2d9ff0e9894c2a6/sentencepiece-0.2.1-cp314-cp314-win_amd64.whl", hash = "sha256:881b2e44b14fc19feade3cbed314be37de639fc415375cefaa5bc81a4be137fd", size = 1155115, upload-time = "2025-08-12T07:00:32.865Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/89/047921cf70f36c7b6b6390876b2399b3633ab73b8d0cb857e5a964238941/sentencepiece-0.2.1-cp314-cp314-win_arm64.whl", hash = "sha256:2005242a16d2dc3ac5fe18aa7667549134d37854823df4c4db244752453b78a8", size = 1133890, upload-time = "2025-08-12T07:00:34.763Z" }, - { url = "https://files.pythonhosted.org/packages/a1/11/5b414b9fae6255b5fb1e22e2ed3dc3a72d3a694e5703910e640ac78346bb/sentencepiece-0.2.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a19adcec27c524cb7069a1c741060add95f942d1cbf7ad0d104dffa0a7d28a2b", size = 1946081, upload-time = "2025-08-12T07:00:36.97Z" }, - { url = "https://files.pythonhosted.org/packages/77/eb/7a5682bb25824db8545f8e5662e7f3e32d72a508fdce086029d89695106b/sentencepiece-0.2.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e37e4b4c4a11662b5db521def4e44d4d30ae69a1743241412a93ae40fdcab4bb", size = 1327406, upload-time = "2025-08-12T07:00:38.669Z" }, - { url = "https://files.pythonhosted.org/packages/03/b0/811dae8fb9f2784e138785d481469788f2e0d0c109c5737372454415f55f/sentencepiece-0.2.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:477c81505db072b3ab627e7eab972ea1025331bd3a92bacbf798df2b75ea86ec", size = 1254846, upload-time = "2025-08-12T07:00:40.611Z" }, - { url = "https://files.pythonhosted.org/packages/ef/23/195b2e7ec85ebb6a547969f60b723c7aca5a75800ece6cc3f41da872d14e/sentencepiece-0.2.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:010f025a544ef770bb395091d57cb94deb9652d8972e0d09f71d85d5a0816c8c", size = 1315721, upload-time = "2025-08-12T07:00:42.914Z" }, - { url = "https://files.pythonhosted.org/packages/7e/aa/553dbe4178b5f23eb28e59393dddd64186178b56b81d9b8d5c3ff1c28395/sentencepiece-0.2.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:733e59ff1794d26db706cd41fc2d7ca5f6c64a820709cb801dc0ea31780d64ab", size = 1387458, upload-time = "2025-08-12T07:00:44.56Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/7c/08ff0012507297a4dd74a5420fdc0eb9e3e80f4e88cab1538d7f28db303d/sentencepiece-0.2.1-cp314-cp314t-win32.whl", hash = "sha256:d3233770f78e637dc8b1fda2cd7c3b99ec77e7505041934188a4e7fe751de3b0", size = 1099765, upload-time = "2025-08-12T07:00:46.058Z" }, - { url = "https://files.pythonhosted.org/packages/91/d5/2a69e1ce15881beb9ddfc7e3f998322f5cedcd5e4d244cb74dade9441663/sentencepiece-0.2.1-cp314-cp314t-win_amd64.whl", hash = "sha256:5e4366c97b68218fd30ea72d70c525e6e78a6c0a88650f57ac4c43c63b234a9d", size = 1157807, upload-time = "2025-08-12T07:00:47.673Z" }, - { url = "https://files.pythonhosted.org/packages/f3/16/54f611fcfc2d1c46cbe3ec4169780b2cfa7cf63708ef2b71611136db7513/sentencepiece-0.2.1-cp314-cp314t-win_arm64.whl", hash = "sha256:105e36e75cbac1292642045458e8da677b2342dcd33df503e640f0b457cb6751", size = 1136264, upload-time = "2025-08-12T07:00:49.485Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/15/15/2e7a025fc62d764b151ae6d0f2a92f8081755ebe8d4a64099accc6f77ba6/sentencepiece-0.2.1.tar.gz", hash = "sha256:8138cec27c2f2282f4a34d9a016e3374cd40e5c6e9cb335063db66a0a3b71fad", size = 3228515 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/be/32ce495aa1d0e0c323dcb1ba87096037358edee539cac5baf8755a6bd396/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:57cae326c8727de58c85977b175af132a7138d84c764635d7e71bbee7e774133", size = 1943152 }, + { url = "https://files.pythonhosted.org/packages/88/7e/ff23008899a58678e98c6ff592bf4d368eee5a71af96d0df6b38a039dd4f/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:56dd39a3c4d6493db3cdca7e8cc68c6b633f0d4195495cbadfcf5af8a22d05a6", size = 1325651 }, + { url = "https://files.pythonhosted.org/packages/19/84/42eb3ce4796777a1b5d3699dfd4dca85113e68b637f194a6c8d786f16a04/sentencepiece-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9381351182ff9888cc80e41c632e7e274b106f450de33d67a9e8f6043da6f76", size = 
1253645 }, + { url = "https://files.pythonhosted.org/packages/89/fa/d3d5ebcba3cb9e6d3775a096251860c41a6bc53a1b9461151df83fe93255/sentencepiece-0.2.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99f955df238021bf11f0fc37cdb54fd5e5b5f7fd30ecc3d93fb48b6815437167", size = 1316273 }, + { url = "https://files.pythonhosted.org/packages/04/88/14f2f4a2b922d8b39be45bf63d79e6cd3a9b2f248b2fcb98a69b12af12f5/sentencepiece-0.2.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cdfecef430d985f1c2bcbfff3defd1d95dae876fbd0173376012d2d7d24044b", size = 1387881 }, + { url = "https://files.pythonhosted.org/packages/fd/b8/903e5ccb77b4ef140605d5d71b4f9e0ad95d456d6184688073ed11712809/sentencepiece-0.2.1-cp312-cp312-win32.whl", hash = "sha256:a483fd29a34c3e34c39ac5556b0a90942bec253d260235729e50976f5dba1068", size = 999540 }, + { url = "https://files.pythonhosted.org/packages/2d/81/92df5673c067148c2545b1bfe49adfd775bcc3a169a047f5a0e6575ddaca/sentencepiece-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4cdc7c36234fda305e85c32949c5211faaf8dd886096c7cea289ddc12a2d02de", size = 1054671 }, + { url = "https://files.pythonhosted.org/packages/fe/02/c5e3bc518655d714622bec87d83db9cdba1cd0619a4a04e2109751c4f47f/sentencepiece-0.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:daeb5e9e9fcad012324807856113708614d534f596d5008638eb9b40112cd9e4", size = 1033923 }, + { url = "https://files.pythonhosted.org/packages/ba/4a/85fbe1706d4d04a7e826b53f327c4b80f849cf1c7b7c5e31a20a97d8f28b/sentencepiece-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dcd8161eee7b41aae57ded06272905dbd680a0a04b91edd0f64790c796b2f706", size = 1943150 }, + { url = "https://files.pythonhosted.org/packages/c2/83/4cfb393e287509fc2155480b9d184706ef8d9fa8cbf5505d02a5792bf220/sentencepiece-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c6c8f42949f419ff8c7e9960dbadcfbc982d7b5efc2f6748210d3dd53a7de062", size = 1325651 }, + { url = 
"https://files.pythonhosted.org/packages/8d/de/5a007fb53b1ab0aafc69d11a5a3dd72a289d5a3e78dcf2c3a3d9b14ffe93/sentencepiece-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:097f3394e99456e9e4efba1737c3749d7e23563dd1588ce71a3d007f25475fff", size = 1253641 }, + { url = "https://files.pythonhosted.org/packages/2c/d2/f552be5928105588f4f4d66ee37dd4c61460d8097e62d0e2e0eec41bc61d/sentencepiece-0.2.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7b670879c370d350557edabadbad1f6561a9e6968126e6debca4029e5547820", size = 1316271 }, + { url = "https://files.pythonhosted.org/packages/96/df/0cfe748ace5485be740fed9476dee7877f109da32ed0d280312c94ec259f/sentencepiece-0.2.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7f0fd2f2693309e6628aeeb2e2faf6edd221134dfccac3308ca0de01f8dab47", size = 1387882 }, + { url = "https://files.pythonhosted.org/packages/ac/dd/f7774d42a881ced8e1739f393ab1e82ece39fc9abd4779e28050c2e975b5/sentencepiece-0.2.1-cp313-cp313-win32.whl", hash = "sha256:92b3816aa2339355fda2c8c4e021a5de92180b00aaccaf5e2808972e77a4b22f", size = 999541 }, + { url = "https://files.pythonhosted.org/packages/dd/e9/932b9eae6fd7019548321eee1ab8d5e3b3d1294df9d9a0c9ac517c7b636d/sentencepiece-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:10ed3dab2044c47f7a2e7b4969b0c430420cdd45735d78c8f853191fa0e3148b", size = 1054669 }, + { url = "https://files.pythonhosted.org/packages/c9/3a/76488a00ea7d6931689cda28726a1447d66bf1a4837943489314593d5596/sentencepiece-0.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac650534e2251083c5f75dde4ff28896ce7c8904133dc8fef42780f4d5588fcd", size = 1033922 }, + { url = "https://files.pythonhosted.org/packages/4a/b6/08fe2ce819e02ccb0296f4843e3f195764ce9829cbda61b7513f29b95718/sentencepiece-0.2.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:8dd4b477a7b069648d19363aad0cab9bad2f4e83b2d179be668efa672500dc94", size = 1946052 }, + { url = 
"https://files.pythonhosted.org/packages/ab/d9/1ea0e740591ff4c6fc2b6eb1d7510d02f3fb885093f19b2f3abd1363b402/sentencepiece-0.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0c0f672da370cc490e4c59d89e12289778310a0e71d176c541e4834759e1ae07", size = 1327408 }, + { url = "https://files.pythonhosted.org/packages/99/7e/1fb26e8a21613f6200e1ab88824d5d203714162cf2883248b517deb500b7/sentencepiece-0.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad8493bea8432dae8d6830365352350f3b4144415a1d09c4c8cb8d30cf3b6c3c", size = 1254857 }, + { url = "https://files.pythonhosted.org/packages/bc/85/c72fd1f3c7a6010544d6ae07f8ddb38b5e2a7e33bd4318f87266c0bbafbf/sentencepiece-0.2.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b81a24733726e3678d2db63619acc5a8dccd074f7aa7a54ecd5ca33ca6d2d596", size = 1315722 }, + { url = "https://files.pythonhosted.org/packages/4a/e8/661e5bd82a8aa641fd6c1020bd0e890ef73230a2b7215ddf9c8cd8e941c2/sentencepiece-0.2.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0a81799d0a68d618e89063fb423c3001a034c893069135ffe51fee439ae474d6", size = 1387452 }, + { url = "https://files.pythonhosted.org/packages/99/5e/ae66c361023a470afcbc1fbb8da722c72ea678a2fcd9a18f1a12598c7501/sentencepiece-0.2.1-cp313-cp313t-win32.whl", hash = "sha256:89a3ea015517c42c0341d0d962f3e6aaf2cf10d71b1932d475c44ba48d00aa2b", size = 1002501 }, + { url = "https://files.pythonhosted.org/packages/c1/03/d332828c4ff764e16c1b56c2c8f9a33488bbe796b53fb6b9c4205ddbf167/sentencepiece-0.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:33f068c9382dc2e7c228eedfd8163b52baa86bb92f50d0488bf2b7da7032e484", size = 1057555 }, + { url = "https://files.pythonhosted.org/packages/88/14/5aee0bf0864df9bd82bd59e7711362908e4935e3f9cdc1f57246b5d5c9b9/sentencepiece-0.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:b3616ad246f360e52c85781e47682d31abfb6554c779e42b65333d4b5f44ecc0", size = 1036042 }, + { url = 
"https://files.pythonhosted.org/packages/24/9c/89eb8b2052f720a612478baf11c8227dcf1dc28cd4ea4c0c19506b5af2a2/sentencepiece-0.2.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5d0350b686c320068702116276cfb26c066dc7e65cfef173980b11bb4d606719", size = 1943147 }, + { url = "https://files.pythonhosted.org/packages/82/0b/a1432bc87f97c2ace36386ca23e8bd3b91fb40581b5e6148d24b24186419/sentencepiece-0.2.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c7f54a31cde6fa5cb030370566f68152a742f433f8d2be458463d06c208aef33", size = 1325624 }, + { url = "https://files.pythonhosted.org/packages/ea/99/bbe054ebb5a5039457c590e0a4156ed073fb0fe9ce4f7523404dd5b37463/sentencepiece-0.2.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c83b85ab2d6576607f31df77ff86f28182be4a8de6d175d2c33ca609925f5da1", size = 1253670 }, + { url = "https://files.pythonhosted.org/packages/19/ad/d5c7075f701bd97971d7c2ac2904f227566f51ef0838dfbdfdccb58cd212/sentencepiece-0.2.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1855f57db07b51fb51ed6c9c452f570624d2b169b36f0f79ef71a6e6c618cd8b", size = 1316247 }, + { url = "https://files.pythonhosted.org/packages/fb/03/35fbe5f3d9a7435eebd0b473e09584bd3cc354ce118b960445b060d33781/sentencepiece-0.2.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01e6912125cb45d3792f530a4d38f8e21bf884d6b4d4ade1b2de5cf7a8d2a52b", size = 1387894 }, + { url = "https://files.pythonhosted.org/packages/dc/aa/956ef729aafb6c8f9c443104c9636489093bb5c61d6b90fc27aa1a865574/sentencepiece-0.2.1-cp314-cp314-win32.whl", hash = "sha256:c415c9de1447e0a74ae3fdb2e52f967cb544113a3a5ce3a194df185cbc1f962f", size = 1096698 }, + { url = "https://files.pythonhosted.org/packages/b8/cb/fe400d8836952cc535c81a0ce47dc6875160e5fedb71d2d9ff0e9894c2a6/sentencepiece-0.2.1-cp314-cp314-win_amd64.whl", hash = "sha256:881b2e44b14fc19feade3cbed314be37de639fc415375cefaa5bc81a4be137fd", size = 1155115 }, + { url = 
"https://files.pythonhosted.org/packages/32/89/047921cf70f36c7b6b6390876b2399b3633ab73b8d0cb857e5a964238941/sentencepiece-0.2.1-cp314-cp314-win_arm64.whl", hash = "sha256:2005242a16d2dc3ac5fe18aa7667549134d37854823df4c4db244752453b78a8", size = 1133890 }, + { url = "https://files.pythonhosted.org/packages/a1/11/5b414b9fae6255b5fb1e22e2ed3dc3a72d3a694e5703910e640ac78346bb/sentencepiece-0.2.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a19adcec27c524cb7069a1c741060add95f942d1cbf7ad0d104dffa0a7d28a2b", size = 1946081 }, + { url = "https://files.pythonhosted.org/packages/77/eb/7a5682bb25824db8545f8e5662e7f3e32d72a508fdce086029d89695106b/sentencepiece-0.2.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e37e4b4c4a11662b5db521def4e44d4d30ae69a1743241412a93ae40fdcab4bb", size = 1327406 }, + { url = "https://files.pythonhosted.org/packages/03/b0/811dae8fb9f2784e138785d481469788f2e0d0c109c5737372454415f55f/sentencepiece-0.2.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:477c81505db072b3ab627e7eab972ea1025331bd3a92bacbf798df2b75ea86ec", size = 1254846 }, + { url = "https://files.pythonhosted.org/packages/ef/23/195b2e7ec85ebb6a547969f60b723c7aca5a75800ece6cc3f41da872d14e/sentencepiece-0.2.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:010f025a544ef770bb395091d57cb94deb9652d8972e0d09f71d85d5a0816c8c", size = 1315721 }, + { url = "https://files.pythonhosted.org/packages/7e/aa/553dbe4178b5f23eb28e59393dddd64186178b56b81d9b8d5c3ff1c28395/sentencepiece-0.2.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:733e59ff1794d26db706cd41fc2d7ca5f6c64a820709cb801dc0ea31780d64ab", size = 1387458 }, + { url = "https://files.pythonhosted.org/packages/66/7c/08ff0012507297a4dd74a5420fdc0eb9e3e80f4e88cab1538d7f28db303d/sentencepiece-0.2.1-cp314-cp314t-win32.whl", hash = "sha256:d3233770f78e637dc8b1fda2cd7c3b99ec77e7505041934188a4e7fe751de3b0", size = 1099765 }, + { url = 
"https://files.pythonhosted.org/packages/91/d5/2a69e1ce15881beb9ddfc7e3f998322f5cedcd5e4d244cb74dade9441663/sentencepiece-0.2.1-cp314-cp314t-win_amd64.whl", hash = "sha256:5e4366c97b68218fd30ea72d70c525e6e78a6c0a88650f57ac4c43c63b234a9d", size = 1157807 }, + { url = "https://files.pythonhosted.org/packages/f3/16/54f611fcfc2d1c46cbe3ec4169780b2cfa7cf63708ef2b71611136db7513/sentencepiece-0.2.1-cp314-cp314t-win_arm64.whl", hash = "sha256:105e36e75cbac1292642045458e8da677b2342dcd33df503e640f0b457cb6751", size = 1136264 }, ] [[package]] @@ -3453,53 +3448,53 @@ dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4a/2a/d225cbf87b6c8ecce5664db7bcecb82c317e448e3b24a2dcdaacb18ca9a7/sentry_sdk-2.47.0.tar.gz", hash = "sha256:8218891d5e41b4ea8d61d2aed62ed10c80e39d9f2959d6f939efbf056857e050", size = 381895, upload-time = "2025-12-03T14:06:36.846Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/2a/d225cbf87b6c8ecce5664db7bcecb82c317e448e3b24a2dcdaacb18ca9a7/sentry_sdk-2.47.0.tar.gz", hash = "sha256:8218891d5e41b4ea8d61d2aed62ed10c80e39d9f2959d6f939efbf056857e050", size = 381895 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/ac/d6286ea0d49e7b58847faf67b00e56bb4ba3d525281e2ac306e1f1f353da/sentry_sdk-2.47.0-py2.py3-none-any.whl", hash = "sha256:d72f8c61025b7d1d9e52510d03a6247b280094a327dd900d987717a4fce93412", size = 411088, upload-time = "2025-12-03T14:06:35.374Z" }, + { url = "https://files.pythonhosted.org/packages/bd/ac/d6286ea0d49e7b58847faf67b00e56bb4ba3d525281e2ac306e1f1f353da/sentry_sdk-2.47.0-py2.py3-none-any.whl", hash = "sha256:d72f8c61025b7d1d9e52510d03a6247b280094a327dd900d987717a4fce93412", size = 411088 }, ] [[package]] name = "setuptools" version = "80.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = 
"sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 }, ] [[package]] name = "simplejson" version = "3.20.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/f4/a1ac5ed32f7ed9a088d62a59d410d4c204b3b3815722e2ccfb491fa8251b/simplejson-3.20.2.tar.gz", hash = "sha256:5fe7a6ce14d1c300d80d08695b7f7e633de6cd72c80644021874d985b3393649", size = 85784, upload-time = "2025-09-26T16:29:36.64Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/9e/1a91e7614db0416885eab4136d49b7303de20528860ffdd798ce04d054db/simplejson-3.20.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4376d5acae0d1e91e78baeba4ee3cf22fbf6509d81539d01b94e0951d28ec2b6", size = 93523, upload-time = "2025-09-26T16:28:00.356Z" }, - { url = "https://files.pythonhosted.org/packages/5e/2b/d2413f5218fc25608739e3d63fe321dfa85c5f097aa6648dbe72513a5f12/simplejson-3.20.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f8fe6de652fcddae6dec8f281cc1e77e4e8f3575249e1800090aab48f73b4259", size = 75844, upload-time = "2025-09-26T16:28:01.756Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/f1/efd09efcc1e26629e120fef59be059ce7841cc6e1f949a4db94f1ae8a918/simplejson-3.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25ca2663d99328d51e5a138f22018e54c9162438d831e26cfc3458688616eca8", size = 75655, upload-time = "2025-09-26T16:28:03.037Z" }, - { url = "https://files.pythonhosted.org/packages/97/ec/5c6db08e42f380f005d03944be1af1a6bd501cc641175429a1cbe7fb23b9/simplejson-3.20.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a6b2816b6cab6c3fd273d43b1948bc9acf708272074c8858f579c394f4cbc9", size = 150335, upload-time = "2025-09-26T16:28:05.027Z" }, - { url = "https://files.pythonhosted.org/packages/81/f5/808a907485876a9242ec67054da7cbebefe0ee1522ef1c0be3bfc90f96f6/simplejson-3.20.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac20dc3fcdfc7b8415bfc3d7d51beccd8695c3f4acb7f74e3a3b538e76672868", size = 158519, upload-time = "2025-09-26T16:28:06.5Z" }, - { url = "https://files.pythonhosted.org/packages/66/af/b8a158246834645ea890c36136584b0cc1c0e4b83a73b11ebd9c2a12877c/simplejson-3.20.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db0804d04564e70862ef807f3e1ace2cc212ef0e22deb1b3d6f80c45e5882c6b", size = 148571, upload-time = "2025-09-26T16:28:07.715Z" }, - { url = "https://files.pythonhosted.org/packages/20/05/ed9b2571bbf38f1a2425391f18e3ac11cb1e91482c22d644a1640dea9da7/simplejson-3.20.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:979ce23ea663895ae39106946ef3d78527822d918a136dbc77b9e2b7f006237e", size = 152367, upload-time = "2025-09-26T16:28:08.921Z" }, - { url = "https://files.pythonhosted.org/packages/81/2c/bad68b05dd43e93f77994b920505634d31ed239418eb6a88997d06599983/simplejson-3.20.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a2ba921b047bb029805726800819675249ef25d2f65fd0edb90639c5b1c3033c", size = 150205, 
upload-time = "2025-09-26T16:28:10.086Z" }, - { url = "https://files.pythonhosted.org/packages/69/46/90c7fc878061adafcf298ce60cecdee17a027486e9dce507e87396d68255/simplejson-3.20.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:12d3d4dc33770069b780cc8f5abef909fe4a3f071f18f55f6d896a370fd0f970", size = 151823, upload-time = "2025-09-26T16:28:11.329Z" }, - { url = "https://files.pythonhosted.org/packages/ab/27/b85b03349f825ae0f5d4f780cdde0bbccd4f06c3d8433f6a3882df887481/simplejson-3.20.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:aff032a59a201b3683a34be1169e71ddda683d9c3b43b261599c12055349251e", size = 158997, upload-time = "2025-09-26T16:28:12.917Z" }, - { url = "https://files.pythonhosted.org/packages/71/ad/d7f3c331fb930638420ac6d236db68e9f4c28dab9c03164c3cd0e7967e15/simplejson-3.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30e590e133b06773f0dc9c3f82e567463df40598b660b5adf53eb1c488202544", size = 154367, upload-time = "2025-09-26T16:28:14.393Z" }, - { url = "https://files.pythonhosted.org/packages/f0/46/5c67324addd40fa2966f6e886cacbbe0407c03a500db94fb8bb40333fcdf/simplejson-3.20.2-cp312-cp312-win32.whl", hash = "sha256:8d7be7c99939cc58e7c5bcf6bb52a842a58e6c65e1e9cdd2a94b697b24cddb54", size = 74285, upload-time = "2025-09-26T16:28:15.931Z" }, - { url = "https://files.pythonhosted.org/packages/fa/c9/5cc2189f4acd3a6e30ffa9775bf09b354302dbebab713ca914d7134d0f29/simplejson-3.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:2c0b4a67e75b945489052af6590e7dca0ed473ead5d0f3aad61fa584afe814ab", size = 75969, upload-time = "2025-09-26T16:28:17.017Z" }, - { url = "https://files.pythonhosted.org/packages/5e/9e/f326d43f6bf47f4e7704a4426c36e044c6bedfd24e072fb8e27589a373a5/simplejson-3.20.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90d311ba8fcd733a3677e0be21804827226a57144130ba01c3c6a325e887dd86", size = 93530, upload-time = "2025-09-26T16:28:18.07Z" }, - { url = 
"https://files.pythonhosted.org/packages/35/28/5a4b8f3483fbfb68f3f460bc002cef3a5735ef30950e7c4adce9c8da15c7/simplejson-3.20.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:feed6806f614bdf7f5cb6d0123cb0c1c5f40407ef103aa935cffaa694e2e0c74", size = 75846, upload-time = "2025-09-26T16:28:19.12Z" }, - { url = "https://files.pythonhosted.org/packages/7a/4d/30dfef83b9ac48afae1cf1ab19c2867e27b8d22b5d9f8ca7ce5a0a157d8c/simplejson-3.20.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6b1d8d7c3e1a205c49e1aee6ba907dcb8ccea83651e6c3e2cb2062f1e52b0726", size = 75661, upload-time = "2025-09-26T16:28:20.219Z" }, - { url = "https://files.pythonhosted.org/packages/09/1d/171009bd35c7099d72ef6afd4bb13527bab469965c968a17d69a203d62a6/simplejson-3.20.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552f55745044a24c3cb7ec67e54234be56d5d6d0e054f2e4cf4fb3e297429be5", size = 150579, upload-time = "2025-09-26T16:28:21.337Z" }, - { url = "https://files.pythonhosted.org/packages/61/ae/229bbcf90a702adc6bfa476e9f0a37e21d8c58e1059043038797cbe75b8c/simplejson-3.20.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2da97ac65165d66b0570c9e545786f0ac7b5de5854d3711a16cacbcaa8c472d", size = 158797, upload-time = "2025-09-26T16:28:22.53Z" }, - { url = "https://files.pythonhosted.org/packages/90/c5/fefc0ac6b86b9108e302e0af1cf57518f46da0baedd60a12170791d56959/simplejson-3.20.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f59a12966daa356bf68927fca5a67bebac0033cd18b96de9c2d426cd11756cd0", size = 148851, upload-time = "2025-09-26T16:28:23.733Z" }, - { url = "https://files.pythonhosted.org/packages/43/f1/b392952200f3393bb06fbc4dd975fc63a6843261705839355560b7264eb2/simplejson-3.20.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:133ae2098a8e162c71da97cdab1f383afdd91373b7ff5fe65169b04167da976b", size = 152598, 
upload-time = "2025-09-26T16:28:24.962Z" }, - { url = "https://files.pythonhosted.org/packages/f4/b4/d6b7279e52a3e9c0fa8c032ce6164e593e8d9cf390698ee981ed0864291b/simplejson-3.20.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7977640af7b7d5e6a852d26622057d428706a550f7f5083e7c4dd010a84d941f", size = 150498, upload-time = "2025-09-26T16:28:26.114Z" }, - { url = "https://files.pythonhosted.org/packages/62/22/ec2490dd859224326d10c2fac1353e8ad5c84121be4837a6dd6638ba4345/simplejson-3.20.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b530ad6d55e71fa9e93e1109cf8182f427a6355848a4ffa09f69cc44e1512522", size = 152129, upload-time = "2025-09-26T16:28:27.552Z" }, - { url = "https://files.pythonhosted.org/packages/33/ce/b60214d013e93dd9e5a705dcb2b88b6c72bada442a97f79828332217f3eb/simplejson-3.20.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bd96a7d981bf64f0e42345584768da4435c05b24fd3c364663f5fbc8fabf82e3", size = 159359, upload-time = "2025-09-26T16:28:28.667Z" }, - { url = "https://files.pythonhosted.org/packages/99/21/603709455827cdf5b9d83abe726343f542491ca8dc6a2528eb08de0cf034/simplejson-3.20.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f28ee755fadb426ba2e464d6fcf25d3f152a05eb6b38e0b4f790352f5540c769", size = 154717, upload-time = "2025-09-26T16:28:30.288Z" }, - { url = "https://files.pythonhosted.org/packages/3c/f9/dc7f7a4bac16cf7eb55a4df03ad93190e11826d2a8950052949d3dfc11e2/simplejson-3.20.2-cp313-cp313-win32.whl", hash = "sha256:472785b52e48e3eed9b78b95e26a256f59bb1ee38339be3075dad799e2e1e661", size = 74289, upload-time = "2025-09-26T16:28:31.809Z" }, - { url = "https://files.pythonhosted.org/packages/87/10/d42ad61230436735c68af1120622b28a782877146a83d714da7b6a2a1c4e/simplejson-3.20.2-cp313-cp313-win_amd64.whl", hash = "sha256:a1a85013eb33e4820286139540accbe2c98d2da894b2dcefd280209db508e608", size = 75972, upload-time = "2025-09-26T16:28:32.883Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/5b/83e1ff87eb60ca706972f7e02e15c0b33396e7bdbd080069a5d1b53cf0d8/simplejson-3.20.2-py3-none-any.whl", hash = "sha256:3b6bb7fb96efd673eac2e4235200bfffdc2353ad12c54117e1e4e2fc485ac017", size = 57309, upload-time = "2025-09-26T16:29:35.312Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/41/f4/a1ac5ed32f7ed9a088d62a59d410d4c204b3b3815722e2ccfb491fa8251b/simplejson-3.20.2.tar.gz", hash = "sha256:5fe7a6ce14d1c300d80d08695b7f7e633de6cd72c80644021874d985b3393649", size = 85784 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/9e/1a91e7614db0416885eab4136d49b7303de20528860ffdd798ce04d054db/simplejson-3.20.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4376d5acae0d1e91e78baeba4ee3cf22fbf6509d81539d01b94e0951d28ec2b6", size = 93523 }, + { url = "https://files.pythonhosted.org/packages/5e/2b/d2413f5218fc25608739e3d63fe321dfa85c5f097aa6648dbe72513a5f12/simplejson-3.20.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f8fe6de652fcddae6dec8f281cc1e77e4e8f3575249e1800090aab48f73b4259", size = 75844 }, + { url = "https://files.pythonhosted.org/packages/ad/f1/efd09efcc1e26629e120fef59be059ce7841cc6e1f949a4db94f1ae8a918/simplejson-3.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25ca2663d99328d51e5a138f22018e54c9162438d831e26cfc3458688616eca8", size = 75655 }, + { url = "https://files.pythonhosted.org/packages/97/ec/5c6db08e42f380f005d03944be1af1a6bd501cc641175429a1cbe7fb23b9/simplejson-3.20.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a6b2816b6cab6c3fd273d43b1948bc9acf708272074c8858f579c394f4cbc9", size = 150335 }, + { url = "https://files.pythonhosted.org/packages/81/f5/808a907485876a9242ec67054da7cbebefe0ee1522ef1c0be3bfc90f96f6/simplejson-3.20.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac20dc3fcdfc7b8415bfc3d7d51beccd8695c3f4acb7f74e3a3b538e76672868", size = 158519 }, + { url = 
"https://files.pythonhosted.org/packages/66/af/b8a158246834645ea890c36136584b0cc1c0e4b83a73b11ebd9c2a12877c/simplejson-3.20.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db0804d04564e70862ef807f3e1ace2cc212ef0e22deb1b3d6f80c45e5882c6b", size = 148571 }, + { url = "https://files.pythonhosted.org/packages/20/05/ed9b2571bbf38f1a2425391f18e3ac11cb1e91482c22d644a1640dea9da7/simplejson-3.20.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:979ce23ea663895ae39106946ef3d78527822d918a136dbc77b9e2b7f006237e", size = 152367 }, + { url = "https://files.pythonhosted.org/packages/81/2c/bad68b05dd43e93f77994b920505634d31ed239418eb6a88997d06599983/simplejson-3.20.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a2ba921b047bb029805726800819675249ef25d2f65fd0edb90639c5b1c3033c", size = 150205 }, + { url = "https://files.pythonhosted.org/packages/69/46/90c7fc878061adafcf298ce60cecdee17a027486e9dce507e87396d68255/simplejson-3.20.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:12d3d4dc33770069b780cc8f5abef909fe4a3f071f18f55f6d896a370fd0f970", size = 151823 }, + { url = "https://files.pythonhosted.org/packages/ab/27/b85b03349f825ae0f5d4f780cdde0bbccd4f06c3d8433f6a3882df887481/simplejson-3.20.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:aff032a59a201b3683a34be1169e71ddda683d9c3b43b261599c12055349251e", size = 158997 }, + { url = "https://files.pythonhosted.org/packages/71/ad/d7f3c331fb930638420ac6d236db68e9f4c28dab9c03164c3cd0e7967e15/simplejson-3.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30e590e133b06773f0dc9c3f82e567463df40598b660b5adf53eb1c488202544", size = 154367 }, + { url = "https://files.pythonhosted.org/packages/f0/46/5c67324addd40fa2966f6e886cacbbe0407c03a500db94fb8bb40333fcdf/simplejson-3.20.2-cp312-cp312-win32.whl", hash = "sha256:8d7be7c99939cc58e7c5bcf6bb52a842a58e6c65e1e9cdd2a94b697b24cddb54", size = 
74285 }, + { url = "https://files.pythonhosted.org/packages/fa/c9/5cc2189f4acd3a6e30ffa9775bf09b354302dbebab713ca914d7134d0f29/simplejson-3.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:2c0b4a67e75b945489052af6590e7dca0ed473ead5d0f3aad61fa584afe814ab", size = 75969 }, + { url = "https://files.pythonhosted.org/packages/5e/9e/f326d43f6bf47f4e7704a4426c36e044c6bedfd24e072fb8e27589a373a5/simplejson-3.20.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90d311ba8fcd733a3677e0be21804827226a57144130ba01c3c6a325e887dd86", size = 93530 }, + { url = "https://files.pythonhosted.org/packages/35/28/5a4b8f3483fbfb68f3f460bc002cef3a5735ef30950e7c4adce9c8da15c7/simplejson-3.20.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:feed6806f614bdf7f5cb6d0123cb0c1c5f40407ef103aa935cffaa694e2e0c74", size = 75846 }, + { url = "https://files.pythonhosted.org/packages/7a/4d/30dfef83b9ac48afae1cf1ab19c2867e27b8d22b5d9f8ca7ce5a0a157d8c/simplejson-3.20.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6b1d8d7c3e1a205c49e1aee6ba907dcb8ccea83651e6c3e2cb2062f1e52b0726", size = 75661 }, + { url = "https://files.pythonhosted.org/packages/09/1d/171009bd35c7099d72ef6afd4bb13527bab469965c968a17d69a203d62a6/simplejson-3.20.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552f55745044a24c3cb7ec67e54234be56d5d6d0e054f2e4cf4fb3e297429be5", size = 150579 }, + { url = "https://files.pythonhosted.org/packages/61/ae/229bbcf90a702adc6bfa476e9f0a37e21d8c58e1059043038797cbe75b8c/simplejson-3.20.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2da97ac65165d66b0570c9e545786f0ac7b5de5854d3711a16cacbcaa8c472d", size = 158797 }, + { url = "https://files.pythonhosted.org/packages/90/c5/fefc0ac6b86b9108e302e0af1cf57518f46da0baedd60a12170791d56959/simplejson-3.20.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f59a12966daa356bf68927fca5a67bebac0033cd18b96de9c2d426cd11756cd0", size 
= 148851 }, + { url = "https://files.pythonhosted.org/packages/43/f1/b392952200f3393bb06fbc4dd975fc63a6843261705839355560b7264eb2/simplejson-3.20.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:133ae2098a8e162c71da97cdab1f383afdd91373b7ff5fe65169b04167da976b", size = 152598 }, + { url = "https://files.pythonhosted.org/packages/f4/b4/d6b7279e52a3e9c0fa8c032ce6164e593e8d9cf390698ee981ed0864291b/simplejson-3.20.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7977640af7b7d5e6a852d26622057d428706a550f7f5083e7c4dd010a84d941f", size = 150498 }, + { url = "https://files.pythonhosted.org/packages/62/22/ec2490dd859224326d10c2fac1353e8ad5c84121be4837a6dd6638ba4345/simplejson-3.20.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b530ad6d55e71fa9e93e1109cf8182f427a6355848a4ffa09f69cc44e1512522", size = 152129 }, + { url = "https://files.pythonhosted.org/packages/33/ce/b60214d013e93dd9e5a705dcb2b88b6c72bada442a97f79828332217f3eb/simplejson-3.20.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bd96a7d981bf64f0e42345584768da4435c05b24fd3c364663f5fbc8fabf82e3", size = 159359 }, + { url = "https://files.pythonhosted.org/packages/99/21/603709455827cdf5b9d83abe726343f542491ca8dc6a2528eb08de0cf034/simplejson-3.20.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f28ee755fadb426ba2e464d6fcf25d3f152a05eb6b38e0b4f790352f5540c769", size = 154717 }, + { url = "https://files.pythonhosted.org/packages/3c/f9/dc7f7a4bac16cf7eb55a4df03ad93190e11826d2a8950052949d3dfc11e2/simplejson-3.20.2-cp313-cp313-win32.whl", hash = "sha256:472785b52e48e3eed9b78b95e26a256f59bb1ee38339be3075dad799e2e1e661", size = 74289 }, + { url = "https://files.pythonhosted.org/packages/87/10/d42ad61230436735c68af1120622b28a782877146a83d714da7b6a2a1c4e/simplejson-3.20.2-cp313-cp313-win_amd64.whl", hash = "sha256:a1a85013eb33e4820286139540accbe2c98d2da894b2dcefd280209db508e608", size = 75972 }, + { url = 
"https://files.pythonhosted.org/packages/05/5b/83e1ff87eb60ca706972f7e02e15c0b33396e7bdbd080069a5d1b53cf0d8/simplejson-3.20.2-py3-none-any.whl", hash = "sha256:3b6bb7fb96efd673eac2e4235200bfffdc2353ad12c54117e1e4e2fc485ac017", size = 57309 }, ] [[package]] @@ -3598,24 +3593,23 @@ requires-dist = [ { name = "transformer-lens", specifier = ">=2.15.4" }, { name = "treescope" }, ] -provides-extras = ["aws", "cuda", "dev", "penzai"] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] [[package]] name = "smmap" version = "5.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } +sdist = { 
url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, + { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303 }, ] [[package]] @@ -3626,34 +3620,34 @@ dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830 } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" }, - { 
url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" }, - { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" }, - { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" }, - { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" }, - { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" }, - { url = "https://files.pythonhosted.org/packages/45/d3/c67077a2249fdb455246e6853166360054c331db4613cda3e31ab1cadbef/sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1", size = 2135479, upload-time = "2025-10-10T16:03:37.671Z" }, - { url = "https://files.pythonhosted.org/packages/2b/91/eabd0688330d6fd114f5f12c4f89b0d02929f525e6bf7ff80aa17ca802af/sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45", size = 2123212, upload-time = "2025-10-10T16:03:41.755Z" }, - { url = "https://files.pythonhosted.org/packages/b0/bb/43e246cfe0e81c018076a16036d9b548c4cc649de241fa27d8d9ca6f85ab/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976", size = 3255353, upload-time = "2025-10-10T15:35:31.221Z" }, - { url = "https://files.pythonhosted.org/packages/b9/96/c6105ed9a880abe346b64d3b6ddef269ddfcab04f7f3d90a0bf3c5a88e82/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c", size = 3260222, upload-time = "2025-10-10T15:43:50.124Z" }, - { url = "https://files.pythonhosted.org/packages/44/16/1857e35a47155b5ad927272fee81ae49d398959cb749edca6eaa399b582f/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d", size = 3189614, upload-time = "2025-10-10T15:35:32.578Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/ee/4afb39a8ee4fc786e2d716c20ab87b5b1fb33d4ac4129a1aaa574ae8a585/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40", size = 3226248, upload-time = "2025-10-10T15:43:51.862Z" }, - { url = "https://files.pythonhosted.org/packages/32/d5/0e66097fc64fa266f29a7963296b40a80d6a997b7ac13806183700676f86/sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73", size = 2101275, upload-time = "2025-10-10T15:03:26.096Z" }, - { url = "https://files.pythonhosted.org/packages/03/51/665617fe4f8c6450f42a6d8d69243f9420f5677395572c2fe9d21b493b7b/sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e", size = 2127901, upload-time = "2025-10-10T15:03:27.548Z" }, - { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, + { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675 }, + { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726 }, + { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603 }, + { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842 }, + { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558 }, + { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570 }, + { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447 }, + { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912 }, + { url = "https://files.pythonhosted.org/packages/45/d3/c67077a2249fdb455246e6853166360054c331db4613cda3e31ab1cadbef/sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1", size = 2135479 }, + { url = "https://files.pythonhosted.org/packages/2b/91/eabd0688330d6fd114f5f12c4f89b0d02929f525e6bf7ff80aa17ca802af/sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45", size = 
2123212 }, + { url = "https://files.pythonhosted.org/packages/b0/bb/43e246cfe0e81c018076a16036d9b548c4cc649de241fa27d8d9ca6f85ab/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976", size = 3255353 }, + { url = "https://files.pythonhosted.org/packages/b9/96/c6105ed9a880abe346b64d3b6ddef269ddfcab04f7f3d90a0bf3c5a88e82/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c", size = 3260222 }, + { url = "https://files.pythonhosted.org/packages/44/16/1857e35a47155b5ad927272fee81ae49d398959cb749edca6eaa399b582f/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d", size = 3189614 }, + { url = "https://files.pythonhosted.org/packages/88/ee/4afb39a8ee4fc786e2d716c20ab87b5b1fb33d4ac4129a1aaa574ae8a585/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40", size = 3226248 }, + { url = "https://files.pythonhosted.org/packages/32/d5/0e66097fc64fa266f29a7963296b40a80d6a997b7ac13806183700676f86/sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73", size = 2101275 }, + { url = "https://files.pythonhosted.org/packages/03/51/665617fe4f8c6450f42a6d8d69243f9420f5677395572c2fe9d21b493b7b/sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e", size = 2127901 }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718 }, ] [[package]] name = "sqlparse" version = "0.5.4" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/67/701f86b28d63b2086de47c942eccf8ca2208b3be69715a1119a4e384415a/sqlparse-0.5.4.tar.gz", hash = "sha256:4396a7d3cf1cd679c1be976cf3dc6e0a51d0111e87787e7a8d780e7d5a998f9e", size = 120112, upload-time = "2025-11-28T07:10:18.377Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/67/701f86b28d63b2086de47c942eccf8ca2208b3be69715a1119a4e384415a/sqlparse-0.5.4.tar.gz", hash = "sha256:4396a7d3cf1cd679c1be976cf3dc6e0a51d0111e87787e7a8d780e7d5a998f9e", size = 120112 } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/70/001ee337f7aa888fb2e3f5fd7592a6afc5283adb1ed44ce8df5764070f22/sqlparse-0.5.4-py3-none-any.whl", hash = "sha256:99a9f0314977b76d776a0fcb8554de91b9bb8a18560631d6bc48721d07023dcb", size = 45933, upload-time = "2025-11-28T07:10:19.73Z" }, + { url = "https://files.pythonhosted.org/packages/25/70/001ee337f7aa888fb2e3f5fd7592a6afc5283adb1ed44ce8df5764070f22/sqlparse-0.5.4-py3-none-any.whl", hash = "sha256:99a9f0314977b76d776a0fcb8554de91b9bb8a18560631d6bc48721d07023dcb", size = 45933 }, ] [[package]] @@ -3664,18 +3658,18 @@ dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = 
"sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, + { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033 }, ] [[package]] name = "stevedore" version = "5.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074, upload-time = "2025-11-20T10:06:07.264Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428, upload-time = "2025-11-20T10:06:05.946Z" }, + { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428 }, ] [[package]] @@ -3685,9 +3679,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mpmath" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +sdist 
= { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 }, ] [[package]] @@ -3698,32 +3692,32 @@ dependencies = [ { name = "ml-dtypes" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/2c/50ab489a0862ca88d2d766130a6fec45ccd5174f0e04081d8b7b07a8aedd/tensorstore-0.1.79.tar.gz", hash = "sha256:8dad44a8a7f2952a5d0030a8bd868b3cfdff048bd40ab53e7226f3d8b0881c5e", size = 7075782, upload-time = "2025-11-11T22:05:23.824Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/2c/50ab489a0862ca88d2d766130a6fec45ccd5174f0e04081d8b7b07a8aedd/tensorstore-0.1.79.tar.gz", hash = "sha256:8dad44a8a7f2952a5d0030a8bd868b3cfdff048bd40ab53e7226f3d8b0881c5e", size = 7075782 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/a2/a77be16b4a882ace36da0748305795f35306bdad568472f208bd89b96b9d/tensorstore-0.1.79-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:71aa9b45436d888c37b965f7b71195916d15438119b7dccb66a3b0776bfba367", size = 16485740, upload-time = "2025-11-11T22:04:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/7a/e4/7fe268ec41aa70b71a1c56b1ec83346fbcbf12f4bfbefc79d14fb9c03408/tensorstore-0.1.79-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:108c0e867aa2c87d4982cc6325a2de0c4f5bd63c2bea18adb193a370c40594ce", size = 14508736, upload-time = "2025-11-11T22:04:38.613Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f1/b1248dae02598ce534834413e841f915a32ab185c36ecd05e4c67bdc8d19/tensorstore-0.1.79-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:debd435042c00be68ba1fb3cf59325a7babb3f4a3cf4744c87dde346802cbbb4", size = 18947817, upload-time = "2025-11-11T22:04:40.768Z" }, - { url = "https://files.pythonhosted.org/packages/87/4a/60e234147570e21bbab4ac70ab79dd794a5ef9a4945d36c34c1914a73205/tensorstore-0.1.79-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:608f7178ec6e4e4a3c26545b0a44f44bf83438d04bf2d960cd0e7699eaa99ef6", size = 20929832, upload-time = "2025-11-11T22:04:43.613Z" }, - { url = "https://files.pythonhosted.org/packages/f8/48/0531868bce12a2f520002e810d4200ec6f01ba33a2f27b6bd7289fbc197b/tensorstore-0.1.79-cp312-cp312-win_amd64.whl", hash = "sha256:a071c6c255b7e412957a6aa563bc4250242c7894edad06ae6358e3d30b7d88ce", size = 13211970, upload-time = "2025-11-11T22:04:46.179Z" }, - { url = "https://files.pythonhosted.org/packages/fa/0b/54a44e55836d8e8f576343134c0e3db71c6c837d39a0ac44699aba5b01df/tensorstore-0.1.79-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:1e8e2d098829919caac6a62cf568902e34789069ceddb28497d6e36ebcb95c0b", size = 16485855, upload-time = "2025-11-11T22:04:48.734Z" }, - { url = "https://files.pythonhosted.org/packages/04/59/cadb9a45896d480882476df4759cda1659c70669aff87a4d5a4a07ded084/tensorstore-0.1.79-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:29cf4336153af136ac8ac528e2ed46df19367edae7e14e37bca1a8b7c4848ef2", size = 14508277, upload-time = "2025-11-11T22:04:50.775Z" }, - { url = "https://files.pythonhosted.org/packages/e6/cb/3647bdd03c7692882ebc10c19df9ede49f290c216b2906f785edbdb53ef1/tensorstore-0.1.79-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:94d8fc9df1721b0287046aca7209fd5040889cad4202e7b73a1fdb77cd9b71c6", size = 18949307, upload-time = "2025-11-11T22:04:53.145Z" }, - { url = "https://files.pythonhosted.org/packages/20/a0/f91ac492cf2ee9f7541aefaaed4ad1258e73e33f3cd3e06cdce5859431db/tensorstore-0.1.79-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9f2dc3342e4686af98f6e259dc9fb377f1bf657b649c247bf6647bbe4f98090", size = 20930427, upload-time = "2025-11-11T22:04:55.353Z" }, - { url = "https://files.pythonhosted.org/packages/69/a6/752fd11747eb9fead715b02d389da7fb180a56172b885de0b48b20237d1e/tensorstore-0.1.79-cp313-cp313-win_amd64.whl", hash = "sha256:0fd6165f3df49abc7c9de029b2b72d74bebd2ff2481a5ced003607eb61c56d3e", size = 13212196, upload-time = "2025-11-11T22:05:00.451Z" }, - { url = "https://files.pythonhosted.org/packages/46/57/1649019893accb3f195780fec55b8bf6793343faf140040bc73f1c28d6a5/tensorstore-0.1.79-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6f8f5a940eab434a951c2dadcc7c0516c7bef6d8b7a7144054f7a0c56152b5f5", size = 16488849, upload-time = "2025-11-11T22:05:03.014Z" }, - { url = "https://files.pythonhosted.org/packages/bf/23/2668cb120e855a6a7a8a5eb0eba30e2e7020da932a4d3fa13c6ee3c41f9f/tensorstore-0.1.79-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:97756d2cba3c5ce21e15602c2af5a02521cc0ecda7f9fb6d18da2f3bd51827f4", size = 14511448, upload-time = "2025-11-11T22:05:05.58Z" }, - { url = "https://files.pythonhosted.org/packages/6a/0e/c38f079f3933cc284aab53d52976f6cb4f1ad43bb6a704ac27e0b710f176/tensorstore-0.1.79-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:847982652273fb7b2d694b789205747aaf3e50ae64738c5cb7b5eb03d86a9947", size = 18949282, upload-time = "2025-11-11T22:05:07.562Z" }, - { url = "https://files.pythonhosted.org/packages/6f/99/03479deea5bfd27a0d8a8c75d5f1d85417a7bbc9c6c7a90fb85b4a4e347a/tensorstore-0.1.79-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7af9422269c2bfcdecf9dd55309060665ab9c2d7f6c892377ed32c032400feea", size = 20931601, upload-time = "2025-11-11T22:05:10.098Z" }, - { url = "https://files.pythonhosted.org/packages/26/36/2617edf6c6d6fc73b3ff96d9d0b97332adf0d0c56fa2014a226bf4f7dfa6/tensorstore-0.1.79-cp314-cp314-win_amd64.whl", hash = "sha256:bbd8c1ab7d2e3c03ded3d40bb373ee9a67668e33a564484927865ce43b210386", size = 13599766, upload-time = "2025-11-11T22:05:12.265Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a2/a77be16b4a882ace36da0748305795f35306bdad568472f208bd89b96b9d/tensorstore-0.1.79-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:71aa9b45436d888c37b965f7b71195916d15438119b7dccb66a3b0776bfba367", size = 16485740 }, + { url = "https://files.pythonhosted.org/packages/7a/e4/7fe268ec41aa70b71a1c56b1ec83346fbcbf12f4bfbefc79d14fb9c03408/tensorstore-0.1.79-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:108c0e867aa2c87d4982cc6325a2de0c4f5bd63c2bea18adb193a370c40594ce", size = 14508736 }, + { url = "https://files.pythonhosted.org/packages/5a/f1/b1248dae02598ce534834413e841f915a32ab185c36ecd05e4c67bdc8d19/tensorstore-0.1.79-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:debd435042c00be68ba1fb3cf59325a7babb3f4a3cf4744c87dde346802cbbb4", size = 18947817 }, + { url = "https://files.pythonhosted.org/packages/87/4a/60e234147570e21bbab4ac70ab79dd794a5ef9a4945d36c34c1914a73205/tensorstore-0.1.79-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:608f7178ec6e4e4a3c26545b0a44f44bf83438d04bf2d960cd0e7699eaa99ef6", size = 20929832 }, + { url = "https://files.pythonhosted.org/packages/f8/48/0531868bce12a2f520002e810d4200ec6f01ba33a2f27b6bd7289fbc197b/tensorstore-0.1.79-cp312-cp312-win_amd64.whl", hash = "sha256:a071c6c255b7e412957a6aa563bc4250242c7894edad06ae6358e3d30b7d88ce", size = 13211970 }, + { url = 
"https://files.pythonhosted.org/packages/fa/0b/54a44e55836d8e8f576343134c0e3db71c6c837d39a0ac44699aba5b01df/tensorstore-0.1.79-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:1e8e2d098829919caac6a62cf568902e34789069ceddb28497d6e36ebcb95c0b", size = 16485855 }, + { url = "https://files.pythonhosted.org/packages/04/59/cadb9a45896d480882476df4759cda1659c70669aff87a4d5a4a07ded084/tensorstore-0.1.79-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:29cf4336153af136ac8ac528e2ed46df19367edae7e14e37bca1a8b7c4848ef2", size = 14508277 }, + { url = "https://files.pythonhosted.org/packages/e6/cb/3647bdd03c7692882ebc10c19df9ede49f290c216b2906f785edbdb53ef1/tensorstore-0.1.79-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94d8fc9df1721b0287046aca7209fd5040889cad4202e7b73a1fdb77cd9b71c6", size = 18949307 }, + { url = "https://files.pythonhosted.org/packages/20/a0/f91ac492cf2ee9f7541aefaaed4ad1258e73e33f3cd3e06cdce5859431db/tensorstore-0.1.79-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9f2dc3342e4686af98f6e259dc9fb377f1bf657b649c247bf6647bbe4f98090", size = 20930427 }, + { url = "https://files.pythonhosted.org/packages/69/a6/752fd11747eb9fead715b02d389da7fb180a56172b885de0b48b20237d1e/tensorstore-0.1.79-cp313-cp313-win_amd64.whl", hash = "sha256:0fd6165f3df49abc7c9de029b2b72d74bebd2ff2481a5ced003607eb61c56d3e", size = 13212196 }, + { url = "https://files.pythonhosted.org/packages/46/57/1649019893accb3f195780fec55b8bf6793343faf140040bc73f1c28d6a5/tensorstore-0.1.79-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6f8f5a940eab434a951c2dadcc7c0516c7bef6d8b7a7144054f7a0c56152b5f5", size = 16488849 }, + { url = "https://files.pythonhosted.org/packages/bf/23/2668cb120e855a6a7a8a5eb0eba30e2e7020da932a4d3fa13c6ee3c41f9f/tensorstore-0.1.79-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:97756d2cba3c5ce21e15602c2af5a02521cc0ecda7f9fb6d18da2f3bd51827f4", size = 14511448 }, + { url = 
"https://files.pythonhosted.org/packages/6a/0e/c38f079f3933cc284aab53d52976f6cb4f1ad43bb6a704ac27e0b710f176/tensorstore-0.1.79-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:847982652273fb7b2d694b789205747aaf3e50ae64738c5cb7b5eb03d86a9947", size = 18949282 }, + { url = "https://files.pythonhosted.org/packages/6f/99/03479deea5bfd27a0d8a8c75d5f1d85417a7bbc9c6c7a90fb85b4a4e347a/tensorstore-0.1.79-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7af9422269c2bfcdecf9dd55309060665ab9c2d7f6c892377ed32c032400feea", size = 20931601 }, + { url = "https://files.pythonhosted.org/packages/26/36/2617edf6c6d6fc73b3ff96d9d0b97332adf0d0c56fa2014a226bf4f7dfa6/tensorstore-0.1.79-cp314-cp314-win_amd64.whl", hash = "sha256:bbd8c1ab7d2e3c03ded3d40bb373ee9a67668e33a564484927865ce43b210386", size = 13599766 }, ] [[package]] name = "threadpoolctl" version = "3.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274 } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = 
"sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638 }, ] [[package]] @@ -3733,40 +3727,40 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, - { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, - { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, - { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, - { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, - { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, - { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, - { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, - { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = 
"sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, - { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, - { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, - { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, - { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318 }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478 }, + { url = 
"https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994 }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141 }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049 }, + { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730 }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560 }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221 }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569 }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599 }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862 }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250 }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003 }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684 }, ] [[package]] name = "tomlkit" version = "0.13.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207, upload-time = "2025-06-05T07:13:44.947Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = 
"sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" }, + { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901 }, ] [[package]] name = "toolz" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613, upload-time = "2025-10-17T04:03:21.661Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093, upload-time = "2025-10-17T04:03:20.435Z" }, + { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093 }, ] [[package]] @@ -3799,26 +3793,26 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0f/27/07c645c7673e73e53ded71705045d6cb5bae94c4b021b03aa8d03eee90ab/torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6", size = 104126592, upload-time = "2025-11-12T15:20:41.62Z" }, - { url = "https://files.pythonhosted.org/packages/19/17/e377a460603132b00760511299fceba4102bd95db1a0ee788da21298ccff/torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4", size = 899742281, upload-time = "2025-11-12T15:22:17.602Z" }, - { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568, upload-time = "2025-11-12T15:21:18.689Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, - { url = "https://files.pythonhosted.org/packages/20/60/8fc5e828d050bddfab469b3fe78e5ab9a7e53dda9c3bdc6a43d17ce99e63/torch-2.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb", size = 104135743, upload-time = "2025-11-12T15:21:34.936Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b7/6d3f80e6918213babddb2a37b46dbb14c15b14c5f473e347869a51f40e1f/torch-2.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9", size = 899749493, upload-time = "2025-11-12T15:24:36.356Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/47/c7843d69d6de8938c1cbb1eba426b1d48ddf375f101473d3e31a5fc52b74/torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2", size = 110944162, upload-time = "2025-11-12T15:21:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751, upload-time = "2025-11-12T15:21:43.792Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f7/7a18745edcd7b9ca2381aa03353647bca8aace91683c4975f19ac233809d/torch-2.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a", size = 104142929, upload-time = "2025-11-12T15:21:48.319Z" }, - { url = "https://files.pythonhosted.org/packages/f4/dd/f1c0d879f2863ef209e18823a988dc7a1bf40470750e3ebe927efdb9407f/torch-2.9.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2", size = 899748978, upload-time = "2025-11-12T15:23:04.568Z" }, - { url = "https://files.pythonhosted.org/packages/1f/9f/6986b83a53b4d043e36f3f898b798ab51f7f20fdf1a9b01a2720f445043d/torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db", size = 111176995, upload-time = "2025-11-12T15:22:01.618Z" }, - { url = "https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347, upload-time = "2025-11-12T15:21:57.648Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/50/c4b5112546d0d13cc9eaa1c732b823d676a9f49ae8b6f97772f795874a03/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a", size = 74433245, upload-time = "2025-11-12T15:22:39.027Z" }, - { url = "https://files.pythonhosted.org/packages/81/c9/2628f408f0518b3bae49c95f5af3728b6ab498c8624ab1e03a43dd53d650/torch-2.9.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:19d144d6b3e29921f1fc70503e9f2fc572cde6a5115c0c0de2f7ca8b1483e8b6", size = 104134804, upload-time = "2025-11-12T15:22:35.222Z" }, - { url = "https://files.pythonhosted.org/packages/28/fc/5bc91d6d831ae41bf6e9e6da6468f25330522e92347c9156eb3f1cb95956/torch-2.9.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:c432d04376f6d9767a9852ea0def7b47a7bbc8e7af3b16ac9cf9ce02b12851c9", size = 899747132, upload-time = "2025-11-12T15:23:36.068Z" }, - { url = "https://files.pythonhosted.org/packages/63/5d/e8d4e009e52b6b2cf1684bde2a6be157b96fb873732542fb2a9a99e85a83/torch-2.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:d187566a2cdc726fc80138c3cdb260970fab1c27e99f85452721f7759bbd554d", size = 110934845, upload-time = "2025-11-12T15:22:48.367Z" }, - { url = "https://files.pythonhosted.org/packages/bd/b2/2d15a52516b2ea3f414643b8de68fa4cb220d3877ac8b1028c83dc8ca1c4/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c", size = 74823558, upload-time = "2025-11-12T15:22:43.392Z" }, - { url = "https://files.pythonhosted.org/packages/86/5c/5b2e5d84f5b9850cd1e71af07524d8cbb74cba19379800f1f9f7c997fc70/torch-2.9.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:0a2bd769944991c74acf0c4ef23603b9c777fdf7637f115605a4b2d8023110c7", size = 104145788, upload-time = "2025-11-12T15:23:52.109Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/8c/3da60787bcf70add986c4ad485993026ac0ca74f2fc21410bc4eb1bb7695/torch-2.9.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:07c8a9660bc9414c39cac530ac83b1fb1b679d7155824144a40a54f4a47bfa73", size = 899735500, upload-time = "2025-11-12T15:24:08.788Z" }, - { url = "https://files.pythonhosted.org/packages/db/2b/f7818f6ec88758dfd21da46b6cd46af9d1b3433e53ddbb19ad1e0da17f9b/torch-2.9.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c88d3299ddeb2b35dcc31753305612db485ab6f1823e37fb29451c8b2732b87e", size = 111163659, upload-time = "2025-11-12T15:23:20.009Z" }, + { url = "https://files.pythonhosted.org/packages/0f/27/07c645c7673e73e53ded71705045d6cb5bae94c4b021b03aa8d03eee90ab/torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6", size = 104126592 }, + { url = "https://files.pythonhosted.org/packages/19/17/e377a460603132b00760511299fceba4102bd95db1a0ee788da21298ccff/torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4", size = 899742281 }, + { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568 }, + { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191 }, + { url = "https://files.pythonhosted.org/packages/20/60/8fc5e828d050bddfab469b3fe78e5ab9a7e53dda9c3bdc6a43d17ce99e63/torch-2.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb", size = 104135743 }, + { url = 
"https://files.pythonhosted.org/packages/f2/b7/6d3f80e6918213babddb2a37b46dbb14c15b14c5f473e347869a51f40e1f/torch-2.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9", size = 899749493 }, + { url = "https://files.pythonhosted.org/packages/a6/47/c7843d69d6de8938c1cbb1eba426b1d48ddf375f101473d3e31a5fc52b74/torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2", size = 110944162 }, + { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751 }, + { url = "https://files.pythonhosted.org/packages/4b/f7/7a18745edcd7b9ca2381aa03353647bca8aace91683c4975f19ac233809d/torch-2.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a", size = 104142929 }, + { url = "https://files.pythonhosted.org/packages/f4/dd/f1c0d879f2863ef209e18823a988dc7a1bf40470750e3ebe927efdb9407f/torch-2.9.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2", size = 899748978 }, + { url = "https://files.pythonhosted.org/packages/1f/9f/6986b83a53b4d043e36f3f898b798ab51f7f20fdf1a9b01a2720f445043d/torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db", size = 111176995 }, + { url = "https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347 }, + { url = 
"https://files.pythonhosted.org/packages/48/50/c4b5112546d0d13cc9eaa1c732b823d676a9f49ae8b6f97772f795874a03/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a", size = 74433245 }, + { url = "https://files.pythonhosted.org/packages/81/c9/2628f408f0518b3bae49c95f5af3728b6ab498c8624ab1e03a43dd53d650/torch-2.9.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:19d144d6b3e29921f1fc70503e9f2fc572cde6a5115c0c0de2f7ca8b1483e8b6", size = 104134804 }, + { url = "https://files.pythonhosted.org/packages/28/fc/5bc91d6d831ae41bf6e9e6da6468f25330522e92347c9156eb3f1cb95956/torch-2.9.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:c432d04376f6d9767a9852ea0def7b47a7bbc8e7af3b16ac9cf9ce02b12851c9", size = 899747132 }, + { url = "https://files.pythonhosted.org/packages/63/5d/e8d4e009e52b6b2cf1684bde2a6be157b96fb873732542fb2a9a99e85a83/torch-2.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:d187566a2cdc726fc80138c3cdb260970fab1c27e99f85452721f7759bbd554d", size = 110934845 }, + { url = "https://files.pythonhosted.org/packages/bd/b2/2d15a52516b2ea3f414643b8de68fa4cb220d3877ac8b1028c83dc8ca1c4/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c", size = 74823558 }, + { url = "https://files.pythonhosted.org/packages/86/5c/5b2e5d84f5b9850cd1e71af07524d8cbb74cba19379800f1f9f7c997fc70/torch-2.9.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:0a2bd769944991c74acf0c4ef23603b9c777fdf7637f115605a4b2d8023110c7", size = 104145788 }, + { url = "https://files.pythonhosted.org/packages/a9/8c/3da60787bcf70add986c4ad485993026ac0ca74f2fc21410bc4eb1bb7695/torch-2.9.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:07c8a9660bc9414c39cac530ac83b1fb1b679d7155824144a40a54f4a47bfa73", size = 899735500 }, + { url = 
"https://files.pythonhosted.org/packages/db/2b/f7818f6ec88758dfd21da46b6cd46af9d1b3433e53ddbb19ad1e0da17f9b/torch-2.9.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c88d3299ddeb2b35dcc31753305612db485ab6f1823e37fb29451c8b2732b87e", size = 111163659 }, ] [[package]] @@ -3828,9 +3822,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, ] [[package]] @@ -3857,9 +3851,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "wandb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/14/02504ec3b5333f5c09eee59c8fd0d5b4830d82932fd3dd78f6bf88e00dcf/transformer_lens-2.15.4.tar.gz", hash = "sha256:76e3c6049ae110fb1529669345cf76f60b931936b1e351a70a8c73c48fc84440", size = 151406, upload-time = "2025-05-15T21:56:09.573Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a1/14/02504ec3b5333f5c09eee59c8fd0d5b4830d82932fd3dd78f6bf88e00dcf/transformer_lens-2.15.4.tar.gz", hash = "sha256:76e3c6049ae110fb1529669345cf76f60b931936b1e351a70a8c73c48fc84440", size = 151406 } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/68/ff1fee1043060b70c307e70389dff64b3259e0a6e3197ace5df0477d2983/transformer_lens-2.15.4-py3-none-any.whl", hash = "sha256:f0bccac37410f3568be316d6b688513d5ffec70a88f89c257d2cb9fffae08104", size = 189262, upload-time = "2025-05-15T21:56:08.321Z" }, + { url = "https://files.pythonhosted.org/packages/46/68/ff1fee1043060b70c307e70389dff64b3259e0a6e3197ace5df0477d2983/transformer_lens-2.15.4-py3-none-any.whl", hash = "sha256:f0bccac37410f3568be316d6b688513d5ffec70a88f89c257d2cb9fffae08104", size = 189262 }, ] [[package]] @@ -3878,9 +3872,9 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/70/d42a739e8dfde3d92bb2fff5819cbf331fe9657323221e79415cd5eb65ee/transformers-4.57.3.tar.gz", hash = "sha256:df4945029aaddd7c09eec5cad851f30662f8bd1746721b34cc031d70c65afebc", size = 10139680, upload-time = "2025-11-25T15:51:30.139Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/70/d42a739e8dfde3d92bb2fff5819cbf331fe9657323221e79415cd5eb65ee/transformers-4.57.3.tar.gz", hash = "sha256:df4945029aaddd7c09eec5cad851f30662f8bd1746721b34cc031d70c65afebc", size = 10139680 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/6b/2f416568b3c4c91c96e5a365d164f8a4a4a88030aa8ab4644181fdadce97/transformers-4.57.3-py3-none-any.whl", hash = "sha256:c77d353a4851b1880191603d36acb313411d3577f6e2897814f333841f7003f4", size = 11993463, upload-time = "2025-11-25T15:51:26.493Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6b/2f416568b3c4c91c96e5a365d164f8a4a4a88030aa8ab4644181fdadce97/transformers-4.57.3-py3-none-any.whl", hash = 
"sha256:c77d353a4851b1880191603d36acb313411d3577f6e2897814f333841f7003f4", size = 11993463 }, ] [[package]] @@ -3890,7 +3884,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "transformers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/c2/65f13aec253100e1916e9bd7965fe17bde796ebabeb1265f45191ab4ddc0/transformers-stream-generator-0.0.5.tar.gz", hash = "sha256:271deace0abf9c0f83b36db472c8ba61fdc7b04d1bf89d845644acac2795ed57", size = 13033, upload-time = "2024-03-11T14:18:02.079Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/c2/65f13aec253100e1916e9bd7965fe17bde796ebabeb1265f45191ab4ddc0/transformers-stream-generator-0.0.5.tar.gz", hash = "sha256:271deace0abf9c0f83b36db472c8ba61fdc7b04d1bf89d845644acac2795ed57", size = 13033 } [[package]] name = "treescope" @@ -3899,9 +3893,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/2a/d13d3c38862632742d2fe2f7ae307c431db06538fd05ca03020d207b5dcc/treescope-0.1.10.tar.gz", hash = "sha256:20f74656f34ab2d8716715013e8163a0da79bdc2554c16d5023172c50d27ea95", size = 138870, upload-time = "2025-08-08T05:43:48.048Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/2a/d13d3c38862632742d2fe2f7ae307c431db06538fd05ca03020d207b5dcc/treescope-0.1.10.tar.gz", hash = "sha256:20f74656f34ab2d8716715013e8163a0da79bdc2554c16d5023172c50d27ea95", size = 138870 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/2b/36e984399089c026a6499ac8f7401d38487cf0183839a4aa78140d373771/treescope-0.1.10-py3-none-any.whl", hash = "sha256:dde52f5314f4c29d22157a6fe4d3bd103f9cae02791c9e672eefa32c9aa1da51", size = 182255, upload-time = "2025-08-08T05:43:46.673Z" }, + { url = "https://files.pythonhosted.org/packages/43/2b/36e984399089c026a6499ac8f7401d38487cf0183839a4aa78140d373771/treescope-0.1.10-py3-none-any.whl", hash = 
"sha256:dde52f5314f4c29d22157a6fe4d3bd103f9cae02791c9e672eefa32c9aa1da51", size = 182255 }, ] [[package]] @@ -3909,11 +3903,11 @@ name = "triton" version = "3.5.1" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207, upload-time = "2025-11-11T17:41:00.253Z" }, - { url = "https://files.pythonhosted.org/packages/27/46/8c3bbb5b0a19313f50edcaa363b599e5a1a5ac9683ead82b9b80fe497c8d/triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba", size = 170470410, upload-time = "2025-11-11T17:41:06.319Z" }, - { url = "https://files.pythonhosted.org/packages/37/92/e97fcc6b2c27cdb87ce5ee063d77f8f26f19f06916aa680464c8104ef0f6/triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8", size = 170579924, upload-time = "2025-11-11T17:41:12.455Z" }, - { url = "https://files.pythonhosted.org/packages/a4/e6/c595c35e5c50c4bc56a7bac96493dad321e9e29b953b526bbbe20f9911d0/triton-3.5.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0637b1efb1db599a8e9dc960d53ab6e4637db7d4ab6630a0974705d77b14b60", size = 170480488, upload-time = "2025-11-11T17:41:18.222Z" }, - { url = "https://files.pythonhosted.org/packages/16/b5/b0d3d8b901b6a04ca38df5e24c27e53afb15b93624d7fd7d658c7cd9352a/triton-3.5.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bac7f7d959ad0f48c0e97d6643a1cc0fd5786fe61cb1f83b537c6b2d54776478", size = 170582192, upload-time = "2025-11-11T17:41:23.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207 }, + { url = "https://files.pythonhosted.org/packages/27/46/8c3bbb5b0a19313f50edcaa363b599e5a1a5ac9683ead82b9b80fe497c8d/triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba", size = 170470410 }, + { url = "https://files.pythonhosted.org/packages/37/92/e97fcc6b2c27cdb87ce5ee063d77f8f26f19f06916aa680464c8104ef0f6/triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8", size = 170579924 }, + { url = "https://files.pythonhosted.org/packages/a4/e6/c595c35e5c50c4bc56a7bac96493dad321e9e29b953b526bbbe20f9911d0/triton-3.5.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0637b1efb1db599a8e9dc960d53ab6e4637db7d4ab6630a0974705d77b14b60", size = 170480488 }, + { url = "https://files.pythonhosted.org/packages/16/b5/b0d3d8b901b6a04ca38df5e24c27e53afb15b93624d7fd7d658c7cd9352a/triton-3.5.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bac7f7d959ad0f48c0e97d6643a1cc0fd5786fe61cb1f83b537c6b2d54776478", size = 170582192 }, ] [[package]] @@ -3923,18 +3917,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203, upload-time = "2025-06-18T09:56:07.624Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874, upload-time = "2025-06-18T09:56:05.999Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874 }, ] [[package]] name = "typing-extensions" version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = 
"sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, ] [[package]] @@ -3944,27 +3938,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 }, ] [[package]] name = "tzdata" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } wheels = [ - 
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, ] [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/1d/0f3a93cca1ac5e8287842ed4eebbd0f7a991315089b1a0b01c7788aa7b63/urllib3-2.6.1.tar.gz", hash = "sha256:5379eb6e1aba4088bae84f8242960017ec8d8e3decf30480b3a1abdaa9671a3f", size = 432678 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/bc/56/190ceb8cb10511b730b564fb1e0293fa468363dbad26145c34928a60cb0c/urllib3-2.6.1-py3-none-any.whl", hash = "sha256:e67d06fe947c36a7ca39f4994b08d73922d40e6cca949907be05efa6fd75110b", size = 131138 }, ] [[package]] @@ -3975,27 +3969,27 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", 
hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" }, + { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109 }, ] [[package]] name = "wadler-lindig" version = "0.1.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1e/67/cbae4bf7683a64755c2c1778c418fea96d00e34395bb91743f08bd951571/wadler_lindig-0.1.7.tar.gz", hash = "sha256:81d14d3fe77d441acf3ebd7f4aefac20c74128bf460e84b512806dccf7b2cd55", size = 15842, upload-time = "2025-06-18T07:00:42.843Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/67/cbae4bf7683a64755c2c1778c418fea96d00e34395bb91743f08bd951571/wadler_lindig-0.1.7.tar.gz", hash = "sha256:81d14d3fe77d441acf3ebd7f4aefac20c74128bf460e84b512806dccf7b2cd55", size = 15842 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/96/04e7b441807b26b794da5b11e59ed7f83b2cf8af202bd7eba8ad2fa6046e/wadler_lindig-0.1.7-py3-none-any.whl", hash = "sha256:e3ec83835570fd0a9509f969162aeb9c65618f998b1f42918cfc8d45122fe953", size = 20516, upload-time = "2025-06-18T07:00:41.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/96/04e7b441807b26b794da5b11e59ed7f83b2cf8af202bd7eba8ad2fa6046e/wadler_lindig-0.1.7-py3-none-any.whl", hash = "sha256:e3ec83835570fd0a9509f969162aeb9c65618f998b1f42918cfc8d45122fe953", size = 20516 }, ] [[package]] name = "waitress" version = "3.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bf/cb/04ddb054f45faa306a230769e868c28b8065ea196891f09004ebace5b184/waitress-3.0.2.tar.gz", hash = "sha256:682aaaf2af0c44ada4abfb70ded36393f0e307f4ab9456a215ce0020baefc31f", size = 179901, upload-time = "2024-11-16T20:02:35.195Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/cb/04ddb054f45faa306a230769e868c28b8065ea196891f09004ebace5b184/waitress-3.0.2.tar.gz", hash = "sha256:682aaaf2af0c44ada4abfb70ded36393f0e307f4ab9456a215ce0020baefc31f", size = 179901 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/57/a27182528c90ef38d82b636a11f606b0cbb0e17588ed205435f8affe3368/waitress-3.0.2-py3-none-any.whl", hash = "sha256:c56d67fd6e87c2ee598b76abdd4e96cfad1f24cacdea5078d382b1f9d7b5ed2e", size = 56232, upload-time = "2024-11-16T20:02:33.858Z" }, + { url = "https://files.pythonhosted.org/packages/8d/57/a27182528c90ef38d82b636a11f606b0cbb0e17588ed205435f8affe3368/waitress-3.0.2-py3-none-any.whl", hash = "sha256:c56d67fd6e87c2ee598b76abdd4e96cfad1f24cacdea5078d382b1f9d7b5ed2e", size = 56232 }, ] [[package]] @@ -4014,26 +4008,26 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/cc/770ae3aa7ae44f6792f7ecb81c14c0e38b672deb35235719bb1006519487/wandb-0.23.1.tar.gz", hash = "sha256:f6fb1e3717949b29675a69359de0eeb01e67d3360d581947d5b3f98c273567d6", size = 44298053, upload-time = "2025-12-03T02:25:10.79Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/cc/770ae3aa7ae44f6792f7ecb81c14c0e38b672deb35235719bb1006519487/wandb-0.23.1.tar.gz", hash = 
"sha256:f6fb1e3717949b29675a69359de0eeb01e67d3360d581947d5b3f98c273567d6", size = 44298053 } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/0b/c3d7053dfd93fd259a63c7818d9c4ac2ba0642ff8dc8db98662ea0cf9cc0/wandb-0.23.1-py3-none-macosx_12_0_arm64.whl", hash = "sha256:358e15471d19b7d73fc464e37371c19d44d39e433252ac24df107aff993a286b", size = 21527293, upload-time = "2025-12-03T02:24:48.011Z" }, - { url = "https://files.pythonhosted.org/packages/ee/9f/059420fa0cb6c511dc5c5a50184122b6aca7b178cb2aa210139e354020da/wandb-0.23.1-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:110304407f4b38f163bdd50ed5c5225365e4df3092f13089c30171a75257b575", size = 22745926, upload-time = "2025-12-03T02:24:50.519Z" }, - { url = "https://files.pythonhosted.org/packages/96/b6/fd465827c14c64d056d30b4c9fcf4dac889a6969dba64489a88fc4ffa333/wandb-0.23.1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6cc984cf85feb2f8ee0451d76bc9fb7f39da94956bb8183e30d26284cf203b65", size = 21212973, upload-time = "2025-12-03T02:24:52.828Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ee/9a8bb9a39cc1f09c3060456cc79565110226dc4099a719af5c63432da21d/wandb-0.23.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:67431cd3168d79fdb803e503bd669c577872ffd5dadfa86de733b3274b93088e", size = 22887885, upload-time = "2025-12-03T02:24:55.281Z" }, - { url = "https://files.pythonhosted.org/packages/6d/4d/8d9e75add529142e037b05819cb3ab1005679272950128d69d218b7e5b2e/wandb-0.23.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:07be70c0baa97ea25fadc4a9d0097f7371eef6dcacc5ceb525c82491a31e9244", size = 21250967, upload-time = "2025-12-03T02:24:57.603Z" }, - { url = "https://files.pythonhosted.org/packages/97/72/0b35cddc4e4168f03c759b96d9f671ad18aec8bdfdd84adfea7ecb3f5701/wandb-0.23.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:216c95b08e0a2ec6a6008373b056d597573d565e30b43a7a93c35a171485ee26", size = 22988382, upload-time = "2025-12-03T02:25:00.518Z" }, - { url = 
"https://files.pythonhosted.org/packages/c0/6d/e78093d49d68afb26f5261a70fc7877c34c114af5c2ee0ab3b1af85f5e76/wandb-0.23.1-py3-none-win32.whl", hash = "sha256:fb5cf0f85692f758a5c36ab65fea96a1284126de64e836610f92ddbb26df5ded", size = 22150756, upload-time = "2025-12-03T02:25:02.734Z" }, - { url = "https://files.pythonhosted.org/packages/05/27/4f13454b44c9eceaac3d6e4e4efa2230b6712d613ff9bf7df010eef4fd18/wandb-0.23.1-py3-none-win_amd64.whl", hash = "sha256:21c8c56e436eb707b7d54f705652e030d48e5cfcba24cf953823eb652e30e714", size = 22150760, upload-time = "2025-12-03T02:25:05.106Z" }, - { url = "https://files.pythonhosted.org/packages/30/20/6c091d451e2a07689bfbfaeb7592d488011420e721de170884fedd68c644/wandb-0.23.1-py3-none-win_arm64.whl", hash = "sha256:8aee7f3bb573f2c0acf860f497ca9c684f9b35f2ca51011ba65af3d4592b77c1", size = 20137463, upload-time = "2025-12-03T02:25:08.317Z" }, + { url = "https://files.pythonhosted.org/packages/12/0b/c3d7053dfd93fd259a63c7818d9c4ac2ba0642ff8dc8db98662ea0cf9cc0/wandb-0.23.1-py3-none-macosx_12_0_arm64.whl", hash = "sha256:358e15471d19b7d73fc464e37371c19d44d39e433252ac24df107aff993a286b", size = 21527293 }, + { url = "https://files.pythonhosted.org/packages/ee/9f/059420fa0cb6c511dc5c5a50184122b6aca7b178cb2aa210139e354020da/wandb-0.23.1-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:110304407f4b38f163bdd50ed5c5225365e4df3092f13089c30171a75257b575", size = 22745926 }, + { url = "https://files.pythonhosted.org/packages/96/b6/fd465827c14c64d056d30b4c9fcf4dac889a6969dba64489a88fc4ffa333/wandb-0.23.1-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6cc984cf85feb2f8ee0451d76bc9fb7f39da94956bb8183e30d26284cf203b65", size = 21212973 }, + { url = "https://files.pythonhosted.org/packages/5c/ee/9a8bb9a39cc1f09c3060456cc79565110226dc4099a719af5c63432da21d/wandb-0.23.1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:67431cd3168d79fdb803e503bd669c577872ffd5dadfa86de733b3274b93088e", size = 22887885 }, + { url = 
"https://files.pythonhosted.org/packages/6d/4d/8d9e75add529142e037b05819cb3ab1005679272950128d69d218b7e5b2e/wandb-0.23.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:07be70c0baa97ea25fadc4a9d0097f7371eef6dcacc5ceb525c82491a31e9244", size = 21250967 }, + { url = "https://files.pythonhosted.org/packages/97/72/0b35cddc4e4168f03c759b96d9f671ad18aec8bdfdd84adfea7ecb3f5701/wandb-0.23.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:216c95b08e0a2ec6a6008373b056d597573d565e30b43a7a93c35a171485ee26", size = 22988382 }, + { url = "https://files.pythonhosted.org/packages/c0/6d/e78093d49d68afb26f5261a70fc7877c34c114af5c2ee0ab3b1af85f5e76/wandb-0.23.1-py3-none-win32.whl", hash = "sha256:fb5cf0f85692f758a5c36ab65fea96a1284126de64e836610f92ddbb26df5ded", size = 22150756 }, + { url = "https://files.pythonhosted.org/packages/05/27/4f13454b44c9eceaac3d6e4e4efa2230b6712d613ff9bf7df010eef4fd18/wandb-0.23.1-py3-none-win_amd64.whl", hash = "sha256:21c8c56e436eb707b7d54f705652e030d48e5cfcba24cf953823eb652e30e714", size = 22150760 }, + { url = "https://files.pythonhosted.org/packages/30/20/6c091d451e2a07689bfbfaeb7592d488011420e721de170884fedd68c644/wandb-0.23.1-py3-none-win_arm64.whl", hash = "sha256:8aee7f3bb573f2c0acf860f497ca9c684f9b35f2ca51011ba65af3d4592b77c1", size = 20137463 }, ] [[package]] name = "wcwidth" version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286 }, ] [[package]] @@ -4043,92 +4037,92 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687, upload-time = "2025-11-29T02:15:22.841Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960, upload-time = "2025-11-29T02:15:21.13Z" }, + { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960 }, ] [[package]] name = "xxhash" version = "3.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, - { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, - { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, - { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, - { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, - { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, - { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, - { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, - { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, - { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, - { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, - { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, - { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, - { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, - { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, - { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, - { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, - { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, - { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, - { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, - { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, - { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, - { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, - { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, - { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, - { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, - { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, - { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, - { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, - { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, - { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, - { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, - { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, - { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, - { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, - { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, - { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, - { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, - { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, - { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, - { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, - { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, - { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, - { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, - { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, - { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, - { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, - { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, - { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, - { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, - { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, - { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, - { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, - { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, - { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744 }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816 }, + { url = 
"https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035 }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914 }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163 }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411 }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883 }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392 }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898 }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655 }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001 }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431 }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617 }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534 }, + { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876 }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738 }, + { url = 
"https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821 }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127 }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975 }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241 }, + { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471 }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936 }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440 }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990 }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689 }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068 }, + { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495 }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620 }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542 }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880 }, + { url = 
"https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956 }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072 }, + { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409 }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736 }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833 }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348 }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070 }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907 }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839 }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304 }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930 }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787 }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916 }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799 }, + { url = 
"https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044 }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754 }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846 }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343 }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074 }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388 }, + { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614 }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024 }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541 }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305 }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848 }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142 }, + { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547 }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214 
}, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290 }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795 }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955 }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072 }, + { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579 }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854 }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965 }, + { url = 
"https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484 }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162 }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007 }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956 }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401 }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083 }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913 }, + { url = 
"https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586 }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526 }, + { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898 }, ] [[package]] @@ -4140,96 +4134,96 @@ dependencies = [ { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, - { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, - { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, - { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, - { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, - { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, - { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, - { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, - { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, - { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, - { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, - { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, - { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, - { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, - { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, - { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, - { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, - { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, - { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, - { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, - { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, - { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, - { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, - { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, - { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, - { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, - { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, - { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = 
"2025-10-06T14:10:55.767Z" }, - { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, - { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, - { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, - { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, - { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, - { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, - { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, - { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, - { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, - { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, - { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, - { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, - { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, - { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, - { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, - { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, - { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, - { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, - { url = 
"https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, - { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, - { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, - { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, - { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, - { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, - { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, - { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, - { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, - { url = 
"https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, - { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, - { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, - { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, - { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = 
"sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000 }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338 }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909 }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940 }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825 }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705 }, + { url = 
"https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518 }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267 }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797 }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535 }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324 }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803 }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220 }, + { url = 
"https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589 }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213 }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330 }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980 }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424 }, + { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821 }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243 }, + { url = 
"https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361 }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036 }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671 }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059 }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356 }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331 }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590 }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316 }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431 }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555 }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965 }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205 }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209 }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966 }, + { url = 
"https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312 }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967 }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949 }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818 }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626 }, + { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129 }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash 
= "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776 }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879 }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996 }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047 }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947 }, + { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943 }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715 }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857 }, + { url = 
"https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520 }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504 }, + { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282 }, + { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080 }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696 }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121 }, + { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080 }, + { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661 }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645 }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361 }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451 }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814 }, + { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799 }, + { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990 }, + 
{ url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292 }, + { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888 }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223 }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981 }, + { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303 }, + { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820 }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203 }, + { url = 
"https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173 }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562 }, + { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828 }, + { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551 }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512 }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400 }, + { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 
357140 }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473 }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056 }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292 }, + { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171 }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814 }, ] [[package]] name = "zipp" version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 }, ] From 2dd3e8c4422fa5b5e3f4dc16de6cb98973905fa2 Mon Sep 17 00:00:00 2001 From: ealt Date: Tue, 9 Dec 2025 19:14:37 -0800 Subject: [PATCH 02/35] Fix resolve base config bug (#134) --- simplexity/structured_configs/base.py | 36 +++++++-------- tests/structured_configs/test_base_config.py | 48 ++++++++++++++------ 2 files changed, 49 insertions(+), 35 deletions(-) diff --git a/simplexity/structured_configs/base.py b/simplexity/structured_configs/base.py index 946b1043..455daf43 100644 --- a/simplexity/structured_configs/base.py +++ b/simplexity/structured_configs/base.py @@ -54,7 +54,7 @@ def validate_base_config(cfg: DictConfig) -> None: @dynamic_resolve -def resolve_base_config(cfg: DictConfig, *, strict: bool, seed: int = 42, device: str | None = None) -> None: +def resolve_base_config(cfg: DictConfig, *, strict: bool, seed: int | None = None, device: str | None = None) -> None: """Resolve the BaseConfig by setting default values and logging mismatches. This function sets default seed and strict tag values if not present in the config. @@ -64,28 +64,24 @@ def resolve_base_config(cfg: DictConfig, *, strict: bool, seed: int = 42, device Args: cfg: A DictConfig with seed and tags fields (from Hydra). strict: Whether strict mode is enabled. Used to set tags.strict. - seed: The random seed to use. Defaults to 42. - device: The device to use. Defaults to "auto". + seed: The random seed to use. If None, defaults to 42 when config has no seed. 
+ device: The device to use. If None, defaults to "auto" when config has no device. """ - if device is None: - device = "auto" - if cfg.get("device") is None: + device_tag = cfg.get("device") + if device_tag is None: + cfg.device = device or "auto" + elif device and device_tag != device: + SIMPLEXITY_LOGGER.warning( + "Device tag set to '%s', but device is '%s'. Overriding device tag.", device_tag, device + ) cfg.device = device - else: - device_tag: str = cfg.get("device") - if device_tag != device: - SIMPLEXITY_LOGGER.warning( - "Device tag set to '%s', but device is '%s'. Overriding device tag.", device_tag, device - ) - cfg.device = device - - if cfg.get("seed") is None: + + seed_tag = cfg.get("seed") + if seed_tag is None: + cfg.seed = seed if seed is not None else 42 + elif seed is not None and seed_tag != seed: + SIMPLEXITY_LOGGER.warning("Seed tag set to '%s', but seed is '%s'. Overriding seed tag.", seed_tag, seed) cfg.seed = seed - else: - seed_tag: int = cfg.get("seed") - if seed_tag != seed: - SIMPLEXITY_LOGGER.warning("Seed tag set to '%s', but seed is '%s'. 
Overriding seed tag.", seed_tag, seed) - cfg.seed = seed if cfg.get("tags") is None: cfg.tags = DictConfig({"strict": str(strict).lower()}) diff --git a/tests/structured_configs/test_base_config.py b/tests/structured_configs/test_base_config.py index 648c209f..87b7b135 100644 --- a/tests/structured_configs/test_base_config.py +++ b/tests/structured_configs/test_base_config.py @@ -22,8 +22,8 @@ from simplexity.structured_configs.base import resolve_base_config, validate_base_config -class TestBaseConfig: - """Test BaseConfig.""" +class TestValidateBaseConfig: + """Test validate_base_config.""" def test_validate_base_config_valid(self) -> None: """Test validate_base_config with valid configs.""" @@ -113,38 +113,56 @@ def test_validate_base_config_propagates_mlflow_errors(self) -> None: with pytest.raises(ConfigValidationError, match="MLFlowConfig.experiment_name must be a non-empty string"): validate_base_config(cfg) - def test_resolve_base_config(self) -> None: + +class TestResolveBaseConfig: + """Test resolve_base_config.""" + + def test_empty_config_with_explicit_param_values(self) -> None: """Test resolve_base_config with valid configs.""" cfg = DictConfig({}) - resolve_base_config(cfg, strict=True, seed=34, device="gpu") + resolve_base_config(cfg, strict=True, seed=0, device="gpu") assert cfg.device == "gpu" - assert cfg.seed == 34 + assert cfg.seed == 0 assert cfg.tags.strict == "true" - # default seed + def test_empty_config_with_default_param_values(self) -> None: + """Test resolve_base_config with default values.""" cfg = DictConfig({}) resolve_base_config(cfg, strict=False) assert cfg.device == "auto" assert cfg.seed == 42 assert cfg.tags.strict == "false" - def test_resolve_base_config_with_existing_values(self) -> None: - """Test resolve_base_config overrides mismatched seed and strict values.""" + def test_config_with_matching_param_values(self) -> None: + """Test resolve_base_config preserves matching seed, strict and device values.""" # matching values 
- cfg = DictConfig({"seed": 34, "tags": DictConfig({"strict": "true"})}) - resolve_base_config(cfg, strict=True, seed=34) - assert cfg.seed == 34 + cfg = DictConfig({"device": "gpu", "seed": 0, "tags": DictConfig({"strict": "true"})}) + resolve_base_config(cfg, strict=True, seed=0, device="gpu") + assert cfg.device == "gpu" + assert cfg.seed == 0 assert cfg.tags.strict == "true" + def test_config_with_non_matching_param_values(self) -> None: + """Test resolve_base_config overrides mismatched device, seed, and strict values.""" # non-matching values - cfg = DictConfig({"seed": 34, "tags": DictConfig({"strict": "true"})}) + cfg = DictConfig({"device": "gpu", "seed": 34, "tags": DictConfig({"strict": "true"})}) with patch("simplexity.structured_configs.base.SIMPLEXITY_LOGGER.warning") as mock_warning: - resolve_base_config(cfg, strict=False, seed=56) + resolve_base_config(cfg, strict=False, seed=0, device="cpu") mock_warning.assert_has_calls( [ - call("Seed tag set to '%s', but seed is '%s'. Overriding seed tag.", 34, 56), + call("Device tag set to '%s', but device is '%s'. Overriding device tag.", "gpu", "cpu"), + call("Seed tag set to '%s', but seed is '%s'. Overriding seed tag.", 34, 0), call("Strict tag set to '%s', but strict mode is '%s'. 
Overriding strict tag.", "true", "false"), ] ) - assert cfg.seed == 56 + assert cfg.device == "cpu" + assert cfg.seed == 0 assert cfg.tags.strict == "false" + + def test_config_with_no_param_values(self) -> None: + """Test resolve_base_config preserves existing config values when not explicitly overridden.""" + cfg = DictConfig({"device": "gpu", "seed": 34}) + resolve_base_config(cfg, strict=False) + assert cfg.device == "gpu" + assert cfg.seed == 34 + assert cfg.tags.strict == "false" From 21efdbb7330b41147fbafc19b6d7ae0c2335cec6 Mon Sep 17 00:00:00 2001 From: ealt Date: Thu, 11 Dec 2025 10:16:42 -0800 Subject: [PATCH 03/35] Update github workflows for dev branch (#133) --- .github/workflows/claude-code-review.yml | 14 +++++++ .github/workflows/copilot-review.yml | 40 +++++++++++++------ .github/workflows/e2e-tests.yaml | 2 +- .github/workflows/simplexity.yaml | 22 +++++++--- .../transition_matrices.py | 13 ++++++ .../test_transition_matrices.py | 33 +++++++++++++++ 6 files changed, 104 insertions(+), 20 deletions(-) diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml index f95a6d6d..7fd0ed0b 100644 --- a/.github/workflows/claude-code-review.yml +++ b/.github/workflows/claude-code-review.yml @@ -4,6 +4,7 @@ name: Claude Code Review on: + workflow_dispatch: workflow_run: workflows: ["simplexity"] types: @@ -11,6 +12,8 @@ on: pull_request: types: - ready_for_review + branches: + - main jobs: claude-review: @@ -23,6 +26,17 @@ jobs: checks: read actions: read + # Only run if: + # 1. Manually triggered (workflow_dispatch) + # 2. PR event targeting main + # 3. Workflow_run event where head_branch is main (push to main) + # 4. 
Workflow_run event where associated PR targets main + if: > + github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && github.event.pull_request.base.ref == 'main') || + (github.event_name == 'workflow_run' && github.event.workflow_run.head_branch == 'main') || + (github.event_name == 'workflow_run' && github.event.workflow_run.event == 'pull_request' && github.base_ref == 'main') + steps: - name: Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/copilot-review.yml b/.github/workflows/copilot-review.yml index 497196c3..7c16d15c 100644 --- a/.github/workflows/copilot-review.yml +++ b/.github/workflows/copilot-review.yml @@ -1,7 +1,7 @@ # GitHub Copilot Code Review # # This workflow requests a review from GitHub Copilot only after all checks pass. -# +# # IMPORTANT: To use this workflow, you must DISABLE the automatic Copilot review # in your repository ruleset (Settings > Rules > Rulesets). Otherwise, Copilot # will be added as a reviewer immediately when the PR is created, defeating the @@ -16,6 +16,7 @@ name: Copilot Code Review on: + workflow_dispatch: workflow_run: workflows: ["simplexity"] types: @@ -23,6 +24,8 @@ on: pull_request: types: - ready_for_review + branches: + - main jobs: copilot-review: @@ -33,7 +36,18 @@ jobs: issues: read checks: read actions: read - + + # Only run if: + # 1. Manually triggered (workflow_dispatch) + # 2. PR event targeting main + # 3. Workflow_run event where head_branch is main (push to main) + # 4. 
Workflow_run event where associated PR targets main + if: > + github.event_name == 'workflow_dispatch' || + (github.event_name == 'pull_request' && github.event.pull_request.base.ref == 'main') || + (github.event_name == 'workflow_run' && github.event.workflow_run.head_branch == 'main') || + (github.event_name == 'workflow_run' && github.event.workflow_run.event == 'pull_request' && github.base_ref == 'main') + steps: - name: Checkout repository uses: actions/checkout@v4 @@ -47,7 +61,7 @@ jobs: with: script: | let pr; - + if (context.eventName === 'workflow_run') { // Triggered by workflow_run - check if workflow succeeded const workflowRun = context.payload.workflow_run; @@ -99,14 +113,14 @@ jobs: return; } } - + // Skip if PR is a draft if (pr.draft === true) { core.info(`PR #${pr.number} is a draft - skipping Copilot review`); core.setFailed('PR is a draft'); return; } - + core.info(`Processing PR #${pr.number} - all checks have passed`); - name: Check and request Copilot review @@ -115,7 +129,7 @@ jobs: with: script: | let pr; - + if (context.eventName === 'workflow_run') { const workflowRun = context.payload.workflow_run; const prs = await github.rest.pulls.list({ @@ -128,35 +142,35 @@ jobs: } else { pr = context.payload.pull_request; } - + const prNumber = pr.number; - + // Get current reviewers to check if Copilot is already a reviewer const reviews = await github.rest.pulls.listRequestedReviewers({ owner: context.repo.owner, repo: context.repo.repo, pull_number: prNumber, }); - + // Check if Copilot is already a requested reviewer const existingReviewers = [ ...(reviews.data.users || []).map(u => u.login.toLowerCase()), ...(reviews.data.teams || []).map(t => t.slug.toLowerCase()), ]; - + const copilotIdentifiers = ['github-copilot', 'copilot', 'github-copilot[bot]']; const hasCopilot = existingReviewers.some(r => copilotIdentifiers.some(id => r.includes(id)) ); - + if (hasCopilot) { core.info('GitHub Copilot is already a requested reviewer for this PR - 
skipping'); return; } - + // Try to request Copilot as a reviewer core.info(`Requesting Copilot review for PR #${prNumber}...`); - + try { // Note: The GitHub API may not directly support requesting Copilot as a reviewer // since it's an app, not a user. This is a best-effort attempt. diff --git a/.github/workflows/e2e-tests.yaml b/.github/workflows/e2e-tests.yaml index 794b658b..16ea4a42 100644 --- a/.github/workflows/e2e-tests.yaml +++ b/.github/workflows/e2e-tests.yaml @@ -4,6 +4,7 @@ on: push: branches: - main + - dev pull_request: jobs: @@ -24,4 +25,3 @@ jobs: tests/end_to_end/ \ --capture=no \ --verbose - diff --git a/.github/workflows/simplexity.yaml b/.github/workflows/simplexity.yaml index f1013c1d..f42c784b 100644 --- a/.github/workflows/simplexity.yaml +++ b/.github/workflows/simplexity.yaml @@ -2,6 +2,7 @@ on: push: branches: - main + - dev pull_request: jobs: @@ -57,7 +58,7 @@ jobs: TESTMON_FLAG="--testmon-noselect" echo "::notice::Running full test suite (main branch mode)" fi - + # Run tests with coverage tracking (no threshold enforcement) # Coverage is tracked but won't fail - new code in PRs is checked via diff-cover # TODO: Enable strict overall coverage enforcement once coverage improves @@ -69,7 +70,7 @@ jobs: --cov-fail-under=0 \ --ignore=tests/end_to_end \ $TESTMON_FLAG - + # Verify coverage.xml was generated (required for diff-cover and Codecov) # This is ensured by --cov-report=xml in pyproject.toml, but we verify for clarity if [ ! 
-f coverage.xml ]; then @@ -83,16 +84,25 @@ jobs: # Get the base branch (usually main) BASE_BRANCH="${{ github.event.pull_request.base.ref }}" echo "Checking coverage for new code against base branch: ${BASE_BRANCH}" + + # Determine threshold based on branch + if [ "$BASE_BRANCH" = "main" ]; then + FAIL_UNDER=80 + echo "::notice::Enforcing 80% coverage threshold for main branch" + else + FAIL_UNDER=0 + echo "::notice::Skipping coverage enforcement for non-main branch (monitoring only)" + fi + # Run diff-cover to check only new/changed code - # Fail if new code coverage is below 80% # Note: diff-cover only needs --extra dev (tests need all extras for integration testing) if uv run --extra dev diff-cover coverage.xml \ --compare-branch=origin/${BASE_BRANCH} \ - --fail-under=80 \ + --fail-under=$FAIL_UNDER \ --markdown-report diff-coverage-report.md; then - echo "::notice::New code coverage check passed (80% threshold)" + echo "::notice::New code coverage check passed" else - echo "::error::New code coverage is below 80% threshold" + echo "::error::New code coverage is below $FAIL_UNDER% threshold" exit 1 fi continue-on-error: false diff --git a/simplexity/generative_processes/transition_matrices.py b/simplexity/generative_processes/transition_matrices.py index f66b511c..d71aa2ed 100644 --- a/simplexity/generative_processes/transition_matrices.py +++ b/simplexity/generative_processes/transition_matrices.py @@ -95,6 +95,18 @@ def fanizza(alpha: float, lamb: float) -> jax.Array: return jnp.stack([da, db], axis=0) +def leaky_rrxor(p1: float, p2: float, epsilon: float) -> jax.Array: + """Creates a transition matrix for the leaky RRXOR Process.""" + assert 0 <= epsilon <= 1 + + transition_matrices_base = rrxor(p1, p2) + leak = jnp.ones((2, 5, 5)) + + transition_matrices = (1 - epsilon) * transition_matrices_base + (epsilon / 10) * leak + + return transition_matrices + + def matching_parens(open_probs: list[float]) -> jax.Array: """Creates a model for generating Matching 
Parentheses.""" if len(open_probs) < 1: @@ -357,6 +369,7 @@ def zero_one_random(p: float) -> jax.Array: "coin": coin, "days_of_week": days_of_week, "even_ones": even_ones, + "leaky_rrxor": leaky_rrxor, "matching_parens": matching_parens, "mess3": mess3, "mr_name": mr_name, diff --git a/tests/generative_processes/test_transition_matrices.py b/tests/generative_processes/test_transition_matrices.py index e319834e..60a0d2b0 100644 --- a/tests/generative_processes/test_transition_matrices.py +++ b/tests/generative_processes/test_transition_matrices.py @@ -10,6 +10,7 @@ even_ones, fanizza, get_stationary_state, + leaky_rrxor, matching_parens, mess3, mr_name, @@ -113,6 +114,38 @@ def test_fanizza(): assert jnp.allclose(jnp.sum(transition_matrices @ tau, axis=0), tau), "Stochasticity condition not met" +def test_leaky_rrxor(): + """Test the leaky rrxor transition matrices.""" + vocab_size = 2 + num_states = 5 + p1 = 0.5 + p2 = 0.5 + epsilon = 0.1 + transition_matrices = leaky_rrxor(p1=p1, p2=p2, epsilon=epsilon) + assert transition_matrices.shape == (vocab_size, num_states, num_states) + validate_hmm_transition_matrices(transition_matrices) + + base_matrices = rrxor(p1, p2) + diff = jnp.abs(transition_matrices - base_matrices) + leak_value = 1 / (vocab_size * num_states) + min_diff_expected = epsilon * jnp.min(jnp.abs(base_matrices - leak_value)) + max_diff_expected = epsilon * jnp.max(jnp.abs(base_matrices - leak_value)) + assert jnp.allclose(jnp.min(diff), min_diff_expected, rtol=1e-5), ( + f"Minimum difference should be approximately {min_diff_expected}" + ) + assert jnp.allclose(jnp.max(diff), max_diff_expected, rtol=1e-5), ( + f"Maximum difference should be approximately {max_diff_expected}" + ) + + +def test_leaky_rrxor_zero_epsilon(): + """Test that leaky rrxor with epsilon=0 equals regular rrxor.""" + p1, p2 = 0.5, 0.5 + leaky_matrices = leaky_rrxor(p1=p1, p2=p2, epsilon=0.0) + base_matrices = rrxor(p1, p2) + chex.assert_trees_all_close(leaky_matrices, 
base_matrices) + + def test_matching_parens(): """Test the matching parens transition matrices.""" transition_matrices = matching_parens(open_probs=[1.0, 0.5, 0.5]) From 9514ce5ad32af7ea1b01df82f881beae2e89fca0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:23:37 -0800 Subject: [PATCH 04/35] Activation visualizations -> dev (#132) * Enhance PyTorch training with metric tracking and update configuration - Introduced `TrainingMetricTracker` for stateful metric tracking during PyTorch training, allowing for detailed monitoring of loss, learning rates, and parameter updates. - Updated `train_pytorch_model` to integrate the metric tracker, enabling automatic logging of training metrics. - Added new metrics to track cumulative and instantaneous values, including loss averages and parameter norms. - Modified `pyproject.toml` to include `reportUnnecessaryEllipsis` setting and added `diff-cover` as a development dependency. - Expanded the README with documentation on the new `TrainingMetricTracker` and its usage. - Added tests for the metric tracker to ensure accurate reporting of metrics during training. * pylint (#98) * add compatibility for factored states * concrete examples and alternating process * tweaks to vocab sizes * Create design doc * Implement solution * Add plotly support * Disable too many instance attributes in configs * Replace dict with {} * Import altair in a normal way * Remove init * Reorganize altair dependency in pyproject.toml * Fix demo imports * Refactor metric tracker * Update metrics * Add current loss metric enhancements - Introduced additional metrics for tracking loss: minimum loss, moving average (MA), and exponential moving average (EMA). - Updated the `compute` method to return these new metrics alongside the current loss. 
- Enhanced the distance from initialization metric to track the maximum distance encountered during training. * Fix bugs with metrics tracker * Fix loss metrics * update naming * Rename metric tracker * Refactor MetricTracker and metrics initialization - Removed initial_loss and optimal_loss parameters from MetricTracker constructor. - Introduced metric_kwargs to pass additional parameters for metrics initialization. - Updated the _initialize_context and _initialize_metrics methods to accommodate changes. - Enhanced CurrentLossMetric and LossProgressMetric to use kwargs for initialization, improving flexibility. * Refactor MetricTracker and MetricContext to unify named parameters handling - Renamed and consolidated handling of named parameters in MetricTracker and MetricContext. - Updated methods to use a single `named_parameters` attribute instead of separate current and previous parameters. - Adjusted metrics computations to reflect the new structure, ensuring consistency across metrics that rely on named parameters. * Refactor MetricTracker and MetricContext to use unified token count - Renamed `batch_tokens` and `total_tokens` to `num_tokens` in MetricContext and MetricTracker. - Updated metrics calculations in TokensMetric, LearningRateWeightedTokensMetric, and GradientWeightedTokensMetric to reflect the new naming convention. - Enhanced cumulative token tracking for improved clarity and consistency. * Refactor metrics to use update method and improve computation - Updated the `compute` method in various metrics to remove context dependency and introduced an `update` method for state management. - Enhanced metrics such as TokensMetric, LearningRateMetric, and GradientWeightedTokensMetric to maintain internal state for more efficient calculations. - Added new utility functions for L2 norm calculations across collections of tensors, improving performance and clarity in metric computations. 
* Refactor LossProgressMetric to separate update and compute methods - Introduced an `update` method to manage the current loss state, enhancing clarity and separation of concerns. - Updated the `compute` method to calculate progress based on the current loss, improving the metric's functionality. * Update TokensMetric to rename token metrics for clarity - Changed metric keys from "tokens/batch" and "tokens/total" to "tokens/raw" and "tokens/raw/cumulative" to better reflect their purpose and improve consistency in naming conventions. * Clear gradients and learning rates after metric computation in GradientWeightedTokensMetric and FisherInformationMetric for improved state management. * Refactor MetricTracker to enhance metric group handling and requirements management - Updated MetricTracker to initialize metric groups and requirement flags more efficiently. - Modified the update method to support group-specific requirements for learning rates, gradients, and named parameters. - Simplified the initialization of metrics by consolidating logic and improving clarity in the code structure. - Added `update_every_step` attribute to several metrics for better state management during updates. * Add logging for missing update keys in MetricTracker - Introduced logging to warn when required update keys are missing for metric groups. - Enhanced metric group handling by adding a method to identify missing update keys based on the `update_every_step` attribute. - Improved clarity in the metric initialization process by consolidating logic for required metrics. * Refactor L2 norm computation in metrics.py - Simplified the docstring for the _tensor_collection_l2_norms function to focus on its core functionality. - Removed unnecessary casting to CPU in the _named_tensor_distance function to streamline tensor operations. 
* Refactor metric computations to utilize new utility functions - Replaced internal L2 norm and distance calculations in metrics.py with calls to the newly defined tensor_collection_l2_norm and named_tensor_distance functions from pytorch_utils.py. - Updated docstrings for clarity and removed redundant comments to streamline the codebase. * Refactor MetricTracker and metrics protocol for improved clarity - Renamed the TrainingMetric protocol to Metric for better alignment with its purpose. - Updated the MetricTracker's _initialize_metrics method to utilize the new Metric protocol, enhancing type consistency and clarity in metric initialization. * Refactor metrics to utilize tensor_stack_l2_norm for improved efficiency - Replaced instances of tensor_collection_l2_norm with tensor_stack_l2_norm in various metrics for optimized L2 norm calculations. - Simplified the update and compute methods in GradientWeightedTokensMetric, CumulativeParameterUpdateMetric, and FisherInformationMetric to enhance state management and clarity. - Removed redundant internal functions for L2 norm and distance calculations, streamlining the codebase. * Remove metric tracker * add activation analysis work * reformat tests * move protocol and inherit * add example * less jax conversions * jax first * claude feedback * better types * fix tests * fix initialisation from config * use protocol only for duck-typing * error handling * simplified docstrings, unused variables * pyright protocol * analyses tweaks * typing * refactor: Split `MetricTracker.update` into `step` and `update_metrics`, and optimize tensor operations in `named_tensor_distance`, gradient extraction, and parameter snapshots by removing CPU transfers and vectorizing calculations. 
* Add configs and metric tracker in run management * update pr * fix uv * pin for transformer_lens compatibility * add layerwise analysis classes * use correct return class * dataclass access notation * fix lock * ruff format * pylint * remove unused arg * fix tests after refactor * linter happiness * separate responsibilities of generative processes * revert * add activation tracker test * add activation tracker test * final feedback * proper instantiate * no aliasing * remove unneeded sklearn * simplify last token * fix tests after refactor * remove unusd dict and handle div by 0 * add tests to analysis functions * better coverage * make pyright happy * add config coverage * pull out normalization functions * be more explicit about missing data/features * PR feedback * missing docstrings * make methods public and document * prepare options * unnecessary conversion * missing docstring * formatting * use explicit typehints * use prepare options in tests * change tests to JNP * unused import * update test coverage * wip activation visualization * merge * add lock * run with scalars * temporary commit for merge * static analysis checks * update after facet-plots * mute final pylint warnings * fix final static analyses * get rid of type alias * small e2e test only * use activation tracker in e2e * fix yaml structure * remove unused config * fix end to end tests * add more coverage * add more tests * refactor to more modularity * training config * add schedulers, proper bos handling in loop * remove unnecessary comment * add schedulers to e2e configs * handle bos-token behaviour in tests * get rid of large docs file * add LR scheduler tests * add LR schedulers for exact recreations of Adam's plots * fix pyright * only little test * fix colour test * make altair optional * make altair obligatory * simplify conversions * consolidations * Delete tests/end_to_end/configs/demo_config_with_visuals.yaml * Delete tests/end_to_end/configs/demo_config_with_visuals.py * 
consolidation (again) * address PR feedback * better typing --------- Co-authored-by: Eric Alt Co-authored-by: ealt Co-authored-by: Casper Lutzhoft Christensen --- .github/workflows/simplexity.yaml | 2 +- pyproject.toml | 1 + simplexity/activations/activation_analyses.py | 11 + simplexity/activations/activation_tracker.py | 155 +- .../activations/activation_visualizations.py | 282 ++++ .../activations/visualization/__init__.py | 49 + .../visualization/data_structures.py | 66 + .../visualization/dataframe_builders.py | 467 ++++++ .../visualization/field_resolution.py | 158 ++ .../visualization/pattern_expansion.py | 600 +++++++ .../visualization/pattern_utils.py | 139 ++ .../visualization/preprocessing.py | 223 +++ .../activations/visualization_configs.py | 368 ++++ .../activations/visualization_persistence.py | 93 ++ simplexity/analysis/layerwise_analysis.py | 7 + simplexity/analysis/linear_regression.py | 56 +- simplexity/generative_processes/generator.py | 27 +- .../generative_processes/torch_generator.py | 6 +- simplexity/optimization/lr_schedulers.py | 110 ++ simplexity/run_management/components.py | 5 + simplexity/run_management/run_management.py | 49 +- .../learning_rate_scheduler.py | 138 ++ simplexity/structured_configs/optimizer.py | 4 +- simplexity/utils/analysis_utils.py | 21 + simplexity/utils/factoring_utils.py | 8 +- simplexity/visualization/altair_renderer.py | 380 +++++ simplexity/visualization/data_pipeline.py | 194 +++ simplexity/visualization/data_registry.py | 39 + simplexity/visualization/history.py | 105 ++ simplexity/visualization/plotly_renderer.py | 1476 +++++++++++++++++ .../visualization/structured_configs.py | 238 +++ tests/activations/test_activation_analysis.py | 399 ++--- tests/activations/test_activation_tracker.py | 364 ++++ .../test_activation_visualizations.py | 298 ++++ .../activations/test_dataframe_integration.py | 349 ++++ tests/activations/test_field_expansion.py | 984 +++++++++++ tests/activations/test_scalar_history.py | 
405 +++++ .../test_scalar_wildcard_expansion.py | 182 ++ .../activations/test_visualization_modules.py | 974 +++++++++++ .../test_visualization_persistence.py | 89 + tests/analysis/test_layerwise_analysis.py | 30 +- tests/analysis/test_linear_regression.py | 65 + .../configs/activation_tracker/default.yaml | 2 +- .../rmse_over_time_example.yaml | 37 + .../with_factor_visuals.yaml | 162 ++ .../activation_tracker/with_visuals.yaml | 230 +++ .../configs/generative_process/rrxor.yaml | 13 + .../unified_chain_example.yaml | 2 +- .../unified_independent_example.yaml | 21 +- .../unified_independent_example_crazy.yaml | 68 + .../lr_scheduler/reduce_lr_on_plateau.yaml | 10 + .../windowed_reduce_lr_on_plateau.yaml | 12 + .../configs/metric_tracker/basic.yaml | 14 + .../optimizer/pytorch_adam_factored.yaml | 4 + .../predictive_model/tiny_transformer.yaml | 4 +- .../configs/predictive_model/transformer.yaml | 12 +- .../configs/test_metric_tracker.yaml | 15 + tests/end_to_end/configs/training.yaml | 3 + tests/end_to_end/configs/training/full.yaml | 10 +- .../end_to_end/configs/training/minimal.yaml | 8 +- .../end_to_end/configs/training_factored.yaml | 24 + tests/end_to_end/configs/training_test.yaml | 2 + .../configs/visualization/3d_scatter.yaml | 5 + .../configs/visualization/plot/scatter3d.yaml | 26 + .../test_metric_tracker_integration.py | 89 + tests/end_to_end/training.py | 97 +- tests/end_to_end/visualization_3d_demo.py | 201 +++ tests/end_to_end/visualization_demo.py | 105 ++ tests/generative_processes/test_generator.py | 41 + .../test_torch_generator.py | 35 + tests/optimization/test_lr_schedulers.py | 224 +++ .../test_activation_tracker_config.py | 51 +- .../test_learning_rate_scheduler.py | 329 ++++ tests/utils/test_analysis_utils.py | 1 - tests/visualization/test_altair_renderer.py | 330 ++++ tests/visualization/test_data_pipeline.py | 317 ++++ tests/visualization/test_history.py | 155 ++ tests/visualization/test_plotly_renderer.py | 411 +++++ 
tests/visualization/test_renderer_controls.py | 234 +++ uv.lock | 140 ++ 80 files changed, 12651 insertions(+), 409 deletions(-) create mode 100644 simplexity/activations/activation_visualizations.py create mode 100644 simplexity/activations/visualization/__init__.py create mode 100644 simplexity/activations/visualization/data_structures.py create mode 100644 simplexity/activations/visualization/dataframe_builders.py create mode 100644 simplexity/activations/visualization/field_resolution.py create mode 100644 simplexity/activations/visualization/pattern_expansion.py create mode 100644 simplexity/activations/visualization/pattern_utils.py create mode 100644 simplexity/activations/visualization/preprocessing.py create mode 100644 simplexity/activations/visualization_configs.py create mode 100644 simplexity/activations/visualization_persistence.py create mode 100644 simplexity/optimization/lr_schedulers.py create mode 100644 simplexity/structured_configs/learning_rate_scheduler.py create mode 100644 simplexity/visualization/altair_renderer.py create mode 100644 simplexity/visualization/data_pipeline.py create mode 100644 simplexity/visualization/data_registry.py create mode 100644 simplexity/visualization/history.py create mode 100644 simplexity/visualization/plotly_renderer.py create mode 100644 simplexity/visualization/structured_configs.py create mode 100644 tests/activations/test_activation_tracker.py create mode 100644 tests/activations/test_activation_visualizations.py create mode 100644 tests/activations/test_dataframe_integration.py create mode 100644 tests/activations/test_field_expansion.py create mode 100644 tests/activations/test_scalar_history.py create mode 100644 tests/activations/test_scalar_wildcard_expansion.py create mode 100644 tests/activations/test_visualization_modules.py create mode 100644 tests/activations/test_visualization_persistence.py create mode 100644 tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml create mode 
100644 tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml create mode 100644 tests/end_to_end/configs/activation_tracker/with_visuals.yaml create mode 100644 tests/end_to_end/configs/generative_process/rrxor.yaml create mode 100644 tests/end_to_end/configs/generative_process/unified_independent_example_crazy.yaml create mode 100644 tests/end_to_end/configs/lr_scheduler/reduce_lr_on_plateau.yaml create mode 100644 tests/end_to_end/configs/lr_scheduler/windowed_reduce_lr_on_plateau.yaml create mode 100644 tests/end_to_end/configs/metric_tracker/basic.yaml create mode 100644 tests/end_to_end/configs/optimizer/pytorch_adam_factored.yaml create mode 100644 tests/end_to_end/configs/test_metric_tracker.yaml create mode 100644 tests/end_to_end/configs/training_factored.yaml create mode 100644 tests/end_to_end/configs/visualization/3d_scatter.yaml create mode 100644 tests/end_to_end/configs/visualization/plot/scatter3d.yaml create mode 100644 tests/end_to_end/test_metric_tracker_integration.py create mode 100644 tests/end_to_end/visualization_3d_demo.py create mode 100644 tests/end_to_end/visualization_demo.py create mode 100644 tests/optimization/test_lr_schedulers.py create mode 100644 tests/structured_configs/test_learning_rate_scheduler.py create mode 100644 tests/visualization/test_altair_renderer.py create mode 100644 tests/visualization/test_data_pipeline.py create mode 100644 tests/visualization/test_history.py create mode 100644 tests/visualization/test_plotly_renderer.py create mode 100644 tests/visualization/test_renderer_controls.py diff --git a/.github/workflows/simplexity.yaml b/.github/workflows/simplexity.yaml index f42c784b..93762293 100644 --- a/.github/workflows/simplexity.yaml +++ b/.github/workflows/simplexity.yaml @@ -129,4 +129,4 @@ jobs: slug: Astera-org/simplexity verbose: true files: ./coverage.xml - fail_ci_if_error: false + fail_ci_if_error: false \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 
683194fb..c52345df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ dependencies = [ "transformer-lens>=2.15.4", "treescope", "pydantic>=2.12.0", + "altair>=5.3.0", ] [project.optional-dependencies] diff --git a/simplexity/activations/activation_analyses.py b/simplexity/activations/activation_analyses.py index 37de3e7b..51fdb7d9 100644 --- a/simplexity/activations/activation_analyses.py +++ b/simplexity/activations/activation_analyses.py @@ -26,6 +26,11 @@ def use_probs_as_weights(self) -> bool: """Whether to use probabilities as weights for analysis.""" ... + @property + def skip_first_token(self) -> bool: + """Whether to skip the first token (useful for off-manifold initial states).""" + ... + @property def requires_belief_states(self) -> bool: """Whether the analysis needs belief state targets.""" @@ -52,6 +57,7 @@ def __init__( last_token_only: bool = False, concat_layers: bool = False, use_probs_as_weights: bool = True, + skip_first_token: bool = False, ) -> None: analysis_kwargs: dict[str, Any] = { "n_components": n_components, @@ -62,6 +68,7 @@ def __init__( last_token_only=last_token_only, concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, + skip_first_token=skip_first_token, analysis_kwargs=analysis_kwargs, ) @@ -75,6 +82,7 @@ def __init__( last_token_only: bool = False, concat_layers: bool = False, use_probs_as_weights: bool = True, + skip_first_token: bool = False, fit_intercept: bool = True, to_factors: bool = False, ) -> None: @@ -83,6 +91,7 @@ def __init__( last_token_only=last_token_only, concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, + skip_first_token=skip_first_token, analysis_kwargs={"fit_intercept": fit_intercept, "to_factors": to_factors}, ) @@ -96,6 +105,7 @@ def __init__( last_token_only: bool = False, concat_layers: bool = False, use_probs_as_weights: bool = True, + skip_first_token: bool = False, rcond_values: Sequence[float] | None = None, fit_intercept: bool = True, 
to_factors: bool = False, @@ -108,5 +118,6 @@ def __init__( last_token_only=last_token_only, concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, + skip_first_token=skip_first_token, analysis_kwargs=analysis_kwargs, ) diff --git a/simplexity/activations/activation_tracker.py b/simplexity/activations/activation_tracker.py index 0908e4f9..63e274eb 100644 --- a/simplexity/activations/activation_tracker.py +++ b/simplexity/activations/activation_tracker.py @@ -2,15 +2,34 @@ from collections.abc import Mapping from dataclasses import dataclass +from pathlib import Path from typing import Any, NamedTuple import jax import jax.numpy as jnp import numpy as np +import pandas as pd import torch from jax.typing import DTypeLike +from omegaconf import DictConfig from simplexity.activations.activation_analyses import ActivationAnalysis +from simplexity.activations.activation_visualizations import ( + ActivationVisualizationPayload, + PreparedMetadata, + build_visualization_payloads, +) +from simplexity.activations.visualization.pattern_utils import ( + build_wildcard_regex, + has_pattern, + parse_range, + substitute_range, +) +from simplexity.activations.visualization_configs import ( + ActivationVisualizationConfig, + build_activation_visualization_config, +) +from simplexity.activations.visualization_persistence import save_visualization_payloads from simplexity.utils.analysis_utils import build_deduplicated_dataset from simplexity.utils.pytorch_utils import torch_to_jax @@ -22,6 +41,7 @@ class PreparedActivations: activations: Mapping[str, jax.Array] belief_states: jax.Array | tuple[jax.Array, ...] 
| None weights: jax.Array + metadata: PreparedMetadata class PrepareOptions(NamedTuple): @@ -30,6 +50,7 @@ class PrepareOptions(NamedTuple): last_token_only: bool concat_layers: bool use_probs_as_weights: bool + skip_first_token: bool = False def _get_uniform_weights(n_samples: int, dtype: DTypeLike) -> jax.Array: @@ -77,6 +98,7 @@ def prepare_activations( probs=probs, activations_by_layer=activations, select_last_token=prepare_options.last_token_only, + skip_first_token=prepare_options.skip_first_token, ) layer_acts = dataset.activations_by_layer @@ -91,19 +113,38 @@ def prepare_activations( concatenated = jnp.concatenate(list(layer_acts.values()), axis=-1) layer_acts = {"concatenated": concatenated} + metadata = PreparedMetadata( + sequences=dataset.sequences, + steps=np.asarray([len(sequence) for sequence in dataset.sequences], dtype=np.int32), + select_last_token=prepare_options.last_token_only, + ) + return PreparedActivations( activations=layer_acts, belief_states=belief_states, weights=weights, + metadata=metadata, ) class ActivationTracker: """Orchestrates multiple activation analyses with efficient preprocessing.""" - def __init__(self, analyses: Mapping[str, ActivationAnalysis]): + def __init__( + self, + analyses: Mapping[str, ActivationAnalysis], + *, + visualizations: Mapping[str, list[DictConfig | Mapping[str, Any]]] | None = None, + default_backend: str = "altair", + ): """Initialize the tracker with named analyses.""" self._analyses = analyses + self._default_backend = default_backend + self._visualization_specs: dict[str, list[ActivationVisualizationConfig]] = {} + self._scalar_history: dict[str, list[tuple[int, float]]] = {} + if visualizations: + for name, cfgs in visualizations.items(): + self._visualization_specs[name] = [build_activation_visualization_config(cfg) for cfg in cfgs] def analyze( self, @@ -116,7 +157,8 @@ def analyze( | tuple[np.ndarray, ...], probs: jax.Array | torch.Tensor | np.ndarray, activations: Mapping[str, jax.Array | 
torch.Tensor | np.ndarray], - ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: + step: int | None = None, + ) -> tuple[Mapping[str, float], Mapping[str, jax.Array], Mapping[str, ActivationVisualizationPayload]]: """Run all analyses and return namespaced results.""" preprocessing_cache: dict[PrepareOptions, PreparedActivations] = {} @@ -125,6 +167,7 @@ def analyze( analysis.last_token_only, analysis.concat_layers, analysis.use_probs_as_weights, + analysis.skip_first_token, ) config_key = prepare_options @@ -140,12 +183,14 @@ def analyze( all_scalars = {} all_projections = {} + all_visualizations: dict[str, ActivationVisualizationPayload] = {} for analysis_name, analysis in self._analyses.items(): prepare_options = PrepareOptions( analysis.last_token_only, analysis.concat_layers, analysis.use_probs_as_weights, + analysis.skip_first_token, ) prepared = preprocessing_cache[prepare_options] @@ -163,7 +208,109 @@ def analyze( weights=prepared_weights, belief_states=prepared_beliefs, ) - all_scalars.update({f"{analysis_name}/{key}": value for key, value in scalars.items()}) + + namespaced_scalars = {f"{analysis_name}/{key}": value for key, value in scalars.items()} + all_scalars.update(namespaced_scalars) all_projections.update({f"{analysis_name}/{key}": value for key, value in projections.items()}) - return all_scalars, all_projections + if step is not None: + for scalar_key, scalar_value in namespaced_scalars.items(): + if scalar_key not in self._scalar_history: + self._scalar_history[scalar_key] = [] + self._scalar_history[scalar_key].append((step, float(scalar_value))) + + viz_configs = self._visualization_specs.get(analysis_name) + if viz_configs: + np_weights = np.asarray(prepared_weights) + # Handle tuple belief states (factored processes) by stacking to (samples, factors, states) + if prepared_beliefs is None: + np_beliefs = None + elif isinstance(prepared_beliefs, tuple): + # Stack tuple of (samples, states) arrays into (samples, factors, states) + 
np_beliefs = np.stack([np.asarray(b) for b in prepared_beliefs], axis=1) + else: + np_beliefs = np.asarray(prepared_beliefs) + np_projections = {key: np.asarray(value) for key, value in projections.items()} + payloads = build_visualization_payloads( + analysis_name, + viz_configs, + default_backend=self._default_backend, + prepared_metadata=prepared.metadata, + weights=np_weights, + belief_states=np_beliefs, + projections=np_projections, + scalars={f"{analysis_name}/{key}": float(value) for key, value in scalars.items()}, + scalar_history=self._scalar_history, + scalar_history_step=step, + analysis_concat_layers=analysis.concat_layers, + layer_names=list(prepared.activations.keys()), + ) + all_visualizations.update({f"{analysis_name}/{payload.name}": payload for payload in payloads}) + + return all_scalars, all_projections, all_visualizations + + def save_visualizations( + self, + visualizations: Mapping[str, ActivationVisualizationPayload], + root: Path, + step: int, + ) -> Mapping[str, str]: + """Persist visualization payloads to disk with history accumulation.""" + return save_visualization_payloads(visualizations, root, step) + + def get_scalar_history( + self, + pattern: str | None = None, + ) -> dict[str, list[tuple[int, float]]]: + """Get scalar history, optionally filtered by pattern. 
+ + Args: + pattern: Optional wildcard pattern to filter scalar keys (e.g., "layer_*_rmse" or "layer_0...3_loss") + + Returns: + Dictionary mapping scalar names to list of (step, value) tuples + """ + if pattern is None: + return dict(self._scalar_history) + + if not has_pattern(pattern): + # No pattern, just exact match + return {k: v for k, v in self._scalar_history.items() if k == pattern} + + # Expand range patterns to individual patterns + range_result = parse_range(pattern) + if range_result: + start_idx, end_idx = range_result + patterns = [substitute_range(pattern, idx) for idx in range(start_idx, end_idx)] + else: + patterns = [pattern] + + # Match against available keys + matched = {} + for p in patterns: + if "*" in p: + regex = build_wildcard_regex(p, capture=r"([^/]+)") + for key, history in self._scalar_history.items(): + if regex.match(key): + matched[key] = history + else: + if p in self._scalar_history: + matched[p] = self._scalar_history[p] + + return matched + + def get_scalar_history_df(self) -> pd.DataFrame: + """Export scalar history as a tidy pandas DataFrame. 
+ + Returns: + DataFrame with columns: metric, step, value + """ + if not self._scalar_history: + return pd.DataFrame({"metric": [], "step": [], "value": []}) + + rows = [] + for metric_name, history in self._scalar_history.items(): + for step, value in history: + rows.append({"metric": metric_name, "step": step, "value": value}) + + return pd.DataFrame(rows) diff --git a/simplexity/activations/activation_visualizations.py b/simplexity/activations/activation_visualizations.py new file mode 100644 index 00000000..44da4769 --- /dev/null +++ b/simplexity/activations/activation_visualizations.py @@ -0,0 +1,282 @@ +"""Helpers for building activation visualizations from analysis outputs.""" + +from __future__ import annotations + +import re +from collections.abc import Mapping +from typing import Any + +import altair +import numpy as np +import pandas as pd +import plotly.graph_objects as go + +from simplexity.activations.visualization.data_structures import ( + _SCALAR_INDEX_SENTINEL, + ActivationVisualizationPayload, + PreparedMetadata, + VisualizationControlDetail, + VisualizationControlsState, +) +from simplexity.activations.visualization.dataframe_builders import ( + _apply_sampling, + _build_dataframe, + _build_metadata_columns, +) +from simplexity.activations.visualization.preprocessing import _apply_preprocessing +from simplexity.activations.visualization_configs import ( + ActivationVisualizationConfig, + ActivationVisualizationControlsConfig, +) +from simplexity.exceptions import ConfigValidationError +from simplexity.visualization.altair_renderer import build_altair_chart +from simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.plotly_renderer import build_plotly_figure +from simplexity.visualization.structured_configs import PlotConfig + + +def _parse_scalar_expression(expr: str) -> tuple[str, str | None]: + """Parse a scalar expression that may contain an aggregation function. 
+ + Args: + expr: Expression like "layer_0_rmse" or "min(layer_0_rmse)" + + Returns: + Tuple of (scalar_key, aggregation_function or None) + """ + expr = expr.strip() + agg_match = re.match(r"^(min|max|avg|mean|latest|first|last)\((.+)\)$", expr) + if agg_match: + agg_func = agg_match.group(1) + scalar_key = agg_match.group(2).strip() + return (scalar_key, agg_func) + return (expr, None) + + +def _compute_aggregation( + history: list[tuple[int, float]], + agg_func: str, +) -> float: + """Compute aggregation over scalar history. + + Args: + history: List of (step, value) tuples + agg_func: Aggregation function name (min, max, avg, mean, latest, first, last) + + Returns: + Aggregated value + """ + if not history: + raise ConfigValidationError(f"Cannot compute {agg_func} over empty history") + + values = [value for _, value in history] + + if agg_func == "min": + return float(np.min(values)) + elif agg_func == "max": + return float(np.max(values)) + elif agg_func in ("avg", "mean"): + return float(np.mean(values)) + elif agg_func in ("latest", "last"): + return history[-1][1] + elif agg_func == "first": + return history[0][1] + else: + raise ConfigValidationError(f"Unknown aggregation function: {agg_func}") + + +def _render_title_template( + title: str | None, + title_scalars: dict[str, str] | None, + scalars: Mapping[str, float], + scalar_history: Mapping[str, list[tuple[int, float]]], +) -> str | None: + """Render a title template by substituting scalar values and aggregations. 

    Args:
        title: Title string potentially containing format placeholders like {rmse:.3f}
        title_scalars: Mapping from template variable names to scalar keys or expressions
        scalars: Available current scalar values
        scalar_history: Historical scalar values for aggregations

    Returns:
        Rendered title string with scalar values substituted, or None if title is None

    Raises:
        ConfigValidationError: If a referenced scalar (or its history) is
            unavailable, or if the final ``str.format`` call fails.

    Examples:
        title_scalars: {"rmse": "layer_0_rmse", "best": "min(layer_0_rmse)"}
        This will substitute {rmse} with current value and {best} with historical minimum.
    """
    if title is None:
        return None

    if title_scalars is None or not title_scalars:
        return title

    scalar_values = {}
    for var_name, scalar_expr in title_scalars.items():
        scalar_key, agg_func = _parse_scalar_expression(scalar_expr)

        if agg_func is None:
            # No aggregation, use current value
            if scalar_key in scalars:
                scalar_values[var_name] = scalars[scalar_key]
            else:
                raise ConfigValidationError(
                    f"Title template references scalar '{scalar_key}' (var: '{var_name}') but it is not available. "
                    f"Available scalars: {list(scalars.keys())}"
                )
        else:
            # Aggregation requested, use history
            if scalar_key not in scalar_history:
                raise ConfigValidationError(
                    f"Title template requests {agg_func}({scalar_key}) but no history available for '{scalar_key}'. "
                    f"Available history keys: {list(scalar_history.keys())}"
                )
            history = scalar_history[scalar_key]
            scalar_values[var_name] = _compute_aggregation(history, agg_func)

    try:
        return title.format(**scalar_values)
    except (KeyError, ValueError, IndexError) as e:
        # Surface format-spec / placeholder mistakes as config errors with context.
        raise ConfigValidationError(
            f"Failed to render title template '{title}' with values {scalar_values}: {e}"
        ) from e


def _get_facet_columns(viz_cfg: ActivationVisualizationConfig) -> list[str]:
    """Get columns used for faceting/subplots.

    Returns columns that define subplot groups, used for per-subplot sampling.
+ """ + cols = ["layer", "factor", "data_type"] + if viz_cfg.plot and viz_cfg.plot.facet: + if viz_cfg.plot.facet.row: + cols.append(viz_cfg.plot.facet.row) + if viz_cfg.plot.facet.column: + cols.append(viz_cfg.plot.facet.column) + return list(dict.fromkeys(cols)) + + +def build_visualization_payloads( + analysis_name: str, + viz_cfgs: list[ActivationVisualizationConfig], + *, + default_backend: str, + prepared_metadata: PreparedMetadata, + weights: np.ndarray, + belief_states: np.ndarray | None, + projections: Mapping[str, np.ndarray], + scalars: Mapping[str, float], + scalar_history: Mapping[str, list[tuple[int, float]]], + scalar_history_step: int | None, + analysis_concat_layers: bool, + layer_names: list[str], +) -> list[ActivationVisualizationPayload]: + """Materialize and render the configured visualizations for one analysis.""" + payloads: list[ActivationVisualizationPayload] = [] + metadata_columns = _build_metadata_columns(analysis_name, prepared_metadata, weights) + for viz_cfg in viz_cfgs: + dataframe = _build_dataframe( + viz_cfg, + metadata_columns, + projections, + scalars, + scalar_history, + scalar_history_step, + belief_states, + analysis_concat_layers, + layer_names, + ) + if viz_cfg.data_mapping.sampling is not None: + facet_cols = _get_facet_columns(viz_cfg) + dataframe = _apply_sampling(dataframe, viz_cfg.data_mapping.sampling, facet_cols) + dataframe = _apply_preprocessing(dataframe, viz_cfg.preprocessing) + plot_cfg = viz_cfg.resolve_plot_config(default_backend) + + if plot_cfg.guides and plot_cfg.guides.title_scalars: + plot_cfg.guides.title = _render_title_template( + plot_cfg.guides.title, + plot_cfg.guides.title_scalars, + scalars, + scalar_history, + ) + + controls = _build_controls_state(dataframe, viz_cfg.controls) + backend = plot_cfg.backend + figure = render_visualization(plot_cfg, dataframe, controls) + payloads.append( + ActivationVisualizationPayload( + analysis=analysis_name, + name=viz_cfg.name, + backend=backend, + 
figure=figure, + dataframe=dataframe, + controls=controls, + plot_config=plot_cfg, + ) + ) + return payloads + + +def render_visualization( + plot_cfg: PlotConfig, + dataframe: pd.DataFrame, + controls: VisualizationControlsState | None, +) -> altair.Chart | go.Figure: + """Render a visualization figure from plot configuration and dataframe.""" + registry = DictDataRegistry({plot_cfg.data.source: dataframe}) + return _render_plot(plot_cfg, registry, controls) + + +def _render_plot( + plot_cfg: PlotConfig, + registry: DictDataRegistry, + controls: VisualizationControlsState | None, +) -> Any: + if plot_cfg.backend == "plotly": + return build_plotly_figure(plot_cfg, registry, controls=controls) + return build_altair_chart(plot_cfg, registry, controls=controls) + + +def _build_controls_state( + dataframe: pd.DataFrame, controls_cfg: ActivationVisualizationControlsConfig | None +) -> VisualizationControlsState | None: + if controls_cfg is None: + return None + slider = _build_control_detail(dataframe, "slider", controls_cfg.slider, controls_cfg.cumulative) + dropdown = _build_control_detail(dataframe, "dropdown", controls_cfg.dropdown) + toggle = _build_control_detail(dataframe, "toggle", controls_cfg.toggle) + return VisualizationControlsState( + slider=slider, + dropdown=dropdown, + toggle=toggle, + accumulate_steps=controls_cfg.accumulate_steps, + ) + + +def _build_control_detail( + dataframe: pd.DataFrame, + control_type: str, + field: str | None, + cumulative: bool | None = None, +) -> VisualizationControlDetail | None: + if field is None: + return None + if field not in dataframe: + raise ConfigValidationError(f"Control field '{field}' is not present in visualization dataframe.") + options = list(pd.unique(dataframe[field])) + # Filter out "_no_layer_" placeholder used for layer-independent data (e.g., ground truth) + if field == "layer": + options = [opt for opt in options if opt != "_no_layer_"] + return VisualizationControlDetail(type=control_type, 
field=field, options=options, cumulative=cumulative) + + +__all__ = [ + "ActivationVisualizationPayload", + "PreparedMetadata", + "VisualizationControlDetail", + "VisualizationControlsState", + "_SCALAR_INDEX_SENTINEL", + "build_visualization_payloads", + "render_visualization", +] diff --git a/simplexity/activations/visualization/__init__.py b/simplexity/activations/visualization/__init__.py new file mode 100644 index 00000000..72b59cde --- /dev/null +++ b/simplexity/activations/visualization/__init__.py @@ -0,0 +1,49 @@ +"""Visualization subpackage for activation analysis.""" + +from simplexity.activations.visualization.data_structures import ( + _SCALAR_INDEX_SENTINEL, + ActivationVisualizationPayload, + PreparedMetadata, + VisualizationControlDetail, + VisualizationControlsState, +) +from simplexity.activations.visualization.dataframe_builders import ( + _build_dataframe, + _build_metadata_columns, +) +from simplexity.activations.visualization.field_resolution import ( + _lookup_projection_array, + _lookup_scalar_value, + _maybe_component, + _resolve_belief_states, + _resolve_field, +) +from simplexity.activations.visualization.pattern_expansion import ( + _expand_field_mapping, + _has_field_pattern, + _has_key_pattern, + _parse_component_spec, +) +from simplexity.activations.visualization.preprocessing import ( + _apply_preprocessing, +) + +__all__ = [ + "ActivationVisualizationPayload", + "PreparedMetadata", + "VisualizationControlDetail", + "VisualizationControlsState", + "_SCALAR_INDEX_SENTINEL", + "_apply_preprocessing", + "_build_dataframe", + "_build_metadata_columns", + "_expand_field_mapping", + "_has_field_pattern", + "_has_key_pattern", + "_lookup_projection_array", + "_lookup_scalar_value", + "_maybe_component", + "_parse_component_spec", + "_resolve_belief_states", + "_resolve_field", +] diff --git a/simplexity/activations/visualization/data_structures.py b/simplexity/activations/visualization/data_structures.py new file mode 100644 index 
"""Core data structures for activation visualizations."""

from __future__ import annotations

from dataclasses import dataclass
from typing import Any

import altair
import numpy as np
import pandas as pd
import plotly.graph_objects as go

from simplexity.visualization.structured_configs import PlotConfig


@dataclass
class PreparedMetadata:
    """Metadata derived during activation preprocessing."""

    sequences: list[tuple[int, ...]]  # token sequences, one tuple per sample
    steps: np.ndarray  # numeric step/sequence-length per sample
    select_last_token: bool  # whether only the final token's activation was kept


@dataclass
class ActivationVisualizationPayload:
    """Rendered visualization plus auxiliary metadata."""

    analysis: str  # name of the analysis that produced this visualization
    name: str  # visualization config name
    backend: str  # rendering backend ("plotly" or "altair")
    figure: altair.Chart | go.Figure
    dataframe: pd.DataFrame  # the data the figure was rendered from
    controls: VisualizationControlsState | None
    plot_config: PlotConfig  # resolved plot config (title may already be rendered)


@dataclass
class VisualizationControlDetail:
    """Runtime metadata for a single control."""

    type: str  # "slider" | "dropdown" | "toggle"
    field: str  # dataframe column the control filters on
    options: list[Any]  # unique values of that column
    cumulative: bool | None = None  # slider-only: accumulate values up to position


@dataclass
class VisualizationControlsState:
    """Collection of optional control metadata."""

    slider: VisualizationControlDetail | None = None
    dropdown: VisualizationControlDetail | None = None
    toggle: VisualizationControlDetail | None = None
    accumulate_steps: bool = False


# Placeholder substituted for "{index}" when probing scalar keys to infer
# available scalar-series indices.
_SCALAR_INDEX_SENTINEL = "__SCALAR_INDEX_SENTINEL__"

__all__ = [
    "ActivationVisualizationPayload",
    "PreparedMetadata",
    "VisualizationControlDetail",
    "VisualizationControlsState",
    "_SCALAR_INDEX_SENTINEL",
]
"""DataFrame construction for activation visualizations."""

from __future__ import annotations

import re
from
collections.abc import Mapping +from typing import Any + +import numpy as np +import pandas as pd + +from simplexity.activations.visualization.data_structures import ( + _SCALAR_INDEX_SENTINEL, + PreparedMetadata, +) +from simplexity.activations.visualization.field_resolution import _resolve_field +from simplexity.activations.visualization.pattern_expansion import ( + _expand_field_mapping, + _expand_scalar_pattern_keys, + _scalar_pattern_label, +) +from simplexity.activations.visualization.pattern_utils import has_pattern +from simplexity.activations.visualization_configs import ( + ActivationVisualizationConfig, + ActivationVisualizationFieldRef, + SamplingConfig, + ScalarSeriesMapping, +) +from simplexity.exceptions import ConfigValidationError + + +def _build_metadata_columns( + analysis_name: str, + metadata: PreparedMetadata, + weights: np.ndarray, +) -> dict[str, Any]: + """Build base metadata columns for visualization DataFrames.""" + sequences = metadata.sequences + numeric_steps = metadata.steps + sequence_strings = [" ".join(str(token) for token in seq) for seq in sequences] + base = { + "analysis": np.repeat(analysis_name, len(sequences)), + "step": numeric_steps, + "sequence_length": numeric_steps, + "sequence": np.asarray(sequence_strings), + "sample_index": np.arange(len(sequences), dtype=np.int32), + "weight": weights, + } + return base + + +def _extract_base_column_name(column: str, group_value: str) -> str: + """Extract base column name by removing group index from expanded column name. + + For column='factor_0_prob_0' with group_value='0', returns 'prob_0'. + Uses the group_value to identify and remove the group-related part. + + In practice, key-expanded columns will have format prefix_N_suffix where prefix + is the group name (e.g., 'factor') and suffix is the base column (e.g., 'prob_0'). + Columns like 'prob_0' without a clear group prefix are returned unchanged. 
+ """ + # Pattern: prefix_N_suffix (e.g., factor_0_prob_0 -> prob_0) + # Must have alphabetic suffix after the group value underscore to ensure + # we're stripping a real group prefix, not just matching any column ending in _N + pattern = re.compile(rf"^([a-zA-Z][a-zA-Z0-9_]*)_{re.escape(group_value)}_([a-zA-Z].*)$") + match = pattern.match(column) + if match: + return match.group(2) + + # No match - return original column unchanged + # This handles cases like 'prob_0' where there's no group prefix to strip + return column + + +def _build_scalar_dataframe( + mappings: dict[str, ActivationVisualizationFieldRef], + scalars: Mapping[str, float], + scalar_history: Mapping[str, list[tuple[int, float]]], + analysis_name: str, + current_step: int, +) -> pd.DataFrame: + """Build a long-format DataFrame for scalar visualizations supporting both current and historical data.""" + rows: list[dict[str, Any]] = [] + + for field_name, ref in mappings.items(): + if ref.source not in ("scalar_pattern", "scalar_history"): + continue + + if ref.key is None: + raise ConfigValidationError(f"{ref.source} field references must specify a key") + + # Determine which scalar keys this mapping should include + if has_pattern(ref.key): + # Match pattern against both current scalars and history keys + all_available_keys = set(scalars.keys()) | set(scalar_history.keys()) + matched_keys = _expand_scalar_pattern_keys(ref.key, all_available_keys, analysis_name) + else: + matched_keys = [ref.key if "/" in ref.key else f"{analysis_name}/{ref.key}"] + + for scalar_key in matched_keys: + if ref.source == "scalar_pattern": + # scalar_pattern: Always use current scalar values + # This ensures compatibility with accumulate_steps file persistence + if scalar_key in scalars: + value = scalars[scalar_key] + rows.append( + { + "step": current_step, + "layer": _scalar_pattern_label(scalar_key), + field_name: value, + "metric": scalar_key, + } + ) + elif ref.source == "scalar_history": + # scalar_history: Use 
full in-memory history + if scalar_key in scalar_history and scalar_history[scalar_key]: + for step, value in scalar_history[scalar_key]: + rows.append( + { + "step": step, + "layer": _scalar_pattern_label(scalar_key), + field_name: value, + "metric": scalar_key, + } + ) + elif scalar_key in scalars: + # No history yet, use current value + value = scalars[scalar_key] + rows.append( + { + "step": current_step, + "layer": _scalar_pattern_label(scalar_key), + field_name: value, + "metric": scalar_key, + } + ) + + if not rows: + raise ConfigValidationError( + "Scalar visualization could not find any matching scalar values. " + f"Available keys: {list(scalars.keys())}, History keys: {list(scalar_history.keys())}" + ) + + return pd.DataFrame(rows) + + +def _build_scalar_series_dataframe( + mapping: ScalarSeriesMapping, + metadata_columns: Mapping[str, Any], + scalars: Mapping[str, float], + layer_names: list[str], + analysis_name: str, +) -> pd.DataFrame: + """Build a DataFrame from scalar series data.""" + base_metadata = _scalar_series_metadata(metadata_columns) + rows: list[dict[str, Any]] = [] + for layer_name in layer_names: + index_values = mapping.index_values or _infer_scalar_series_indices(mapping, scalars, layer_name, analysis_name) + for index_value in index_values: + raw_key = mapping.key_template.format(layer=layer_name, index=index_value) + scalar_key = f"{analysis_name}/{raw_key}" + scalar_value = scalars.get(scalar_key) + if scalar_value is None: + continue + row: dict[str, Any] = { + mapping.index_field: index_value, + mapping.value_field: scalar_value, + "layer": layer_name, + } + row.update(base_metadata) + rows.append(row) + if not rows: + raise ConfigValidationError( + "Scalar series visualization could not resolve any scalar values with the provided key_template." 
+ ) + return pd.DataFrame(rows) + + +def _infer_scalar_series_indices( + mapping: ScalarSeriesMapping, + scalars: Mapping[str, float], + layer_name: str, + analysis_name: str, +) -> list[int]: + """Infer available indices for scalar series from available scalar keys.""" + raw_template = mapping.key_template.format(layer=layer_name, index=_SCALAR_INDEX_SENTINEL) + template = f"{analysis_name}/{raw_template}" + if _SCALAR_INDEX_SENTINEL not in template: + raise ConfigValidationError( + "scalar_series.key_template must include '{index}' placeholder to infer index values." + ) + prefix, suffix = template.split(_SCALAR_INDEX_SENTINEL, 1) + inferred: set[int] = set() + for key in scalars: + if not key.startswith(prefix): + continue + if suffix and not key.endswith(suffix): + continue + body = key[len(prefix) : len(key) - len(suffix) if suffix else None] + if not body: + continue + try: + inferred.add(int(body)) + except ValueError: + continue + if not inferred: + raise ConfigValidationError( + f"Scalar series could not infer indices for layer '{layer_name}' " + f"using key_template '{mapping.key_template}'." 
+ ) + return sorted(inferred) + + +def _scalar_series_metadata(metadata_columns: Mapping[str, Any]) -> dict[str, Any]: + """Extract scalar metadata from metadata columns.""" + metadata: dict[str, Any] = {} + for key, value in metadata_columns.items(): + if isinstance(value, np.ndarray): + if value.size == 0: + continue + metadata[key] = value.flat[0] + else: + metadata[key] = value + return metadata + + +def _build_dataframe_for_mappings( + mappings: dict[str, ActivationVisualizationFieldRef], + metadata_columns: Mapping[str, Any], + projections: Mapping[str, np.ndarray], + scalars: Mapping[str, float], + belief_states: np.ndarray | None, + analysis_concat_layers: bool, + layer_names: list[str], +) -> pd.DataFrame: + """Build a DataFrame from a single set of mappings (used by both regular and combined modes).""" + base_rows = len(metadata_columns["step"]) + frames: list[pd.DataFrame] = [] + + # Check if mappings are belief-state-only (don't need layer iteration) + all_belief_states = all(ref.source == "belief_states" for ref in mappings.values()) + effective_layer_names = ["_no_layer_"] if all_belief_states else layer_names + + for layer_name in effective_layer_names: + # Expand all mappings first + expanded_mappings: dict[str, ActivationVisualizationFieldRef] = {} + for field_name, ref in mappings.items(): + try: + expanded = _expand_field_mapping( + field_name, ref, layer_name, projections, scalars, belief_states, analysis_concat_layers + ) + expanded_mappings.update(expanded) + except ConfigValidationError as e: + raise ConfigValidationError(f"Error expanding '{field_name}' for layer '{layer_name}': {e}") from e + + # Check if any refs have group expansion (_group_value set) + group_refs = {col: ref for col, ref in expanded_mappings.items() if ref._group_value is not None} + non_group_refs = {col: ref for col, ref in expanded_mappings.items() if ref._group_value is None} + + if group_refs: + # Group expansion: restructure to long format + # Group refs by 
_group_value + groups: dict[str, dict[str, ActivationVisualizationFieldRef]] = {} + group_column_name: str | None = None + + for col, ref in group_refs.items(): + group_val = ref._group_value + assert group_val is not None + if group_val not in groups: + groups[group_val] = {} + groups[group_val][col] = ref + + # Extract group column name from group_as + if ref.group_as is not None: + if isinstance(ref.group_as, str): + group_column_name = ref.group_as + elif isinstance(ref.group_as, list) and len(ref.group_as) > 0: + group_column_name = ref.group_as[0] + + if group_column_name is None: + group_column_name = "group" # Default fallback + + # Build DataFrame chunks for each group value + for group_val, group_col_refs in sorted(groups.items(), key=lambda x: int(x[0])): + group_data = {key: np.copy(value) for key, value in metadata_columns.items()} + group_data["layer"] = np.repeat(layer_name, base_rows) + # Ensure group value is always string for consistent faceting + group_data[group_column_name] = np.repeat(str(group_val), base_rows) + + # Add non-group columns (same for all groups) + for column, ref in non_group_refs.items(): + group_data[column] = _resolve_field( + ref, + layer_name, + projections, + scalars, + belief_states, + analysis_concat_layers, + base_rows, + metadata_columns, + ) + + # Add group-specific columns with base names (strip group index) + for column, ref in group_col_refs.items(): + base_col_name = _extract_base_column_name(column, group_val) + group_data[base_col_name] = _resolve_field( + ref, + layer_name, + projections, + scalars, + belief_states, + analysis_concat_layers, + base_rows, + metadata_columns, + ) + + frames.append(pd.DataFrame(group_data)) + else: + # No group expansion: standard DataFrame construction + layer_data = {key: np.copy(value) for key, value in metadata_columns.items()} + layer_data["layer"] = np.repeat(layer_name, base_rows) + + for column, ref in expanded_mappings.items(): + layer_data[column] = _resolve_field( + 
ref, + layer_name, + projections, + scalars, + belief_states, + analysis_concat_layers, + base_rows, + metadata_columns, + ) + frames.append(pd.DataFrame(layer_data)) + + return pd.concat(frames, ignore_index=True) + + +def _build_dataframe( + viz_cfg: ActivationVisualizationConfig, + metadata_columns: Mapping[str, Any], + projections: Mapping[str, np.ndarray], + scalars: Mapping[str, float], + scalar_history: Mapping[str, list[tuple[int, float]]], + scalar_history_step: int | None, + belief_states: np.ndarray | None, + analysis_concat_layers: bool, + layer_names: list[str], +) -> pd.DataFrame: + """Build a DataFrame from visualization configuration.""" + # Handle combined mappings (multiple data sources with labels) + if viz_cfg.data_mapping.combined is not None: + combined_frames: list[pd.DataFrame] = [] + combine_column = viz_cfg.data_mapping.combine_as + assert combine_column is not None, "combine_as should be validated in config" + + for section in viz_cfg.data_mapping.combined: + section_df = _build_dataframe_for_mappings( + section.mappings, + metadata_columns, + projections, + scalars, + belief_states, + analysis_concat_layers, + layer_names, + ) + section_df[combine_column] = section.label + combined_frames.append(section_df) + + return pd.concat(combined_frames, ignore_index=True) + + # Check if this is a scalar_pattern or scalar_history visualization + has_scalar_pattern = any(ref.source == "scalar_pattern" for ref in viz_cfg.data_mapping.mappings.values()) + has_scalar_history = any(ref.source == "scalar_history" for ref in viz_cfg.data_mapping.mappings.values()) + + if has_scalar_pattern or has_scalar_history: + if scalar_history_step is None: + raise ConfigValidationError( + "Visualization uses scalar_pattern/scalar_history " + "source but analyze() was called without the `step` parameter." 
+ ) + if "analysis" not in metadata_columns: + raise ConfigValidationError("scalar_pattern/scalar_history requires 'analysis' in metadata_columns.") + analysis_name = str(metadata_columns["analysis"][0]) + return _build_scalar_dataframe( + viz_cfg.data_mapping.mappings, + scalars, + scalar_history, + analysis_name, + scalar_history_step, + ) + + if viz_cfg.data_mapping.scalar_series is not None: + if "analysis" not in metadata_columns: + raise ConfigValidationError("scalar_series requires 'analysis' in metadata_columns.") + analysis_name = str(metadata_columns["analysis"][0]) + return _build_scalar_series_dataframe( + viz_cfg.data_mapping.scalar_series, + metadata_columns, + scalars, + layer_names, + analysis_name, + ) + + # Standard mappings mode - delegate to helper + return _build_dataframe_for_mappings( + viz_cfg.data_mapping.mappings, + metadata_columns, + projections, + scalars, + belief_states, + analysis_concat_layers, + layer_names, + ) + + +def _apply_sampling( + df: pd.DataFrame, + config: SamplingConfig, + facet_columns: list[str], +) -> pd.DataFrame: + """Sample DataFrame down to max_points per facet group. 

    Args:
        df: The DataFrame to sample
        config: Sampling configuration with max_points and optional seed
        facet_columns: Column names used for faceting/subplots (e.g., layer, factor, data_type)

    Returns:
        Sampled DataFrame with at most max_points rows per facet group
    """
    if config.max_points is None:
        return df

    # Only group on facet columns actually present in this dataframe.
    group_cols = [col for col in facet_columns if col in df.columns]

    if not group_cols:
        if len(df) <= config.max_points:
            return df
        return df.sample(n=config.max_points, random_state=config.seed)

    def sample_group(group: pd.DataFrame) -> pd.DataFrame:
        # Groups already at or under the cap pass through unchanged.
        if len(group) <= config.max_points:  # type: ignore[operator]
            return group
        return group.sample(n=config.max_points, random_state=config.seed)  # type: ignore[arg-type]

    # Use group_keys=True to preserve group columns in index, include_groups=False to avoid FutureWarning,
    # then reset_index to restore group columns as regular columns
    return (
        df.groupby(group_cols, group_keys=True)
        .apply(sample_group, include_groups=False)
        .reset_index(level=group_cols)
        .reset_index(drop=True)
    )


__all__ = [
    "_apply_sampling",
    "_build_dataframe",
    "_build_dataframe_for_mappings",
    "_build_metadata_columns",
    "_build_scalar_dataframe",
    "_build_scalar_series_dataframe",
    "_extract_base_column_name",
    "_infer_scalar_series_indices",
    "_scalar_series_metadata",
]
"""Field resolution from projections, scalars, and belief states."""

from __future__ import annotations

from collections.abc import Mapping

import numpy as np

from simplexity.activations.visualization_configs import ActivationVisualizationFieldRef
from simplexity.exceptions import ConfigValidationError


def _lookup_projection_array(
    projections: Mapping[str, np.ndarray], layer_name: str, key: str | None, concat_layers: bool
) -> np.ndarray:
    """Look up a projection array by key, handling layer naming conventions.

    Projection keys are stored as "<layer>_<key>"; when layers are concatenated
    a bare "<key>" (or any "..._<key>") match is accepted instead.

    Raises:
        ConfigValidationError: If ``key`` is None or no matching entry exists.
    """
    if key is None:
        raise ConfigValidationError("Projection references must supply a `key` value.")
    suffix = f"_{key}"
    for full_key, value in projections.items():
        if concat_layers:
            if full_key.endswith(suffix) or full_key == key:
                return np.asarray(value)
        else:
            if not full_key.endswith(suffix):
                continue
            candidate_layer = full_key[: -len(suffix)]
            if candidate_layer == layer_name:
                return np.asarray(value)
    raise ConfigValidationError(f"Projection '{key}' not available for layer '{layer_name}'.")


def _lookup_scalar_value(scalars: Mapping[str, float], layer_name: str, key: str, concat_layers: bool) -> float:
    """Look up a scalar value by key, handling layer naming conventions.

    Same key convention as :func:`_lookup_projection_array`.

    Raises:
        ConfigValidationError: If no matching scalar exists for the layer.
    """
    suffix = f"_{key}"
    for full_key, value in scalars.items():
        if concat_layers:
            if full_key.endswith(suffix) or full_key == key:
                return float(value)
        else:
            if full_key.endswith(suffix) and full_key[: -len(suffix)] == layer_name:
                return float(value)
    raise ConfigValidationError(f"Scalar '{key}' not available for layer '{layer_name}'.")


def _maybe_component(array: np.ndarray, component: int | None) -> np.ndarray:
    """Extract a component from a 2D array, or return the 1D array as-is.

    Raises:
        ConfigValidationError: On rank/component mismatches (component given
            for 1D input, missing for 2D input, out of bounds, or rank > 2).
    """
    np_array = np.asarray(array)
    if np_array.ndim == 1:
        if component is not None:
            raise ConfigValidationError("Component index is invalid for 1D projection arrays.")
        return np_array
    if np_array.ndim != 2:
        raise ConfigValidationError("Projection arrays must be 1D or 2D.")
    if component is None:
        raise ConfigValidationError("Projection references for 2D arrays must specify `component`.")
    if component < 0 or component >= np_array.shape[1]:
        raise ConfigValidationError(
            f"Component index {component} is out of bounds for projection dimension
{np_array.shape[1]}" + ) + return np_array[:, component] + + +def _resolve_belief_states(belief_states: np.ndarray, ref: ActivationVisualizationFieldRef) -> np.ndarray: + """Resolve belief states to a 1D array based on field reference configuration.""" + np_array = np.asarray(belief_states) + + # Handle factor dimension for 3D belief states (samples, factors, states) + if np_array.ndim == 3: + if ref.factor is None: + raise ConfigValidationError( + f"Belief states have 3 dimensions (samples, factors, states) but no `factor` was specified. " + f"Shape: {np_array.shape}" + ) + if isinstance(ref.factor, str): + raise ConfigValidationError("Factor patterns should be expanded before resolution") + factor_idx = ref.factor + if factor_idx < 0 or factor_idx >= np_array.shape[1]: + raise ConfigValidationError( + f"Belief state factor {factor_idx} is out of bounds for dimension {np_array.shape[1]}" + ) + np_array = np_array[:, factor_idx, :] # Now 2D: (samples, states) + elif np_array.ndim == 2: + if ref.factor is not None: + raise ConfigValidationError( + f"Belief states are 2D but `factor={ref.factor}` was specified. " + f"Factor selection requires 3D belief states (samples, factors, states)." 
+ ) + else: + raise ConfigValidationError(f"Belief states must be 2D or 3D, got {np_array.ndim}D") + + # Now np_array is 2D: (samples, states) + if ref.reducer == "argmax": + return np.argmax(np_array, axis=1) + if ref.reducer == "l2_norm": + return np.linalg.norm(np_array, axis=1) + if isinstance(ref.component, str): + raise ConfigValidationError("Component indices should be expanded before resolution") + component = ref.component if ref.component is not None else 0 + if component < 0 or component >= np_array.shape[1]: + raise ConfigValidationError( + f"Belief state component {component} is out of bounds for dimension {np_array.shape[1]}" + ) + return np_array[:, component] + + +def _resolve_field( + ref: ActivationVisualizationFieldRef, + layer_name: str, + projections: Mapping[str, np.ndarray], + scalars: Mapping[str, float], + belief_states: np.ndarray | None, + analysis_concat_layers: bool, + num_rows: int, + metadata_columns: Mapping[str, object], +) -> np.ndarray: + """Resolve a field reference to a numpy array of values.""" + if ref.source == "metadata": + if ref.key is None: + raise ConfigValidationError("Metadata references must specify `key`.") + if ref.key == "layer": + return np.repeat(layer_name, num_rows) + if ref.key not in metadata_columns: + raise ConfigValidationError(f"Metadata column '{ref.key}' is not available.") + return np.asarray(metadata_columns[ref.key]) + + if ref.source == "weights": + if "weight" not in metadata_columns: + raise ConfigValidationError("Weight metadata is unavailable for visualization mapping.") + return np.asarray(metadata_columns["weight"]) + + if ref.source == "projections": + array = _lookup_projection_array(projections, layer_name, ref.key, analysis_concat_layers) + if isinstance(ref.component, str): + raise ConfigValidationError("Component indices should be expanded before resolution") + return _maybe_component(array, ref.component) + + if ref.source == "belief_states": + if belief_states is None: + raise 
ConfigValidationError("Visualization requests belief_states but they were not retained.") + return _resolve_belief_states(belief_states, ref) + + if ref.source == "scalars": + if ref.key is None: + raise ConfigValidationError("Scalar references must supply `key`.") + value = _lookup_scalar_value(scalars, layer_name, ref.key, analysis_concat_layers) + return np.repeat(value, num_rows) + + raise ConfigValidationError(f"Unsupported field source '{ref.source}'") + + +__all__ = [ + "_lookup_projection_array", + "_lookup_scalar_value", + "_maybe_component", + "_resolve_belief_states", + "_resolve_field", +] diff --git a/simplexity/activations/visualization/pattern_expansion.py b/simplexity/activations/visualization/pattern_expansion.py new file mode 100644 index 00000000..000f43d9 --- /dev/null +++ b/simplexity/activations/visualization/pattern_expansion.py @@ -0,0 +1,600 @@ +"""Pattern parsing and expansion logic for visualization field mappings.""" + +from __future__ import annotations + +import re +from collections.abc import Iterable, Mapping + +import numpy as np + +from simplexity.activations.visualization.field_resolution import _lookup_projection_array +from simplexity.activations.visualization.pattern_utils import ( + build_wildcard_regex, + count_patterns, + has_pattern, + parse_range, + substitute_pattern, + validate_single_pattern, +) +from simplexity.activations.visualization_configs import ActivationVisualizationFieldRef +from simplexity.exceptions import ConfigValidationError + + +def _has_key_pattern(key: str | None) -> bool: + """Check if key contains * or range pattern (e.g., factor_*/projected).""" + if key is None: + return False + validate_single_pattern(key, "Key") + return has_pattern(key) + + +def _has_field_pattern(field_name: str) -> bool: + """Check if field name contains * or range pattern.""" + validate_single_pattern(field_name, "Field name") + return has_pattern(field_name) + + +def _parse_component_spec(component: int | str | None) -> 
tuple[str, int | None, int | None]: + """Parse component into (type, start, end). + + Returns: + - ("single", val, None) for int component + - ("wildcard", None, None) for "*" + - ("range", start, end) for "start...end" + - ("none", None, None) for None + """ + if component is None: + return ("none", None, None) + if isinstance(component, int): + return ("single", component, None) + if component == "*": + return ("wildcard", None, None) + if "..." in component: + parts = component.split("...") + if len(parts) != 2: + raise ConfigValidationError(f"Invalid range: {component}") + try: + start, end = int(parts[0]), int(parts[1]) + if start >= end: + raise ConfigValidationError(f"Range start must be < end: {component}") + return ("range", start, end) + except ValueError as e: + raise ConfigValidationError(f"Invalid range: {component}") from e + raise ConfigValidationError(f"Unrecognized component pattern: {component}") + + +def _expand_pattern_to_indices( + pattern: str, + available_keys: Iterable[str], +) -> list[int]: + """Extract numeric indices from keys matching a wildcard or range pattern. 
+ + Args: + pattern: Pattern with * or N...M + available_keys: Keys to match against + + Returns: + Sorted list of unique indices that match the pattern + """ + if not has_pattern(pattern): + raise ConfigValidationError(f"Pattern '{pattern}' has no wildcard or range") + + if "*" in pattern: + regex_pattern = build_wildcard_regex(pattern) + indices: list[int] = [] + for key in available_keys: + match = regex_pattern.match(key) + if match: + try: + indices.append(int(match.group(1))) + except (ValueError, IndexError): + continue + if not indices: + raise ConfigValidationError(f"No keys found matching pattern '{pattern}'") + return sorted(set(indices)) + else: + range_bounds = parse_range(pattern) + if not range_bounds: + raise ConfigValidationError(f"Invalid range pattern in '{pattern}'") + start_idx, end_idx = range_bounds + return list(range(start_idx, end_idx)) + + +def _get_component_count( + ref: ActivationVisualizationFieldRef, + layer_name: str, + projections: Mapping[str, np.ndarray], + belief_states: np.ndarray | None, + analysis_concat_layers: bool, +) -> int: + """Get number of components available for expansion.""" + if ref.source == "projections": + if ref.key is None: + raise ConfigValidationError("Projection refs require key") + array = _lookup_projection_array(projections, layer_name, ref.key, analysis_concat_layers) + np_array = np.asarray(array) + if np_array.ndim == 1: + raise ConfigValidationError(f"Cannot expand 1D projection '{ref.key}'. 
def _get_component_count(
    ref: ActivationVisualizationFieldRef,
    layer_name: str,
    projections: Mapping[str, np.ndarray],
    belief_states: np.ndarray | None,
    analysis_concat_layers: bool,
) -> int:
    """Get number of components available for expansion."""
    if ref.source == "projections":
        if ref.key is None:
            raise ConfigValidationError("Projection refs require key")
        arr = np.asarray(_lookup_projection_array(projections, layer_name, ref.key, analysis_concat_layers))
        if arr.ndim == 1:
            raise ConfigValidationError(f"Cannot expand 1D projection '{ref.key}'. Patterns require 2D arrays.")
        if arr.ndim != 2:
            raise ConfigValidationError(f"Projection must be 1D or 2D, got {arr.ndim}D")
        return arr.shape[1]

    if ref.source == "belief_states":
        if belief_states is None:
            raise ConfigValidationError("Belief states not available")
        arr = np.asarray(belief_states)
        if arr.ndim != 2:
            raise ConfigValidationError(f"Belief states must be 2D, got {arr.ndim}D")
        return arr.shape[1]

    raise ConfigValidationError(f"Component expansion not supported for source: {ref.source}")


def _expand_projection_key_pattern(
    key_pattern: str,
    layer_name: str,
    projections: Mapping[str, np.ndarray],
    analysis_concat_layers: bool,
) -> dict[str, str]:
    """Expand projection key patterns against available keys.

    Args:
        key_pattern: Pattern like "factor_*/projected" or "factor_0...3/projected"
        layer_name: Current layer name for matching
        projections: Available projection arrays
        analysis_concat_layers: Whether layers were concatenated

    Returns:
        Dict mapping extracted index (as string) to the concrete key suffix.
        E.g., {"0": "factor_0/projected", "1": "factor_1/projected"}
    """
    if "*" not in key_pattern:
        # Explicit range such as "factor_0...3/projected": enumerate it without
        # consulting the available keys.
        bounds = parse_range(key_pattern)
        if not bounds:
            raise ConfigValidationError(f"Invalid key pattern: {key_pattern}")
        lo, hi = bounds
        if lo >= hi:
            raise ConfigValidationError(f"Invalid range in key pattern: {key_pattern}")
        return {str(idx): substitute_pattern(key_pattern, idx) for idx in range(lo, hi)}

    # Wildcard pattern: match against the key suffixes actually present.
    matcher = build_wildcard_regex(key_pattern)
    expansions: dict[str, str] = {}
    for full_key in projections:
        if analysis_concat_layers:
            # Keys are like "factor_0/projected" directly.
            suffix = full_key
        else:
            # Keys are like "layer_name_factor_0/projected".
            prefix = f"{layer_name}_"
            if not full_key.startswith(prefix):
                continue
            suffix = full_key[len(prefix) :]
        hit = matcher.match(suffix)
        if hit and hit.group(1) not in expansions:
            expansions[hit.group(1)] = suffix

    if not expansions:
        raise ConfigValidationError(
            f"No projection keys found matching pattern '{key_pattern}' for layer '{layer_name}'. "
            f"Available keys: {list(projections.keys())}"
        )
    return expansions
def _expand_projection_key_mapping(
    field_name: str,
    ref: ActivationVisualizationFieldRef,
    layer_name: str,
    projections: Mapping[str, np.ndarray],
    belief_states: np.ndarray | None,
    analysis_concat_layers: bool,
) -> dict[str, ActivationVisualizationFieldRef]:
    """Expand projection key patterns, optionally combined with component patterns.

    Handles cross-product expansion when both key and component patterns are present.
    Sets _group_value on expanded refs for DataFrame construction.
    (`belief_states` is accepted for signature parity with the other expanders.)
    """
    assert ref.key is not None, "Key must be provided for projection key pattern expansion"

    # Resolve the key pattern to concrete keys, indexed by their extracted group id.
    key_expansions = _expand_projection_key_pattern(ref.key, layer_name, projections, analysis_concat_layers)

    spec_type, start_idx, end_idx = _parse_component_spec(ref.component)
    expand_components = spec_type in ("wildcard", "range")

    pattern_count = count_patterns(field_name)
    expanded: dict[str, ActivationVisualizationFieldRef] = {}

    for group_idx, concrete_key in sorted(key_expansions.items(), key=lambda item: int(item[0])):
        if not expand_components:
            # Only the key carries a pattern; the component passes through as-is.
            name = substitute_pattern(field_name, int(group_idx))
            expanded[name] = ActivationVisualizationFieldRef(
                source="projections",
                key=concrete_key,
                component=ref.component,
                reducer=ref.reducer,
                group_as=ref.group_as,
                _group_value=str(group_idx),
            )
            continue

        # Component expansion: this specific key's projection determines the width.
        arr = np.asarray(_lookup_projection_array(projections, layer_name, concrete_key, analysis_concat_layers))
        if arr.ndim != 2:
            raise ConfigValidationError(
                f"Component expansion requires 2D projection, got {arr.ndim}D for key '{concrete_key}'"
            )
        max_components = arr.shape[1]

        if spec_type == "wildcard":
            component_indices = list(range(max_components))
        else:
            assert start_idx is not None
            assert end_idx is not None
            if end_idx > max_components:
                raise ConfigValidationError(
                    f"Range {start_idx}...{end_idx} exceeds components ({max_components}) for key '{concrete_key}'"
                )
            component_indices = list(range(start_idx, end_idx))

        # Cross-product: one output field per (key, component) pair.
        for comp_idx in component_indices:
            if pattern_count == 2:
                # Two patterns: first names the key group, second the component.
                name = substitute_pattern(substitute_pattern(field_name, int(group_idx)), comp_idx)
            elif pattern_count == 1:
                # A lone pattern is used for the component; prefix with the group
                # index so names stay unique across groups.
                name = f"factor_{group_idx}_{substitute_pattern(field_name, comp_idx)}"
            else:
                raise ConfigValidationError(
                    f"Field '{field_name}' must have 1-2 patterns for key+component expansion"
                )
            expanded[name] = ActivationVisualizationFieldRef(
                source="projections",
                key=concrete_key,
                component=comp_idx,
                reducer=ref.reducer,
                group_as=ref.group_as,
                _group_value=str(group_idx),
            )

    return expanded
+ """ + np_beliefs = np.asarray(belief_states) + if np_beliefs.ndim != 3: + raise ConfigValidationError( + f"Belief state factor patterns require 3D beliefs (samples, factors, states), got {np_beliefs.ndim}D" + ) + + n_factors = np_beliefs.shape[1] + n_states = np_beliefs.shape[2] + + # Parse factor pattern using _parse_component_spec (same pattern syntax) + try: + factor_spec_type, factor_start, factor_end = _parse_component_spec(ref.factor) + except ConfigValidationError: + raise ConfigValidationError(f"Invalid factor pattern: {ref.factor}") from None + + if factor_spec_type == "wildcard": + factors = list(range(n_factors)) + elif factor_spec_type == "range": + assert factor_start is not None + assert factor_end is not None + if factor_end > n_factors: + raise ConfigValidationError( + f"Factor range {factor_start}...{factor_end} exceeds available factors ({n_factors})" + ) + factors = list(range(factor_start, factor_end)) + else: + raise ConfigValidationError(f"Invalid factor pattern: {ref.factor}") + + # Check if component expansion is also needed + spec_type, start_idx, end_idx = _parse_component_spec(ref.component) + needs_component_expansion = spec_type in ("wildcard", "range") + + expanded: dict[str, ActivationVisualizationFieldRef] = {} + + # Count patterns in field name + total_field_patterns = count_patterns(field_name) + + for factor_idx in factors: + if needs_component_expansion: + # Get component range + if spec_type == "wildcard": + components = list(range(n_states)) + else: + assert start_idx is not None + assert end_idx is not None + if end_idx > n_states: + raise ConfigValidationError(f"Component range {start_idx}...{end_idx} exceeds states ({n_states})") + components = list(range(start_idx, end_idx)) + + # Cross-product: expand both factor and component + for comp_idx in components: + if total_field_patterns == 2: + # Two patterns: first for factor, second for component + expanded_name = substitute_pattern(field_name, factor_idx) + expanded_name = 
substitute_pattern(expanded_name, comp_idx) + elif total_field_patterns == 1: + # Only one pattern in field name - use for component, prefix with factor index + # to ensure unique keys when iterating over multiple factors + expanded_name = f"factor_{factor_idx}_{substitute_pattern(field_name, comp_idx)}" + else: + raise ConfigValidationError( + f"Field '{field_name}' must have 1-2 patterns for factor+component expansion" + ) + + expanded[expanded_name] = ActivationVisualizationFieldRef( + source="belief_states", + key=ref.key, + component=comp_idx, + reducer=ref.reducer, + group_as=ref.group_as, + factor=factor_idx, + _group_value=str(factor_idx), + ) + else: + # Only factor pattern, no component expansion + expanded_name = substitute_pattern(field_name, factor_idx) + + expanded[expanded_name] = ActivationVisualizationFieldRef( + source="belief_states", + key=ref.key, + component=ref.component, + reducer=ref.reducer, + group_as=ref.group_as, + factor=factor_idx, + _group_value=str(factor_idx), + ) + + return expanded + + +def _expand_scalar_keys( + field_pattern: str, + key_pattern: str | None, + layer_name: str, + scalars: Mapping[str, float], +) -> dict[str, str]: + """Expand scalar field patterns by matching available scalar keys. + + Returns dict of expanded field_name → scalar_key. 
+ """ + if key_pattern is None: + raise ConfigValidationError("Scalar wildcard expansion requires a key pattern") + + if not has_pattern(key_pattern): + return {field_pattern: key_pattern} + + indices = _expand_pattern_to_indices(key_pattern, scalars.keys()) + + expanded = {} + for idx in indices: + expanded_field = substitute_pattern(field_pattern, idx) if has_pattern(field_pattern) else field_pattern + expanded_key = substitute_pattern(key_pattern, idx) + expanded[expanded_field] = expanded_key + + return expanded + + +def _expand_scalar_pattern_keys( + pattern: str, + available_keys: Iterable[str], + analysis_name: str, +) -> list[str]: + """Expand wildcard/range pattern against available scalar keys.""" + keys = list(available_keys) + has_prefixed_keys = any("/" in key for key in keys) + prefix = f"{analysis_name}/" + + normalized_pattern = pattern + if "/" not in normalized_pattern and has_prefixed_keys: + normalized_pattern = f"{analysis_name}/{normalized_pattern}" + elif "/" in normalized_pattern and not has_prefixed_keys and normalized_pattern.startswith(prefix): + normalized_pattern = normalized_pattern[len(prefix) :] + + pattern_variants = _expand_scalar_pattern_ranges(normalized_pattern) + matched: list[str] = [] + + for variant in pattern_variants: + if "*" in variant: + escaped = re.escape(variant).replace(r"\*", r"([^/]+)") + regex = re.compile(f"^{escaped}$") + matched.extend(key for key in keys if regex.match(key)) + else: + if variant in keys: + matched.append(variant) + + unique_matches: list[str] = [] + seen: set[str] = set() + for key in matched: + if key not in seen: + seen.add(key) + unique_matches.append(key) + + if not unique_matches: + raise ConfigValidationError(f"No scalar pattern keys found matching pattern '{pattern}'") + + return sorted(unique_matches) + + +def _expand_scalar_pattern_ranges(pattern: str) -> list[str]: + """Expand numeric range tokens (e.g., 0...4) within a scalar pattern.""" + range_bounds = parse_range(pattern) + if 
not range_bounds: + return [pattern] + + start_idx, end_idx = range_bounds + if start_idx >= end_idx: + raise ConfigValidationError(f"Invalid range pattern in scalar pattern key '{pattern}'") + + expanded: list[str] = [] + for idx in range(start_idx, end_idx): + replaced = substitute_pattern(pattern, idx) + expanded.extend(_expand_scalar_pattern_ranges(replaced)) + return expanded + + +def _scalar_pattern_label(full_key: str) -> str: + """Derive a categorical label for scalar pattern rows based on the key.""" + suffix = full_key.split("/", 1)[1] if "/" in full_key else full_key + layer_match = re.search(r"(layer_\d+)", suffix) + if layer_match: + return layer_match.group(1) + return suffix + + +def _expand_field_mapping( + field_name: str, + ref: ActivationVisualizationFieldRef, + layer_name: str, + projections: Mapping[str, np.ndarray], + scalars: Mapping[str, float], + belief_states: np.ndarray | None, + analysis_concat_layers: bool, +) -> dict[str, ActivationVisualizationFieldRef]: + """Expand pattern-based mapping into concrete mappings. + + Returns dict of expanded field_name → FieldRef with concrete component/key values. 
+ """ + # Check for projection key patterns FIRST (allows multiple field patterns for key+component) + if ref.source == "projections" and ref.key and _has_key_pattern(ref.key): + # For key pattern expansion, we allow up to 2 patterns in field name + # (one for key expansion, one for component expansion) + total_field_patterns = count_patterns(field_name) + + if total_field_patterns == 0: + raise ConfigValidationError(f"Projection key pattern '{ref.key}' requires field name pattern") + if total_field_patterns > 2: + raise ConfigValidationError( + f"Field name '{field_name}' has too many patterns (max 2 for key+component expansion)" + ) + + return _expand_projection_key_mapping( + field_name, ref, layer_name, projections, belief_states, analysis_concat_layers + ) + + # Check for belief state factor patterns + if ref.source == "belief_states" and ref.factor is not None and isinstance(ref.factor, str): + has_factor_pattern = ref.factor == "*" or "..." in ref.factor + if has_factor_pattern: + if belief_states is None: + raise ConfigValidationError("Belief state factor patterns require belief_states to be provided") + total_field_patterns = count_patterns(field_name) + + if total_field_patterns == 0: + raise ConfigValidationError(f"Belief state factor pattern '{ref.factor}' requires field name pattern") + if total_field_patterns > 2: + raise ConfigValidationError( + f"Field name '{field_name}' has too many patterns (max 2 for factor+component expansion)" + ) + + return _expand_belief_factor_mapping(field_name, ref, belief_states) + + field_has_pattern = _has_field_pattern(field_name) + + if ref.source == "scalars": + key_has_pattern = ref.key is not None and has_pattern(ref.key) + + if field_has_pattern and not key_has_pattern: + raise ConfigValidationError(f"Field '{field_name}' has pattern but scalar key has no pattern") + if key_has_pattern and not field_has_pattern: + raise ConfigValidationError(f"Scalar key pattern '{ref.key}' requires field name pattern") + + if 
not field_has_pattern: + return {field_name: ref} + + scalar_expansions = _expand_scalar_keys(field_name, ref.key, layer_name, scalars) + return { + field: ActivationVisualizationFieldRef(source="scalars", key=key, component=None, reducer=None) + for field, key in scalar_expansions.items() + } + + spec_type, start_idx, end_idx = _parse_component_spec(ref.component) + needs_expansion = spec_type in ("wildcard", "range") + + if field_has_pattern and not needs_expansion: + raise ConfigValidationError(f"Field '{field_name}' has pattern but component is not wildcard/range") + if needs_expansion and not field_has_pattern: + raise ConfigValidationError(f"Component pattern '{ref.component}' requires field name pattern") + + if not needs_expansion: + return {field_name: ref} + + max_components = _get_component_count(ref, layer_name, projections, belief_states, analysis_concat_layers) + + if spec_type == "wildcard": + components = list(range(max_components)) + else: + assert start_idx is not None, "Range spec must have start index" + assert end_idx is not None, "Range spec must have end index" + if end_idx > max_components: + raise ConfigValidationError( + f"Range {start_idx}...{end_idx} exceeds available components (max: {max_components})" + ) + components = list(range(start_idx, end_idx)) + + expanded = {} + for comp_idx in components: + expanded_name = substitute_pattern(field_name, comp_idx) + + expanded[expanded_name] = ActivationVisualizationFieldRef( + source=ref.source, + key=ref.key, + component=comp_idx, + reducer=ref.reducer, + ) + + return expanded + + +__all__ = [ + "_expand_belief_factor_mapping", + "_expand_field_mapping", + "_expand_pattern_to_indices", + "_expand_projection_key_mapping", + "_expand_projection_key_pattern", + "_expand_scalar_keys", + "_expand_scalar_pattern_keys", + "_expand_scalar_pattern_ranges", + "_get_component_count", + "_has_field_pattern", + "_has_key_pattern", + "_parse_component_spec", + "_scalar_pattern_label", +] diff --git 
a/simplexity/activations/visualization/pattern_utils.py b/simplexity/activations/visualization/pattern_utils.py new file mode 100644 index 00000000..a26febdc --- /dev/null +++ b/simplexity/activations/visualization/pattern_utils.py @@ -0,0 +1,139 @@ +"""Shared pattern detection, parsing, and substitution utilities.""" + +from __future__ import annotations + +import re + +from simplexity.exceptions import ConfigValidationError + +# Compiled regex for range patterns (e.g., "0...10") +RANGE_PATTERN = re.compile(r"(\d+)\.\.\.(\d+)") + + +def count_patterns(text: str) -> int: + """Count wildcard (*) and range (N...M) patterns in text. + + Args: + text: String to check for patterns + + Returns: + Total number of wildcard and range patterns found + """ + return text.count("*") + len(RANGE_PATTERN.findall(text)) + + +def has_pattern(text: str) -> bool: + """Check if text contains any wildcard (*) or range (N...M) pattern. + + Args: + text: String to check for patterns + + Returns: + True if text contains at least one pattern + """ + return "*" in text or bool(RANGE_PATTERN.search(text)) + + +def validate_single_pattern(text: str, context: str) -> None: + """Validate that text has at most one pattern. + + Args: + text: String to validate + context: Description for error message (e.g., "Key", "Field name") + + Raises: + ConfigValidationError: If text contains multiple patterns + """ + if count_patterns(text) > 1: + raise ConfigValidationError(f"{context} cannot have multiple patterns: {text}") + + +def substitute_pattern(text: str, index: int) -> str: + """Replace the first wildcard or range pattern with an index. + + Handles both wildcard (*) and range (N...M) patterns. If both are present, + wildcard takes precedence. 
+ + Args: + text: String containing a pattern + index: Index value to substitute + + Returns: + Text with first pattern replaced by index + """ + if "*" in text: + return text.replace("*", str(index), 1) + return RANGE_PATTERN.sub(str(index), text, count=1) + + +def substitute_range(text: str, index: int) -> str: + """Replace a range pattern (N...M) with an index. + + Args: + text: String containing a range pattern + index: Index value to substitute + + Returns: + Text with range pattern replaced by index + """ + return RANGE_PATTERN.sub(str(index), text, count=1) + + +def parse_range(text: str) -> tuple[int, int] | None: + """Extract (start, end) from a range pattern. + + Args: + text: String potentially containing a range pattern like "0...10" + + Returns: + Tuple of (start, end) if range found, None otherwise + """ + match = RANGE_PATTERN.search(text) + if match: + return int(match.group(1)), int(match.group(2)) + return None + + +def is_valid_range(text: str) -> bool: + """Check if text is a valid range pattern with start < end. + + Args: + text: String to check (e.g., "0...10") + + Returns: + True if text is a valid range pattern with start < end + """ + result = parse_range(text) + if result is None: + return False + start, end = result + return start < end + + +def build_wildcard_regex(pattern: str, capture: str = r"(\d+)") -> re.Pattern[str]: + """Build a regex pattern from a wildcard pattern. + + Escapes special regex characters and replaces * with a capture group. 
def build_wildcard_regex(pattern: str, capture: str = r"(\d+)") -> re.Pattern[str]:
    """Build an anchored regex from a wildcard pattern.

    Escapes regex metacharacters, then turns every * into `capture`.

    Args:
        pattern: String with * wildcard (e.g., "factor_*/projected")
        capture: Regex capture group to replace * with (default: numeric capture)

    Returns:
        Compiled regex pattern for matching
    """
    body = re.escape(pattern).replace(r"\*", capture)
    return re.compile(f"^{body}$")


def _expand_preprocessing_fields(field_patterns: list[str], available_columns: list[str]) -> list[str]:
    """Expand wildcard and range patterns in preprocessing field lists.

    Args:
        field_patterns: List of field names, may contain patterns like "belief_*" or "prob_0...3"
        available_columns: List of column names available in the DataFrame

    Returns:
        Expanded list of field names with patterns replaced by matching columns
    """
    expanded: list[str] = []
    for pattern in field_patterns:
        if not has_pattern(pattern):
            # Literal column name: pass through untouched.
            expanded.append(pattern)
            continue

        bounds = parse_range(pattern)
        if bounds:
            # Range pattern: enumerate each index and require the column to exist.
            for idx in range(bounds[0], bounds[1]):
                candidate = substitute_pattern(pattern, idx)
                if candidate not in available_columns:
                    raise ConfigValidationError(
                        f"Preprocessing pattern '{pattern}' expanded to '{candidate}' "
                        f"but column not found in DataFrame. "
                        f"Available columns: {', '.join(sorted(available_columns))}"
                    )
                expanded.append(candidate)
        elif "*" in pattern:
            # Wildcard pattern: collect matching columns, ordered by extracted index.
            regex = build_wildcard_regex(pattern)
            hits: list[tuple[int, str]] = []
            for col in available_columns:
                found = regex.match(col)
                if not found:
                    continue
                try:
                    hits.append((int(found.group(1)), col))
                except (IndexError, ValueError):
                    continue
            if not hits:
                raise ConfigValidationError(
                    f"Preprocessing pattern '{pattern}' did not match any columns in DataFrame. "
                    f"Available columns: {', '.join(sorted(available_columns))}"
                )
            hits.sort(key=lambda item: item[0])
            expanded.extend(col for _, col in hits)
        else:
            # Defensive: has_pattern() implies one of the branches above matched.
            raise ConfigValidationError(f"Invalid preprocessing field pattern: {pattern}")

    return expanded
" + f"Available columns: {', '.join(sorted(available_columns))}" + ) + # Sort by index and add column names + matches.sort(key=lambda x: x[0]) + expanded.extend([col for _, col in matches]) + else: + raise ConfigValidationError(f"Invalid preprocessing field pattern: {pattern}") + else: + # Not a pattern, just add as-is + expanded.append(pattern) + + return expanded + + +def _apply_preprocessing(dataframe: pd.DataFrame, steps: list[ActivationVisualizationPreprocessStep]) -> pd.DataFrame: + """Apply preprocessing steps to a DataFrame.""" + result = dataframe.copy() + available_columns = list(result.columns) + + for step in steps: + # Validate output_fields don't contain patterns + for output_field in step.output_fields: + if "*" in output_field or "..." in output_field: + raise ConfigValidationError( + f"Preprocessing output_fields cannot contain patterns. Found: '{output_field}'" + ) + + # Expand input_fields patterns + expanded_input_fields = _expand_preprocessing_fields(step.input_fields, available_columns) + + # Create a modified step with expanded fields + expanded_step = ActivationVisualizationPreprocessStep( + type=step.type, input_fields=expanded_input_fields, output_fields=step.output_fields + ) + + if step.type == "project_to_simplex": + result = _project_to_simplex(result, expanded_step) + elif step.type == "combine_rgb": + result = _combine_rgb(result, expanded_step) + else: # pragma: no cover - defensive for future types + raise ConfigValidationError(f"Unsupported preprocessing op '{step.type}'") + + # Update available columns for next step + available_columns = list(result.columns) + + return result + + +def _project_to_simplex(dataframe: pd.DataFrame, step: ActivationVisualizationPreprocessStep) -> pd.DataFrame: + """Project 3D probability coordinates to 2D simplex coordinates.""" + required = step.input_fields + for column in required: + if column not in dataframe: + raise ConfigValidationError( + f"Preprocessing step requires column '{column}' but it 
is missing from the dataframe." + ) + _, p1, p2 = (dataframe[col].astype(float) for col in required) + x = p1 + 0.5 * p2 + y = (np.sqrt(3.0) / 2.0) * p2 + dataframe[step.output_fields[0]] = x + dataframe[step.output_fields[1]] = y + return dataframe + + +def _combine_rgb(dataframe: pd.DataFrame, step: ActivationVisualizationPreprocessStep) -> pd.DataFrame: + """Combine input fields into RGB color values. + + Supports either: + - 3 input fields: Directly map to R, G, B channels + - >3 input fields: Project to 3D via PCA, then map to RGB + """ + # ---- Validation ---- + # Note: input_fields have already been expanded by _expand_preprocessing_fields() + # at this point, so we just validate the expanded result + if len(step.output_fields) != 1: + raise ConfigValidationError("combine_rgb requires exactly one output_field.") + if len(step.input_fields) < 3: + raise ConfigValidationError("combine_rgb requires at least three input_fields.") + + # Make sure all input columns exist + for field in step.input_fields: + if field not in dataframe: + raise ConfigValidationError(f"combine_rgb requires column '{field}' but it is missing from the dataframe.") + + def _channel_to_int(series: pd.Series) -> pd.Series: + return (series.clip(0.0, 1.0) * 255).round().astype(int) + + # ---- Case 1: exactly 3 inputs -> normalize to [0, 1] then map to RGB ---- + if len(step.input_fields) == 3: + rgb = dataframe[list(step.input_fields)].to_numpy(dtype=float) + mins = rgb.min(axis=0) + maxs = rgb.max(axis=0) + ranges = maxs - mins + ranges_safe = np.where(ranges > 0, ranges, 1.0) + rgb = (rgb - mins) / ranges_safe + rgb[:, ranges == 0] = 0.5 + + r_vals = _channel_to_int(pd.Series(rgb[:, 0], index=dataframe.index)) + g_vals = _channel_to_int(pd.Series(rgb[:, 1], index=dataframe.index)) + b_vals = _channel_to_int(pd.Series(rgb[:, 2], index=dataframe.index)) + + # ---- Case 2: >3 inputs -> PCA to 3D, then map to RGB ---- + else: + import jax.numpy as jnp + + # Stack the selected columns into an 
(n_samples, n_features) matrix + X_np = dataframe[step.input_fields].to_numpy(dtype=float) + X_jax = jnp.asarray(X_np) + + # Unweighted PCA (weights=None) to up to 3 components + # We pass n_components=3, but compute_weighted_pca will cap it at min(n_samples, n_features) + # via its own logic if you change it to allow that, or you can just pass None and slice. + pca_res = compute_weighted_pca( + X_jax, + n_components=None, # let it pick max_rank + weights=None, + center=True, + ) + + # Get projected coordinates, shape: (n_samples, k) where k = max_rank + proj = np.asarray(pca_res["X_proj"]) # convert from jax.Array to numpy + + # Ensure we have 3 channels: take first 3 components, pad with zeros if fewer + if proj.shape[1] >= 3: + proj3 = proj[:, :3] + else: + # This is rare (happens when n_samples < 3). Pad extra dims with zeros. + pad_width = 3 - proj.shape[1] + proj3 = np.pad(proj, ((0, 0), (0, pad_width)), mode="constant") + + # Min-max normalize each component to [0, 1] across the dataset + mins = proj3.min(axis=0) + maxs = proj3.max(axis=0) + ranges = maxs - mins + # Avoid divide-by-zero: if range is 0, just leave that channel at 0.5 + ranges_safe = np.where(ranges > 0, ranges, 1.0) + colors = (proj3 - mins) / ranges_safe + colors[:, ranges == 0] = 0.5 + + colors = np.clip(colors, 0.0, 1.0) + + # Turn into Series so we can reuse _channel_to_int + r_vals = _channel_to_int(pd.Series(colors[:, 0], index=dataframe.index)) + g_vals = _channel_to_int(pd.Series(colors[:, 1], index=dataframe.index)) + b_vals = _channel_to_int(pd.Series(colors[:, 2], index=dataframe.index)) + + # ---- Build hex color column ---- + dataframe[step.output_fields[0]] = [ + f"#{rv:02x}{gv:02x}{bv:02x}" for rv, gv, bv in zip(r_vals, g_vals, b_vals, strict=False) + ] + return dataframe + + +__all__ = [ + "_apply_preprocessing", + "_combine_rgb", + "_expand_preprocessing_fields", + "_project_to_simplex", +] diff --git a/simplexity/activations/visualization_configs.py 
b/simplexity/activations/visualization_configs.py new file mode 100644 index 00000000..b9417c19 --- /dev/null +++ b/simplexity/activations/visualization_configs.py @@ -0,0 +1,368 @@ +"""Structured configuration objects for activation visualizations.""" + +from __future__ import annotations + +from collections.abc import Mapping +from dataclasses import dataclass, field, fields, is_dataclass +from typing import Any, Literal, TypeVar, cast, get_args, get_origin, get_type_hints + +from omegaconf import DictConfig, OmegaConf + +from simplexity.activations.visualization.pattern_utils import is_valid_range +from simplexity.exceptions import ConfigValidationError +from simplexity.visualization.structured_configs import ( + DataConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, +) + +FieldSource = Literal[ + "projections", "scalars", "belief_states", "weights", "metadata", "scalar_pattern", "scalar_history" +] +ReducerType = Literal["argmax", "l2_norm"] + +T = TypeVar("T") + + +def _dict_to_visualization_dataclass[T](data: dict[str, Any], schema: type[T]) -> T: + """Convert a dict to a visualization dataclass instance. + + This is a simplified converter specifically for visualization config dataclasses. + It handles nested dataclasses, lists, dicts, and optional fields. 
+ """ + if not is_dataclass(schema): + raise TypeError(f"Expected a dataclass type, got {schema}") + + try: + type_hints = get_type_hints(schema) + except (NameError, TypeError): + type_hints = {f.name: f.type for f in fields(schema)} + + kwargs: dict[str, Any] = {} + for f in fields(schema): + if f.name not in data: + continue + value = data[f.name] + field_type = type_hints.get(f.name, f.type) + kwargs[f.name] = _convert_field_value(value, field_type) + + return schema(**kwargs) + + +def _convert_field_value(value: Any, field_type: Any) -> Any: + """Convert a field value based on its type annotation.""" + if value is None: + return None + + origin = get_origin(field_type) + args = get_args(field_type) + + # Handle list[T] + if origin is list: + item_type = args[0] if args else Any + if is_dataclass(item_type) and isinstance(item_type, type): + return [ + item if isinstance(item, item_type) else _dict_to_visualization_dataclass(item, item_type) + for item in value + ] + return list(value) + + # Handle dict[K, V] + if origin is dict: + value_type = args[1] if len(args) > 1 else Any + if is_dataclass(value_type) and isinstance(value_type, type): + return { + k: v if isinstance(v, value_type) else _dict_to_visualization_dataclass(v, value_type) + for k, v in value.items() + } + return dict(value) + + # Handle Optional[T] / T | None (any Union containing None) + if type(None) in args: + non_none_args = [arg for arg in args if arg is not type(None)] + if non_none_args: + return _convert_field_value(value, non_none_args[0]) + return value + + # Handle nested dataclass + if is_dataclass(field_type) and isinstance(field_type, type): + if isinstance(value, field_type): + return value + if isinstance(value, dict): + return _dict_to_visualization_dataclass(value, field_type) + return value + + return value + + +@dataclass +class ScalarSeriesMapping: + """Describe how to unfold indexed scalar metrics into long-format (tidy) dataframe. 
@dataclass
class ScalarSeriesMapping:
    """Describe how to unfold indexed scalar metrics into long-format (tidy) dataframe.

    This is used for plotting scalar values over an index dimension (e.g., cumulative
    variance vs. component count). For adding scalar values as columns to existing data,
    use wildcard mappings instead: `mappings: {rmse: {source: scalars, key: "layer_0_rmse"}}`.
    """

    # Template that must contain both a {layer} and an {index} placeholder.
    key_template: str
    index_field: str
    value_field: str
    index_values: "list[int] | None" = None

    def __post_init__(self) -> None:
        for placeholder in ("{layer}", "{index}"):
            if placeholder not in self.key_template:
                raise ConfigValidationError(
                    f"scalar_series.key_template must include '{placeholder}' placeholder"
                )
        if self.index_values is not None and not self.index_values:
            raise ConfigValidationError("scalar_series.index_values must not be empty")


@dataclass
class ActivationVisualizationFieldRef:
    """Map a DataFrame column to a specific activation artifact."""

    source: "FieldSource"
    key: "str | None" = None
    component: "int | str | None" = None
    reducer: "ReducerType | None" = None
    group_as: "str | list[str] | None" = None
    factor: "int | str | None" = None  # For selecting factor in factored belief states (3D arrays)
    _group_value: "str | None" = None  # Internal: populated during key/factor pattern expansion

    def __post_init__(self) -> None:
        self._require_key()
        self._validate_component()
        self._validate_key_pattern()
        self._validate_factor()
        self._validate_group_as()

    def _require_key(self) -> None:
        # Sources that read a named artifact must say which artifact to read.
        prefixes = {
            "projections": "Projection",
            "scalars": "Scalar",
            "scalar_pattern": "Scalar pattern",
            "scalar_history": "Scalar history",
            "metadata": "Metadata",
        }
        prefix = prefixes.get(self.source)
        if prefix is not None and not self.key:
            raise ConfigValidationError(f"{prefix} field references must specify the `key` to read from.")

    def _validate_component(self) -> None:
        if not isinstance(self.component, str):
            return
        if self.component != "*" and not is_valid_range(self.component):
            raise ConfigValidationError(f"Component pattern '{self.component}' invalid. Use '*' or 'N...M'")
        if self.source not in ("projections", "belief_states"):
            raise ConfigValidationError(
                f"Component patterns only supported for projections/belief_states, not '{self.source}'"
            )

    def _validate_key_pattern(self) -> None:
        # Key patterns ('*' or 'N...M') expand into several columns, so the
        # expansion needs `group_as` to name the resulting column(s).
        if self.source != "projections" or not self.key:
            return
        if ("*" in self.key or is_valid_range(self.key)) and self.group_as is None:
            raise ConfigValidationError(
                f"Projection key pattern '{self.key}' requires `group_as` to name the expanded column(s)"
            )

    def _validate_factor(self) -> None:
        # `factor` selects one factor of a factored belief state.
        if self.factor is None:
            return
        if self.source != "belief_states":
            raise ConfigValidationError(f"`factor` is only supported for belief_states, not '{self.source}'")
        if isinstance(self.factor, str):
            if (self.factor == "*" or is_valid_range(self.factor)) and self.group_as is None:
                raise ConfigValidationError(
                    f"Factor pattern '{self.factor}' requires `group_as` to name the expanded column(s)"
                )

    def _validate_group_as(self) -> None:
        if self.group_as is not None and self.source not in ("projections", "belief_states"):
            raise ConfigValidationError(
                f"`group_as` is only supported for projections/belief_states, not '{self.source}'"
            )
+ """ + + max_points: int | None = None + seed: int | None = None + + def __post_init__(self) -> None: + if self.max_points is not None and self.max_points <= 0: + raise ConfigValidationError("sampling.max_points must be a positive integer") + + +@dataclass +class CombinedMappingSection: + """A labeled section of field mappings for combining multiple data sources. + + Used to combine projections and ground truth belief states into a single + DataFrame with a label column for faceting (e.g., row faceting by data_type). + """ + + label: str + mappings: dict[str, ActivationVisualizationFieldRef] = field(default_factory=dict) + + def __post_init__(self) -> None: + if not self.mappings: + raise ConfigValidationError(f"Combined mapping section '{self.label}' must have at least one mapping.") + + +@dataclass +class ActivationVisualizationDataMapping: + """Describe how to build the pandas DataFrame prior to rendering.""" + + mappings: dict[str, ActivationVisualizationFieldRef] = field(default_factory=dict) + scalar_series: ScalarSeriesMapping | None = None + combined: list[CombinedMappingSection] | None = None # For combining multiple data sources + combine_as: str | None = None # Column name for section labels (e.g., "data_type") + sampling: SamplingConfig | None = None # Optional sampling to limit visualization size + + def __post_init__(self) -> None: + has_mappings = bool(self.mappings) + has_scalar_series = self.scalar_series is not None + has_combined = self.combined is not None and len(self.combined) > 0 + + if not has_mappings and not has_scalar_series and not has_combined: + raise ConfigValidationError( + "Activation visualization data mapping must include at least one of: " + "mappings, scalar_series, or combined sections." + ) + + if has_combined: + if has_mappings: + raise ConfigValidationError( + "Cannot use both 'mappings' and 'combined' in the same data_mapping. " + "Use 'combined' for multi-source visualizations." 
+ ) + if self.combine_as is None: + raise ConfigValidationError( + "'combine_as' is required when using 'combined' sections to specify the label column name." + ) + + +@dataclass +class ActivationVisualizationPreprocessStep: + """Preprocessing directives applied after the base DataFrame is built.""" + + type: Literal["project_to_simplex", "combine_rgb"] + input_fields: list[str] + output_fields: list[str] + + def __post_init__(self) -> None: + # Check if any input fields contain patterns (wildcards or ranges) + has_pattern = any("*" in field or "..." in field for field in self.input_fields) + + if self.type == "project_to_simplex": + # Skip input validation if patterns present (will be validated at runtime) + if not has_pattern and len(self.input_fields) != 3: + raise ConfigValidationError("project_to_simplex requires exactly three input_fields.") + if len(self.output_fields) != 2: + raise ConfigValidationError("project_to_simplex requires exactly two output_fields.") + elif self.type == "combine_rgb": + # Skip input validation if patterns present (will be validated at runtime) + if not has_pattern and len(self.input_fields) < 3: + raise ConfigValidationError("combine_rgb requires at least three input_fields.") + if len(self.output_fields) != 1: + raise ConfigValidationError("combine_rgb requires exactly one output_field.") + + +@dataclass +class ActivationVisualizationControlsConfig: + """Optional control metadata to drive interactive front-ends.""" + + slider: str | None = None + dropdown: str | None = None + toggle: str | None = None + cumulative: bool = False + accumulate_steps: bool = False + + def __post_init__(self) -> None: + if self.accumulate_steps and self.slider == "step": + raise ConfigValidationError( + "controls.accumulate_steps cannot be used together with slider targeting 'step'." 
@dataclass
class ActivationVisualizationConfig:
    """Full specification for an analysis-attached visualization."""

    name: str
    data_mapping: "ActivationVisualizationDataMapping"
    backend: "str | None" = None
    plot: "PlotConfig | None" = None
    layer: "LayerConfig | None" = None
    size: "PlotSizeConfig | None" = None
    guides: "PlotLevelGuideConfig | None" = None
    preprocessing: "list[ActivationVisualizationPreprocessStep]" = field(default_factory=list)
    controls: "ActivationVisualizationControlsConfig | None" = None

    def resolve_plot_config(self, default_backend: str) -> "PlotConfig":
        """Return a PlotConfig constructed from either `plot` or shorthand fields."""
        if self.plot is not None:
            resolved = self.plot
        elif self.layer is not None:
            # Shorthand: a single layer plus optional top-level size/guides.
            resolved = PlotConfig(
                backend=self.backend or default_backend,
                layers=[self.layer],
                size=self.size or PlotSizeConfig(),
                guides=self.guides or PlotLevelGuideConfig(),
            )
        else:
            raise ConfigValidationError(
                f"Visualization '{self.name}' must specify either a PlotConfig (`plot`) or a single `layer`."
            )

        # Default the data source to "main" and let explicit top-level
        # backend/size/guides override whatever the PlotConfig carried.
        if resolved.data is None:
            resolved.data = DataConfig(source="main")
        else:
            resolved.data.source = resolved.data.source or "main"
        resolved.backend = self.backend or resolved.backend
        if self.size is not None:
            resolved.size = self.size
        if self.guides is not None:
            resolved.guides = self.guides

        uses_rgb = any(step.type == "combine_rgb" for step in self.preprocessing)
        if uses_rgb and resolved.backend != "plotly":
            raise ConfigValidationError("combine_rgb preprocessing requires backend='plotly'")
        return resolved


def _to_dict(cfg: "Mapping[str, Any] | DictConfig") -> dict[str, Any]:
    """Convert OmegaConf or Mapping to a plain dict."""
    if isinstance(cfg, DictConfig):
        container = OmegaConf.to_container(cfg, resolve=False)
        return cast(dict[str, Any], container) if isinstance(container, dict) else {}
    if isinstance(cfg, dict):
        return cfg
    return dict(cfg)


def build_activation_visualization_config(raw_cfg: "Mapping[str, Any]") -> "ActivationVisualizationConfig":
    """Convert a dictionary/OmegaConf config into an ActivationVisualizationConfig dataclass."""
    if isinstance(raw_cfg, ActivationVisualizationConfig):
        return raw_cfg

    cfg_dict = _to_dict(raw_cfg)

    if cfg_dict.get("data_mapping") is None:
        raise ConfigValidationError("Visualization config must include a data_mapping block.")

    return _dict_to_visualization_dataclass(cfg_dict, ActivationVisualizationConfig)
def save_visualization_payloads(
    visualizations: "Mapping[str, ActivationVisualizationPayload]",
    root: Path,
    step: int,
) -> "Mapping[str, str]":
    """Persist visualization payloads, accumulating history for slider controls.

    Returns a mapping from payload key to the HTML path each figure was
    written to; empty input yields an empty mapping and touches no files.
    """
    if not visualizations:
        return {}

    saved: dict[str, str] = {}
    step_dir = root / f"step_{step:05d}"
    step_dir.mkdir(parents=True, exist_ok=True)

    for key, payload in visualizations.items():
        safe_name = key.replace("/", "_")
        figure = _maybe_accumulate_history(payload, root, safe_name, step)

        target_dir = step_dir / payload.analysis
        target_dir.mkdir(parents=True, exist_ok=True)
        target = target_dir / f"{payload.name}.html"

        # Plotly figures and other chart objects expose different HTML writers.
        if isinstance(figure, go.Figure):
            figure.write_html(str(target))
        else:
            figure.save(str(target), format="html")

        saved[key] = str(target)

    return saved


def _maybe_accumulate_history(
    payload: "ActivationVisualizationPayload",
    root: Path,
    safe_name: str,
    step: int,
):
    """Re-render the payload over its accumulated step history when requested."""
    if not _should_accumulate_steps(payload):
        return payload.figure

    data_path, meta_path = history_paths(root, safe_name)
    signature = plot_config_signature(payload.plot_config)
    history_df = load_history_dataframe(data_path, meta_path, expected_signature=signature)

    fresh = payload.dataframe.copy(deep=True)
    if "step" in fresh.columns:
        # Preserve the per-sequence step column before stamping the training step.
        fresh["sequence_step"] = fresh["step"]
    # NOTE(review): stamped unconditionally here; the original indentation is
    # ambiguous in the patch — confirm this is not meant to sit in the branch above.
    fresh["step"] = step

    combined = fresh if history_df.empty else pd.concat([history_df, fresh], ignore_index=True)

    slider = payload.controls.slider if payload.controls else None
    if slider and slider.field in combined.columns:
        slider.options = list(pd.unique(combined[slider.field]))

    save_history_dataframe(
        combined,
        data_path,
        meta_path,
        signature=signature,
        analysis=payload.analysis,
        name=payload.name,
        backend=payload.backend,
    )
    return render_visualization(payload.plot_config, combined, payload.controls)


def _should_accumulate_steps(payload: "ActivationVisualizationPayload") -> bool:
    """History accumulates when explicitly requested or when sliding over 'step'."""
    controls = payload.controls
    if controls is None:
        return False
    if getattr(controls, "accumulate_steps", False):
        return True
    slider = controls.slider
    return slider is not None and slider.field == "step"
a/simplexity/analysis/linear_regression.py b/simplexity/analysis/linear_regression.py index 1e3bcf3e..1ce5c086 100644 --- a/simplexity/analysis/linear_regression.py +++ b/simplexity/analysis/linear_regression.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Mapping, Sequence +from collections.abc import Callable, Mapping, Sequence from typing import Any import jax @@ -136,33 +136,45 @@ def linear_regression_svd( return scalars, projections -def layer_linear_regression( +def _apply_layer_regression( + regression_fn: Callable[..., tuple[Mapping[str, float], Mapping[str, jax.Array]]], layer_activations: jax.Array, weights: jax.Array, - belief_states: jax.Array | tuple[jax.Array, ...] | None, - to_factors: bool = False, + belief_states: jax.Array | tuple[jax.Array, ...], + to_factors: bool, **kwargs: Any, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: - """Layer-wise regression helper that wraps :func:`linear_regression`.""" - if belief_states is None: - raise ValueError("linear_regression requires belief_states") - + """Apply a regression function, optionally per-factor.""" if to_factors: - scalars, projections = {}, {} + scalars: dict[str, float] = {} + projections: dict[str, jax.Array] = {} if not isinstance(belief_states, tuple): raise ValueError("belief_states must be a tuple when to_factors is True") for factor_idx, factor in enumerate(belief_states): if not isinstance(factor, jax.Array): raise ValueError("Each factor in belief_states must be a jax.Array") - factor_scalars, factor_projections = linear_regression(layer_activations, factor, weights, **kwargs) + factor_scalars, factor_projections = regression_fn(layer_activations, factor, weights, **kwargs) for key, value in factor_scalars.items(): scalars[f"factor_{factor_idx}/{key}"] = value for key, value in factor_projections.items(): projections[f"factor_{factor_idx}/{key}"] = value return scalars, projections else: - belief_states = jnp.concatenate(belief_states, 
axis=-1) if isinstance(belief_states, tuple) else belief_states - return linear_regression(layer_activations, belief_states, weights, **kwargs) + targets = jnp.concatenate(belief_states, axis=-1) if isinstance(belief_states, tuple) else belief_states + return regression_fn(layer_activations, targets, weights, **kwargs) + + +def layer_linear_regression( + layer_activations: jax.Array, + weights: jax.Array, + belief_states: jax.Array | tuple[jax.Array, ...] | None, + to_factors: bool = False, + **kwargs: Any, +) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: + """Layer-wise regression helper that wraps :func:`linear_regression`.""" + if belief_states is None: + raise ValueError("linear_regression requires belief_states") + return _apply_layer_regression(linear_regression, layer_activations, weights, belief_states, to_factors, **kwargs) def layer_linear_regression_svd( @@ -175,20 +187,6 @@ def layer_linear_regression_svd( """Layer-wise regression helper that wraps :func:`linear_regression_svd`.""" if belief_states is None: raise ValueError("linear_regression_svd requires belief_states") - - if to_factors: - scalars, projections = {}, {} - if not isinstance(belief_states, tuple): - raise ValueError("belief_states must be a tuple when to_factors is True") - for factor_idx, factor in enumerate(belief_states): - if not isinstance(factor, jax.Array): - raise ValueError("Each factor in belief_states must be a jax.Array") - factor_scalars, factor_projections = linear_regression_svd(layer_activations, factor, weights, **kwargs) - for key, value in factor_scalars.items(): - scalars[f"factor_{factor_idx}/{key}"] = value - for key, value in factor_projections.items(): - projections[f"factor_{factor_idx}/{key}"] = value - return scalars, projections - else: - belief_states = jnp.concatenate(belief_states, axis=-1) if isinstance(belief_states, tuple) else belief_states - return linear_regression_svd(layer_activations, belief_states, weights, **kwargs) + return 
_apply_layer_regression( + linear_regression_svd, layer_activations, weights, belief_states, to_factors, **kwargs + ) diff --git a/simplexity/generative_processes/generator.py b/simplexity/generative_processes/generator.py index d8ec2c34..15c0ddf1 100644 --- a/simplexity/generative_processes/generator.py +++ b/simplexity/generative_processes/generator.py @@ -9,6 +9,8 @@ # (code quality, style, undefined names, etc.) to run normally while bypassing # the problematic imports checker that would crash during AST traversal. +from typing import Any + import equinox as eqx import jax import jax.numpy as jnp @@ -18,14 +20,14 @@ @eqx.filter_jit def generate_data_batch( - gen_states: jax.Array, + gen_states: jax.Array | tuple[jax.Array, ...], data_generator: GenerativeProcess, batch_size: int, sequence_len: int, key: jax.Array, bos_token: int | None = None, eos_token: int | None = None, -) -> tuple[jax.Array, jax.Array, jax.Array]: +) -> tuple[jax.Array | tuple[jax.Array, ...], jax.Array, jax.Array]: """Generate a batch of data without tracking intermediate beliefs.""" batch_keys = jax.random.split(key, batch_size) gen_states, tokens = data_generator.generate(gen_states, batch_keys, sequence_len, False) @@ -42,7 +44,7 @@ def generate_data_batch( @eqx.filter_jit def generate_data_batch_with_full_history( - gen_states: jax.Array, + gen_states: jax.Array | tuple[jax.Array, ...], data_generator: GenerativeProcess, batch_size: int, sequence_len: int, @@ -73,6 +75,19 @@ def generate_data_batch_with_full_history( labels = tokens[:, 1:] prefix_probs = prefix_probs[:, : inputs.shape[1]] + if bos_token is None: + # Drop first belief state since it's the initial state before any token + if isinstance(belief_states, tuple): + belief_states = tuple(b[:, 1:, ...] for b in belief_states) + else: + belief_states = belief_states[:, 1:, ...] + + input_len = inputs.shape[1] + if isinstance(belief_states, tuple): + belief_states = tuple(b[:, :input_len, ...] 
for b in belief_states) + else: + belief_states = belief_states[:, :input_len, ...] + result = { "belief_states": belief_states, "prefix_probabilities": prefix_probs, @@ -85,11 +100,11 @@ def generate_data_batch_with_full_history( def _compute_prefix_probabilities( data_generator: GenerativeProcess, - initial_states: jax.Array, + initial_states: jax.Array | tuple[jax.Array, ...], tokens: jax.Array, ) -> jax.Array: - def run_sequence(state: jax.Array, seq: jax.Array) -> jax.Array: - def step(carry_state: jax.Array, token: jax.Array) -> tuple[jax.Array, jax.Array]: + def run_sequence(state: jax.Array | tuple[jax.Array, ...], seq: jax.Array) -> jax.Array: + def step(carry_state: Any, token: jax.Array) -> tuple[Any, jax.Array]: obs_probs = data_generator.observation_probability_distribution(carry_state) token_prob = obs_probs[token] new_state = data_generator.transition_states(carry_state, token) diff --git a/simplexity/generative_processes/torch_generator.py b/simplexity/generative_processes/torch_generator.py index 53a04980..00f4211a 100644 --- a/simplexity/generative_processes/torch_generator.py +++ b/simplexity/generative_processes/torch_generator.py @@ -23,7 +23,7 @@ def generate_data_batch( - gen_states: jax.Array, + gen_states: jax.Array | tuple[jax.Array, ...], data_generator: GenerativeProcess, batch_size: int, sequence_len: int, @@ -31,7 +31,7 @@ def generate_data_batch( bos_token: int | None = None, eos_token: int | None = None, device: str | torch.device | None = None, -) -> tuple[jax.Array, torch.Tensor, torch.Tensor]: +) -> tuple[jax.Array | tuple[jax.Array, ...], torch.Tensor, torch.Tensor]: """Generate a batch of data. 
Args: @@ -60,7 +60,7 @@ def generate_data_batch( def generate_data_batch_with_full_history( - gen_states: jax.Array, + gen_states: jax.Array | tuple[jax.Array, ...], data_generator: GenerativeProcess, batch_size: int, sequence_len: int, diff --git a/simplexity/optimization/lr_schedulers.py b/simplexity/optimization/lr_schedulers.py new file mode 100644 index 00000000..1168aff1 --- /dev/null +++ b/simplexity/optimization/lr_schedulers.py @@ -0,0 +1,110 @@ +"""Custom learning rate schedulers with windowed averaging.""" + +from collections import deque +from typing import Any, Literal + +from torch.optim import Optimizer +from torch.optim.lr_scheduler import ReduceLROnPlateau + + +class WindowedReduceLROnPlateau(ReduceLROnPlateau): + """ReduceLROnPlateau with windowed average loss comparison. + + Instead of comparing individual loss values, this scheduler compares the + average loss over a sliding window. This smooths out noise in batch losses + and makes the patience mechanism more effective. + + Losses are accumulated every time `step()` is called. The underlying + ReduceLROnPlateau is only updated every `update_every` calls (once the + window has filled), using the average of the last `window_size` losses. + + Args: + optimizer: Wrapped optimizer. + window_size: Number of recent losses to average. Default: 10. + update_every: Only update the scheduler every N steps. Default: 1. + mode: One of "min" or "max". Default: "min". + factor: Factor by which the learning rate will be reduced. Default: 0.1. + patience: Number of updates with no improvement after which LR is reduced. Default: 10. + threshold: Threshold for measuring the new optimum. Default: 1e-4. + threshold_mode: One of "rel" or "abs". Default: "rel". + cooldown: Number of updates to wait before resuming normal operation. Default: 0. + min_lr: Minimum learning rate. Default: 0. + eps: Minimal decay applied to lr. Default: 1e-8. 
+ """ + + def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments + self, + optimizer: Optimizer, + window_size: int = 10, + update_every: int = 1, + mode: Literal["min", "max"] = "min", + factor: float = 0.1, + patience: int = 10, + threshold: float = 1e-4, + threshold_mode: Literal["rel", "abs"] = "rel", + cooldown: int = 0, + min_lr: float | list[float] = 0, + eps: float = 1e-8, + ): + super().__init__( + optimizer, + mode=mode, + factor=factor, + patience=patience, + threshold=threshold, + threshold_mode=threshold_mode, + cooldown=cooldown, + min_lr=min_lr, + eps=eps, + ) + self.window_size = window_size + self.update_every = update_every + self._loss_window: deque[float] = deque(maxlen=window_size) + self._step_count = 0 + + def step(self, metrics: float, epoch: int | None = None) -> None: # type: ignore[override] + """Record a loss value and potentially update LR based on windowed average. + + Losses are accumulated every call. The underlying scheduler is only + updated every `update_every` calls once the window is full. + + Args: + metrics: Current loss value to add to the window. + epoch: Optional epoch number (passed to parent). 
+ """ + current = float(metrics) + self._loss_window.append(current) + self._step_count += 1 + + if len(self._loss_window) < self.window_size: + return + + if self._step_count % self.update_every != 0: + return + + avg_loss = sum(self._loss_window) / len(self._loss_window) + super().step(avg_loss, epoch) + + def get_window_average(self) -> float | None: + """Return the current window average, or None if window not full.""" + if len(self._loss_window) < self.window_size: + return None + return sum(self._loss_window) / len(self._loss_window) + + def state_dict(self) -> dict[str, Any]: + """Return scheduler state including the loss window.""" + state = super().state_dict() + state["window_size"] = self.window_size + state["update_every"] = self.update_every + state["loss_window"] = list(self._loss_window) + state["step_count"] = self._step_count + return state + + def load_state_dict(self, state_dict: dict[str, Any]) -> None: + """Load scheduler state including the loss window.""" + self.window_size = state_dict.pop("window_size", self.window_size) + self.update_every = state_dict.pop("update_every", self.update_every) + loss_window = state_dict.pop("loss_window", []) + self._step_count = state_dict.pop("step_count", 0) + self._loss_window = deque(loss_window, maxlen=self.window_size) + super().load_state_dict(state_dict) diff --git a/simplexity/run_management/components.py b/simplexity/run_management/components.py index 4fb34192..1b32c169 100644 --- a/simplexity/run_management/components.py +++ b/simplexity/run_management/components.py @@ -28,6 +28,7 @@ class Components: persisters: dict[str, ModelPersister] | None = None predictive_models: dict[str, Any] | None = None # TODO: improve typing optimizers: dict[str, Any] | None = None # TODO: improve typing + lr_schedulers: dict[str, Any] | None = None # TODO: improve typing metric_trackers: dict[str, MetricTracker] | None = None activation_trackers: dict[str, ActivationTracker] | None = None @@ -51,6 +52,10 @@ def 
get_optimizer(self, key: str | None = None) -> Any | None: """Get the optimizer.""" return self._get_instance_by_key(self.optimizers, key, "optimizer") + def get_learning_rate_scheduler(self, key: str | None = None) -> Any | None: + """Get the learning rate scheduler.""" + return self._get_instance_by_key(self.lr_schedulers, key, "learning rate scheduler") + def get_metric_tracker(self, key: str | None = None) -> MetricTracker | None: """Get the metric tracker.""" return self._get_instance_by_key(self.metric_trackers, key, "metric tracker") diff --git a/simplexity/run_management/run_management.py b/simplexity/run_management/run_management.py index ed75a730..b59fab73 100644 --- a/simplexity/run_management/run_management.py +++ b/simplexity/run_management/run_management.py @@ -54,6 +54,10 @@ resolve_generative_process_config, validate_generative_process_config, ) +from simplexity.structured_configs.learning_rate_scheduler import ( + is_lr_scheduler_target, + validate_lr_scheduler_config, +) from simplexity.structured_configs.logging import ( is_logger_target, update_logging_instance_config, @@ -504,6 +508,37 @@ def _setup_optimizers( return None +def _instantiate_lr_scheduler(cfg: DictConfig, instance_key: str, optimizer: Any | None) -> Any: + """Setup the learning rate scheduler.""" + instance_config = OmegaConf.select(cfg, instance_key, throw_on_missing=True) + if instance_config: + if optimizer is None: + SIMPLEXITY_LOGGER.warning("No optimizer provided, LR scheduler will be skipped") + return None + lr_scheduler = hydra.utils.instantiate(instance_config, optimizer=optimizer) + SIMPLEXITY_LOGGER.info("[lr_scheduler] instantiated LR scheduler: %s", lr_scheduler.__class__.__name__) + return lr_scheduler + raise KeyError + + +def _setup_lr_schedulers( + cfg: DictConfig, instance_keys: list[str], optimizers: dict[str, Any] | None +) -> dict[str, Any] | None: + """Setup the learning rate schedulers.""" + instance_keys = filter_instance_keys( + cfg, + instance_keys, + 
is_lr_scheduler_target, + validate_fn=validate_lr_scheduler_config, + component_name="lr_scheduler", + ) + if instance_keys: + optimizer = _get_optimizer(optimizers) + return {instance_key: _instantiate_lr_scheduler(cfg, instance_key, optimizer) for instance_key in instance_keys} + SIMPLEXITY_LOGGER.info("[lr_scheduler] no LR scheduler configs found") + return None + + def _instantiate_metric_tracker( cfg: DictConfig, instance_key: str, predictive_model: Any | None, optimizer: Any | None ) -> Any: @@ -548,13 +583,24 @@ def _instantiate_activation_tracker(cfg: DictConfig, instance_key: str) -> Any: if instance_config: tracker_cfg = OmegaConf.create(OmegaConf.to_container(instance_config, resolve=False)) converted_analyses: dict[str, DictConfig] = {} + converted_visualizations: dict[str, list[Any]] = {} analyses_cfg = instance_config.get("analyses") or {} for key, analysis_cfg in analyses_cfg.items(): name_override = analysis_cfg.get("name") + analysis_name = name_override or key cfg_to_instantiate = analysis_cfg.instance - converted_analyses[name_override or key] = cfg_to_instantiate + converted_analyses[analysis_name] = cfg_to_instantiate + + # Extract visualizations for this analysis (if present) + viz_cfg = analysis_cfg.get("visualizations") + if viz_cfg is not None: + viz_container = OmegaConf.to_container(viz_cfg, resolve=False) + assert isinstance(viz_container, list) + converted_visualizations[analysis_name] = viz_container tracker_cfg.analyses = converted_analyses + if converted_visualizations: + tracker_cfg.visualizations = converted_visualizations tracker = hydra.utils.instantiate(tracker_cfg) SIMPLEXITY_LOGGER.info("[activation tracker] instantiated activation tracker: %s", tracker.__class__.__name__) return tracker @@ -608,6 +654,7 @@ def _setup(cfg: DictConfig, strict: bool, verbose: bool) -> Components: components.persisters = _setup_persisters(cfg, instance_keys) components.predictive_models = _setup_predictive_models(cfg, instance_keys, 
components.persisters) components.optimizers = _setup_optimizers(cfg, instance_keys, components.predictive_models) + components.lr_schedulers = _setup_lr_schedulers(cfg, instance_keys, components.optimizers) components.metric_trackers = _setup_metric_trackers( cfg, instance_keys, components.predictive_models, components.optimizers ) diff --git a/simplexity/structured_configs/learning_rate_scheduler.py b/simplexity/structured_configs/learning_rate_scheduler.py new file mode 100644 index 00000000..01a9bb68 --- /dev/null +++ b/simplexity/structured_configs/learning_rate_scheduler.py @@ -0,0 +1,138 @@ +"""Learning rate scheduler configuration dataclasses.""" + +from dataclasses import dataclass + +from omegaconf import DictConfig + +from simplexity.exceptions import ConfigValidationError +from simplexity.structured_configs.instance import InstanceConfig, validate_instance_config +from simplexity.structured_configs.validation import ( + validate_non_negative_float, + validate_non_negative_int, + validate_nonempty_str, + validate_positive_float, + validate_positive_int, +) + + +@dataclass +class ReduceLROnPlateauInstanceConfig(InstanceConfig): + """Configuration for PyTorch ReduceLROnPlateau scheduler.""" + + mode: str = "min" + factor: float = 0.1 + patience: int = 10 + threshold: float = 1e-4 + threshold_mode: str = "rel" + cooldown: int = 0 + min_lr: float = 0.0 + eps: float = 1e-8 + + +@dataclass +class WindowedReduceLROnPlateauInstanceConfig(ReduceLROnPlateauInstanceConfig): + """Configuration for WindowedReduceLROnPlateau scheduler. + + This scheduler compares the average loss over a sliding window instead of + individual loss values, making the patience mechanism more effective for + noisy batch losses. 
+ + Inherits all fields from ReduceLROnPlateauInstanceConfig and adds: + - window_size: Size of the sliding window for loss averaging + - update_every: Frequency of scheduler updates (steps between updates) + """ + + window_size: int = 10 + update_every: int = 1 + + +def is_reduce_lr_on_plateau_config(cfg: DictConfig) -> bool: + """Check if the configuration is a ReduceLROnPlateau scheduler configuration.""" + target = cfg.get("_target_", None) + if isinstance(target, str): + return target == "torch.optim.lr_scheduler.ReduceLROnPlateau" + return False + + +def validate_reduce_lr_on_plateau_instance_config(cfg: DictConfig) -> None: + """Validate a ReduceLROnPlateauInstanceConfig.""" + validate_instance_config(cfg) + mode = cfg.get("mode") + factor = cfg.get("factor") + patience = cfg.get("patience") + threshold = cfg.get("threshold") + cooldown = cfg.get("cooldown") + min_lr = cfg.get("min_lr") + eps = cfg.get("eps") + + if mode is not None and mode not in ("min", "max"): + raise ConfigValidationError(f"ReduceLROnPlateauInstanceConfig.mode must be 'min' or 'max', got {mode}") + validate_positive_float(factor, "ReduceLROnPlateauInstanceConfig.factor", is_none_allowed=True) + validate_non_negative_int(patience, "ReduceLROnPlateauInstanceConfig.patience", is_none_allowed=True) + validate_non_negative_float(threshold, "ReduceLROnPlateauInstanceConfig.threshold", is_none_allowed=True) + validate_non_negative_int(cooldown, "ReduceLROnPlateauInstanceConfig.cooldown", is_none_allowed=True) + validate_non_negative_float(min_lr, "ReduceLROnPlateauInstanceConfig.min_lr", is_none_allowed=True) + validate_non_negative_float(eps, "ReduceLROnPlateauInstanceConfig.eps", is_none_allowed=True) + + +def is_windowed_reduce_lr_on_plateau_config(cfg: DictConfig) -> bool: + """Check if the configuration is a WindowedReduceLROnPlateau scheduler configuration.""" + target = cfg.get("_target_", None) + if isinstance(target, str): + return target == 
"simplexity.lr_schedulers.WindowedReduceLROnPlateau" + return False + + +def validate_windowed_reduce_lr_on_plateau_instance_config(cfg: DictConfig) -> None: + """Validate a WindowedReduceLROnPlateauInstanceConfig.""" + validate_reduce_lr_on_plateau_instance_config(cfg) + window_size = cfg.get("window_size") + update_every = cfg.get("update_every") + + validate_positive_int(window_size, "WindowedReduceLROnPlateauInstanceConfig.window_size", is_none_allowed=True) + validate_positive_int(update_every, "WindowedReduceLROnPlateauInstanceConfig.update_every", is_none_allowed=True) + + +@dataclass +class LearningRateSchedulerConfig: + """Base configuration for learning rate schedulers.""" + + instance: InstanceConfig + name: str | None = None + + +def is_lr_scheduler_target(target: str) -> bool: + """Check if the target is a supported learning rate scheduler target.""" + return target in ( + "torch.optim.lr_scheduler.ReduceLROnPlateau", + "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + ) + + +def is_lr_scheduler_config(cfg: DictConfig) -> bool: + """Check if the configuration is a plateau-based learning rate scheduler config.""" + return is_reduce_lr_on_plateau_config(cfg) or is_windowed_reduce_lr_on_plateau_config(cfg) + + +def validate_lr_scheduler_config(cfg: DictConfig) -> None: + """Validate a LearningRateSchedulerConfig. + + Args: + cfg: A DictConfig with instance and optional name fields (from Hydra). 
+ """ + instance = cfg.get("instance") + if not isinstance(instance, DictConfig): + raise ConfigValidationError("LearningRateSchedulerConfig.instance must be a DictConfig") + name = cfg.get("name") + + if is_reduce_lr_on_plateau_config(instance): + validate_reduce_lr_on_plateau_instance_config(instance) + elif is_windowed_reduce_lr_on_plateau_config(instance): + validate_windowed_reduce_lr_on_plateau_instance_config(instance) + else: + validate_instance_config(instance) + if not is_lr_scheduler_config(instance): + raise ConfigValidationError( + "LearningRateSchedulerConfig.instance must be ReduceLROnPlateau or WindowedReduceLROnPlateau" + ) + validate_nonempty_str(name, "LearningRateSchedulerConfig.name", is_none_allowed=True) diff --git a/simplexity/structured_configs/optimizer.py b/simplexity/structured_configs/optimizer.py index 20f8b6a6..4aae1277 100644 --- a/simplexity/structured_configs/optimizer.py +++ b/simplexity/structured_configs/optimizer.py @@ -76,6 +76,8 @@ class OptimizerConfig: def is_optimizer_target(target: str) -> bool: """Check if the target is an optimizer target.""" + if target.startswith("torch.optim.lr_scheduler."): + return False return target.startswith("torch.optim.") or target.startswith("optax.") @@ -91,7 +93,7 @@ def is_pytorch_optimizer_config(cfg: DictConfig) -> bool: """Check if the configuration is a PyTorch optimizer configuration.""" target = cfg.get("_target_", None) if isinstance(target, str): - return target.startswith("torch.optim.") + return is_optimizer_target(target) and target.startswith("torch.optim.") return False diff --git a/simplexity/utils/analysis_utils.py b/simplexity/utils/analysis_utils.py index df78c860..22879124 100644 --- a/simplexity/utils/analysis_utils.py +++ b/simplexity/utils/analysis_utils.py @@ -179,6 +179,7 @@ def build_deduplicated_dataset( probs: jax.Array, activations_by_layer: dict[str, jax.Array], select_last_token: bool = False, + skip_first_token: bool = False, ) -> DeduplicatedDataset: 
"""Deduplicate everything by prefix.""" if select_last_token: @@ -187,6 +188,7 @@ def build_deduplicated_dataset( beliefs, probs, activations_by_layer, + skip_first_token=skip_first_token, ) else: return build_prefix_dataset( @@ -194,6 +196,7 @@ def build_deduplicated_dataset( beliefs, probs, activations_by_layer, + skip_first_token=skip_first_token, ) @@ -202,8 +205,17 @@ def build_prefix_dataset( beliefs: jax.Array | tuple[jax.Array, ...], probs: jax.Array, activations_by_layer: dict[str, jax.Array], + skip_first_token: bool = False, ) -> DeduplicatedDataset: """Deduplicate everything by prefix.""" + if skip_first_token: + inputs = inputs[:, 1:] + if isinstance(beliefs, tuple): + beliefs = tuple(b[:, 1:, ...] for b in beliefs) + else: + beliefs = beliefs[:, 1:, ...] + probs = probs[:, 1:] + activations_by_layer = {name: acts[:, 1:, ...] for name, acts in activations_by_layer.items()} prefix_to_indices = make_prefix_groups(inputs) dedup_beliefs, prefixes = ( @@ -236,8 +248,17 @@ def build_last_token_dataset( beliefs: jax.Array | tuple[jax.Array, ...], probs: jax.Array, activations_by_layer: dict[str, jax.Array], + skip_first_token: bool = False, ) -> DeduplicatedDataset: """Deduplicate everything by full sequence.""" + if skip_first_token: + inputs = inputs[:, 1:] + if isinstance(beliefs, tuple): + beliefs = tuple(b[:, 1:, ...] for b in beliefs) + else: + beliefs = beliefs[:, 1:, ...] + probs = probs[:, 1:] + activations_by_layer = {name: acts[:, 1:, ...] 
for name, acts in activations_by_layer.items()} if isinstance(beliefs, tuple): beliefs = tuple(b[:, -1, :] for b in beliefs) else: diff --git a/simplexity/utils/factoring_utils.py b/simplexity/utils/factoring_utils.py index 6da30f8a..59c6a227 100644 --- a/simplexity/utils/factoring_utils.py +++ b/simplexity/utils/factoring_utils.py @@ -36,14 +36,18 @@ def compute_obs_dist_for_variant( if component_type == "hmm": # HMM: normalize by sum obs_state = state @ transition_matrix # [V, S] - return jnp.sum(obs_state, axis=1) # [V] + probs = jnp.sum(obs_state, axis=1) # [V] else: # ghmm # GHMM: normalize by eigenvector if normalizing_eigenvector is None: raise ValueError("GHMM requires normalizing_eigenvector") numer = state @ transition_matrix @ normalizing_eigenvector # [V] denom = jnp.sum(state * normalizing_eigenvector) # scalar - return numer / denom + probs = numer / denom + + # Clamp to non-negative to handle numerical precision issues + # (small negative values can arise from GHMM eigenvector computations) + return jnp.maximum(probs, 0.0) def transition_with_obs( diff --git a/simplexity/visualization/altair_renderer.py b/simplexity/visualization/altair_renderer.py new file mode 100644 index 00000000..0834d632 --- /dev/null +++ b/simplexity/visualization/altair_renderer.py @@ -0,0 +1,380 @@ +"""Altair renderer for declarative visualization configs.""" + +from __future__ import annotations + +import logging +from collections.abc import Mapping +from typing import Any + +try: + import altair as alt # type: ignore [import-not-found] +except ImportError as exc: # pragma: no cover - dependency missing only in unsupported envs + raise ImportError("Altair is required for visualization rendering. 
Install `altair` to continue.") from exc + +import pandas as pd + +from simplexity.exceptions import ConfigValidationError +from simplexity.visualization.data_pipeline import ( + build_plot_level_dataframe, + resolve_layer_dataframe, +) +from simplexity.visualization.data_registry import DataRegistry +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + AxisConfig, + ChannelAestheticsConfig, + FacetConfig, + GeometryConfig, + LayerConfig, + LegendConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, + ScaleConfig, + SelectionConfig, +) + +LOGGER = logging.getLogger(__name__) + +_CHANNEL_CLASS_MAP = { + "x": "X", + "y": "Y", + "color": "Color", + "size": "Size", + "shape": "Shape", + "opacity": "Opacity", + "row": "Row", + "column": "Column", + "detail": "Detail", +} + + +def build_altair_chart( + plot_cfg: PlotConfig, + data_registry: DataRegistry | Mapping[str, pd.DataFrame], + controls: Any | None = None, +): + """Render a PlotConfig into an Altair Chart.""" + if not plot_cfg.layers: + raise ConfigValidationError("PlotConfig.layers must include at least one layer for Altair rendering.") + + plot_df = build_plot_level_dataframe(plot_cfg.data, plot_cfg.transforms, data_registry) + + layer_charts = [ + _build_layer_chart(layer, resolve_layer_dataframe(layer, plot_df, data_registry)) for layer in plot_cfg.layers + ] + layer_charts = _apply_accumulation_detail(layer_charts, plot_cfg.layers, plot_cfg, plot_df, controls) + + chart = layer_charts[0] if len(layer_charts) == 1 else alt.layer(*layer_charts) + + if plot_cfg.selections: + chart = chart.add_params(*[_build_selection_param(sel) for sel in plot_cfg.selections]) + + # Apply size before faceting (FacetChart doesn't support width/height properties) + chart = _apply_chart_size(chart, plot_cfg.size) + + if plot_cfg.facet: + chart = _apply_facet(chart, plot_cfg.facet) + + chart = _apply_plot_level_properties(chart, plot_cfg.guides, plot_cfg.size, plot_cfg.background) + chart = 
_apply_chart_controls(chart, controls) + chart = _apply_default_legend_interactivity(chart, plot_cfg.layers) + + return chart + + +def _build_layer_chart(layer: LayerConfig, df: pd.DataFrame): + chart = alt.Chart(df) + chart = _apply_geometry(chart, layer.geometry) + encoding_kwargs = _encode_aesthetics(layer.aesthetics) + if encoding_kwargs: + chart = chart.encode(**encoding_kwargs) + if layer.selections: + chart = chart.add_params(*[_build_selection_param(sel) for sel in layer.selections]) + return chart + + +def _apply_geometry(chart, geometry: GeometryConfig): + mark_name = f"mark_{geometry.type}" + if not hasattr(chart, mark_name): + raise ConfigValidationError(f"Altair chart does not support geometry type '{geometry.type}'") + mark_fn = getattr(chart, mark_name) + return mark_fn(**(geometry.props or {})) + + +def _encode_aesthetics(aesthetics: AestheticsConfig) -> dict[str, Any]: + encodings: dict[str, Any] = {} + for channel_name in ("x", "y", "color", "size", "shape", "opacity", "row", "column", "detail"): + channel_cfg = getattr(aesthetics, channel_name) + channel_value = _channel_to_alt(channel_name, channel_cfg) + if channel_value is not None: + encodings[channel_name] = channel_value + + if aesthetics.tooltip: + encodings["tooltip"] = [_tooltip_to_alt(tooltip_cfg) for tooltip_cfg in aesthetics.tooltip] + + return encodings + + +def _channel_to_alt(channel_name: str, cfg: ChannelAestheticsConfig | None): + if cfg is None: + return None + if cfg.value is not None and cfg.field is None: + return alt.value(cfg.value) + channel_cls_name = _CHANNEL_CLASS_MAP[channel_name] + channel_cls = getattr(alt, channel_cls_name) + kwargs: dict[str, Any] = {} + if cfg.field: + kwargs["field"] = cfg.field + if cfg.type: + kwargs["type"] = cfg.type + if cfg.title: + kwargs["title"] = cfg.title + if cfg.aggregate: + kwargs["aggregate"] = cfg.aggregate + if cfg.bin is not None: + kwargs["bin"] = cfg.bin + if cfg.time_unit: + kwargs["timeUnit"] = cfg.time_unit + if cfg.sort 
is not None: + kwargs["sort"] = alt.Sort(cfg.sort) if isinstance(cfg.sort, list) else cfg.sort + if cfg.scale: + kwargs["scale"] = _scale_to_alt(cfg.scale) + if cfg.axis and channel_name in {"x", "y", "row", "column"}: + kwargs["axis"] = _axis_to_alt(cfg.axis) + if cfg.legend and channel_name in {"color", "size", "shape", "opacity"}: + if cfg.legend.visible is False: + kwargs["legend"] = None + else: + kwargs["legend"] = _legend_to_alt(cfg.legend) + return channel_cls(**kwargs) + + +def _tooltip_to_alt(cfg: ChannelAestheticsConfig): + if cfg.value is not None and cfg.field is None: + return alt.Tooltip(value=cfg.value, title=cfg.title) + if cfg.field is None: + raise ConfigValidationError("Tooltip channels must set either a field or a constant value.") + + kwargs: dict[str, Any] = {"field": cfg.field} + if cfg.type: + kwargs["type"] = cfg.type + if cfg.title: + kwargs["title"] = cfg.title + return alt.Tooltip(**kwargs) + + +def _scale_to_alt(cfg: ScaleConfig): + kwargs = {k: v for k, v in vars(cfg).items() if v is not None} + return alt.Scale(**kwargs) + + +def _axis_to_alt(cfg: AxisConfig): + kwargs = {k: v for k, v in vars(cfg).items() if v is not None} + return alt.Axis(**kwargs) + + +def _legend_to_alt(cfg: LegendConfig): + kwargs = {k: v for k, v in vars(cfg).items() if v is not None} + return alt.Legend(**kwargs) + + +def _build_selection_param(cfg: SelectionConfig): + kwargs: dict[str, Any] = {} + if cfg.name: + kwargs["name"] = cfg.name + if cfg.encodings: + kwargs["encodings"] = cfg.encodings + if cfg.fields: + kwargs["fields"] = cfg.fields + if cfg.bind: + kwargs["bind"] = cfg.bind + if cfg.type == "interval": + return alt.selection_interval(**kwargs) + if cfg.type == "single": + return alt.selection_single(**kwargs) + if cfg.type == "multi": + return alt.selection_multi(**kwargs) + raise ConfigValidationError(f"Unsupported selection type '{cfg.type}' for Altair renderer.") + + +def _apply_facet(chart, facet_cfg: FacetConfig): + facet_args: dict[str, Any] 
= {} + if facet_cfg.row: + facet_args["row"] = alt.Row(facet_cfg.row) + if facet_cfg.column: + facet_args["column"] = alt.Column(facet_cfg.column) + if facet_cfg.wrap: + raise ConfigValidationError("FacetConfig.wrap is not yet implemented for Altair rendering.") + if not facet_args: + return chart + return chart.facet(**facet_args) + + +def _apply_chart_size(chart, size: PlotSizeConfig): + """Apply width/height to chart. Must be called before faceting.""" + width = size.width + height = size.height + if width is not None or height is not None: + chart = chart.properties(width=width, height=height) + return chart + + +def _apply_plot_level_properties(chart, guides: PlotLevelGuideConfig, size: PlotSizeConfig, background: str | None): + title_params = _build_title_params(guides) + if title_params is not None: + chart = chart.properties(title=title_params) + if size.autosize: + chart.autosize = size.autosize + if background: + chart = chart.configure(background=background) + if guides.labels: + LOGGER.info("Plot-level labels are not yet implemented for Altair; skipping %s labels.", len(guides.labels)) + return chart + + +def _apply_chart_controls(chart, controls: Any | None): + if not controls: + return chart + chart = _apply_dropdown_control(chart, getattr(controls, "dropdown", None)) + slider_detail = None if getattr(controls, "accumulate_steps", False) else getattr(controls, "slider", None) + chart = _apply_slider_control(chart, slider_detail) + return chart + + +def _apply_dropdown_control(chart, dropdown): + field_name = getattr(dropdown, "field", None) + if dropdown and field_name == "layer": + options = [_normalize_control_value(value) for value in getattr(dropdown, "options", []) or []] + if len(options) > 1: + binding = alt.binding_select(options=options, name="Layer: ") + param = alt.param(name=f"{field_name}_dropdown", bind=binding, value=options[0]) + # Include layer-independent rows (layer == "_no_layer_") along with selected layer + filter_expr = 
f"(datum.{field_name} == {param.name}) || (datum.{field_name} == '_no_layer_')" + return chart.add_params(param).transform_filter(filter_expr) + return chart + + +def _apply_slider_control(chart, slider): + field_name = getattr(slider, "field", None) + options = [_normalize_control_value(value) for value in getattr(slider, "options", []) or []] + if not slider or not field_name or len(options) <= 1: + return chart + + numeric_options = _numeric_control_values(options) + if numeric_options: + min_val, max_val = numeric_options[0], numeric_options[-1] + step = _infer_slider_step(numeric_options) + binding = alt.binding_range(min=min_val, max=max_val, step=step, name=f"{field_name}: ") + initial_value = numeric_options[0] + else: + binding = alt.binding_select(options=options, name=f"{field_name}: ") + initial_value = options[0] + + param = alt.param(name=f"{field_name}_slider", bind=binding, value=initial_value) + return chart.add_params(param).transform_filter(f"datum.{field_name} == {param.name}") + + +def _apply_default_legend_interactivity(chart, layers: list[LayerConfig]): + if not layers: + return chart + # FacetChart doesn't support encode() - skip legend interactivity for faceted charts + if isinstance(chart, alt.FacetChart): + return chart + color_fields: set[str] = set() + for layer in layers: + aesthetics = layer.aesthetics + if aesthetics and aesthetics.color and aesthetics.color.field: + color_fields.add(aesthetics.color.field) + if len(color_fields) != 1: + return chart + if any(layer.aesthetics and layer.aesthetics.opacity is not None for layer in layers): + return chart + field_name = next(iter(color_fields)) + legend_selection = alt.selection_multi(fields=[field_name], bind="legend", toggle=True, empty="all") + chart = chart.add_params(legend_selection) + opacity_encoding = alt.condition(legend_selection, alt.value(1.0), alt.value(0.05)) + return chart.encode(opacity=opacity_encoding) + + +def _normalize_control_value(value): + return value.item() if 
hasattr(value, "item") else value + + +def _numeric_control_values(options: list[Any]) -> list[float]: + numeric: list[float] = [] + for value in options: + try: + numeric.append(float(value)) + except (TypeError, ValueError): + return [] + numeric = sorted(dict.fromkeys(numeric)) + return numeric + + +def _infer_slider_step(values: list[float]) -> float: + if len(values) < 2: + return 1.0 + diffs = [round(values[idx + 1] - values[idx], 10) for idx in range(len(values) - 1)] + # Use smallest positive difference or default to 1.0 + step = min((diff for diff in diffs if diff > 0), default=1.0) + return step + + +def _apply_accumulation_detail(layer_charts, layers, plot_cfg, plot_df: pd.DataFrame, controls: Any | None): + if not controls or not getattr(controls, "accumulate_steps", False): + return layer_charts + if "step" not in plot_df.columns: + return layer_charts + updated = [] + for chart, layer_cfg in zip(layer_charts, layers, strict=False): + aesthetics = layer_cfg.aesthetics + if aesthetics and aesthetics.detail is not None: + updated.append(chart) + continue + if _layer_references_field(layer_cfg, "step"): + updated.append(chart) + continue + updated.append(chart.encode(detail=alt.Detail(field="step", type="ordinal"))) + return updated + + +def _layer_references_field(layer_cfg: LayerConfig, field: str) -> bool: + aesthetics = layer_cfg.aesthetics + if not aesthetics: + return False + + channel_names = [ + "x", + "y", + "x2", + "y2", + "color", + "stroke", + "strokeDash", + "size", + "shape", + "tooltip", + ] + for name in channel_names: + channel = getattr(aesthetics, name, None) + if channel is None: + continue + # tooltip can be list-like + if isinstance(channel, list): + for entry in channel: + if getattr(entry, "field", None) == field: + return True + continue + if getattr(channel, "field", None) == field: + return True + return False + + +def _build_title_params(guides: PlotLevelGuideConfig): + subtitle_lines = [text for text in (guides.subtitle, 
guides.caption) if text] + if not guides.title and not subtitle_lines: + return None + if subtitle_lines: + return alt.TitleParams(text=guides.title or "", subtitle=subtitle_lines) + return guides.title diff --git a/simplexity/visualization/data_pipeline.py b/simplexity/visualization/data_pipeline.py new file mode 100644 index 00000000..be7d88cf --- /dev/null +++ b/simplexity/visualization/data_pipeline.py @@ -0,0 +1,194 @@ +"""Reusable helpers for preparing data prior to rendering.""" + +from __future__ import annotations + +import math +from collections.abc import Mapping + +import numpy as np +import pandas as pd + +from simplexity.exceptions import ConfigValidationError +from simplexity.visualization.data_registry import DataRegistry, resolve_data_source +from simplexity.visualization.structured_configs import ( + DataConfig, + LayerConfig, + TransformConfig, +) + +CALC_ENV = { + "np": np, + "pd": pd, + "math": math, + "log": np.log, + "exp": np.exp, + "sqrt": np.sqrt, + "abs": np.abs, + "clip": np.clip, +} + + +def normalize_expression(expr: str) -> str: + """Normalize expressions shared between pandas and Vega-Lite syntaxes.""" + return expr.replace("datum.", "").strip() + + +def materialize_data(data_cfg: DataConfig, data_registry: DataRegistry | Mapping[str, pd.DataFrame]) -> pd.DataFrame: + """Resolve a logical data source and apply lightweight filters/column selection.""" + df = resolve_data_source(data_cfg.source, data_registry).copy() + if data_cfg.filters: + df = apply_filters(df, data_cfg.filters) + if data_cfg.columns: + missing = [col for col in data_cfg.columns if col not in df.columns] + if missing: + raise ConfigValidationError(f"Columns {missing} are not present in data source '{data_cfg.source}'") + df = df.loc[:, data_cfg.columns] + return df + + +def build_plot_level_dataframe( + data_cfg: DataConfig, + transforms: list[TransformConfig], + data_registry: DataRegistry | Mapping[str, pd.DataFrame], +) -> pd.DataFrame: + """Materialize the base 
dataframe for a plot, applying plot-level transforms.""" + df = materialize_data(data_cfg, data_registry) + return apply_transforms(df, transforms) + + +def resolve_layer_dataframe( + layer: LayerConfig, + plot_df: pd.DataFrame, + data_registry: DataRegistry | Mapping[str, pd.DataFrame], +) -> pd.DataFrame: + """Resolve the dataframe for an individual layer.""" + if layer.data is None: + df = plot_df.copy() + else: + df = materialize_data(layer.data, data_registry) + if layer.transforms: + df = apply_transforms(df, layer.transforms) + return df + + +def apply_filters(df: pd.DataFrame, filters: list[str]) -> pd.DataFrame: + """Apply pandas-compatible query filters.""" + result = df.copy() + for expr in filters: + norm_expr = normalize_expression(expr) + result = result.query(norm_expr, engine="python", local_dict=CALC_ENV) + return result + + +def apply_transforms(df: pd.DataFrame, transforms: list[TransformConfig]) -> pd.DataFrame: + """Sequentially apply configured transforms to a dataframe.""" + result = df.copy() + for transform in transforms: + result = _apply_transform(result, transform) + return result + + +def _apply_transform(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: + if transform.op == "filter": + if transform.filter is None: + raise ConfigValidationError("Filter transforms require the `filter` expression.") + return apply_filters(df, [transform.filter]) + if transform.op == "calculate": + return _apply_calculate(df, transform) + if transform.op == "aggregate": + return _apply_aggregate(df, transform) + if transform.op == "bin": + return _apply_bin(df, transform) + if transform.op == "window": + return _apply_window(df, transform) + if transform.op == "fold": + return _apply_fold(df, transform) + if transform.op == "pivot": + raise ConfigValidationError("Pivot transforms are not implemented yet.") + raise ConfigValidationError(f"Unsupported transform operation '{transform.op}'") + + +def _apply_calculate(df: pd.DataFrame, transform: 
TransformConfig) -> pd.DataFrame: + expr = normalize_expression(transform.expr or "") + target = transform.as_field or "" + if not target: + raise ConfigValidationError("TransformConfig.as_field is required for calculate transforms") + result = df.copy() + result[target] = result.eval(expr, engine="python", local_dict=CALC_ENV) + return result + + +def _apply_aggregate(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: + groupby = transform.groupby or [] + aggregations = transform.aggregations or {} + if not groupby or not aggregations: + raise ConfigValidationError("Aggregate transforms require `groupby` and `aggregations` fields.") + + agg_kwargs: dict[str, tuple[str, str]] = {} + for alias, expr in aggregations.items(): + func, field = _parse_function_expr(expr, expected_arg=True) + agg_kwargs[alias] = (field, func) + + grouped = df.groupby(groupby, dropna=False).agg(**agg_kwargs).reset_index() + return grouped + + +def _apply_bin(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: + if not transform.field or not transform.binned_as: + raise ConfigValidationError("Bin transforms require `field` and `binned_as`.") + bins = transform.maxbins or 10 + result = df.copy() + result[transform.binned_as] = pd.cut(result[transform.field], bins=bins, include_lowest=True) + return result + + +def _apply_window(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: + if not transform.window: + raise ConfigValidationError("Window transforms require the `window` mapping.") + result = df.copy() + for alias, expr in transform.window.items(): + func, field = _parse_function_expr(expr, expected_arg=True) + if func == "rank": + result[alias] = result[field].rank(method="average") + elif func == "cumsum": + result[alias] = result[field].cumsum() + else: + raise ConfigValidationError(f"Window function '{func}' is not supported.") + return result + + +def _apply_fold(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: + if not 
transform.fold_fields: + raise ConfigValidationError("Fold transforms require `fold_fields`.") + var_name, value_name = _derive_fold_names(transform.as_fields) + return df.melt(value_vars=transform.fold_fields, var_name=var_name, value_name=value_name) + + +def _parse_function_expr(expr: str, expected_arg: bool) -> tuple[str, str]: + if "(" not in expr or not expr.endswith(")"): + raise ConfigValidationError(f"Expression '{expr}' must be of the form func(field).") + func, rest = expr.split("(", 1) + value = rest[:-1].strip() + func = func.strip() + if expected_arg and not value: + raise ConfigValidationError(f"Expression '{expr}' must supply an argument.") + return func, value + + +def _derive_fold_names(as_fields: list[str] | None) -> tuple[str, str]: + if not as_fields: + return "key", "value" + if len(as_fields) == 1: + return as_fields[0], "value" + return as_fields[0], as_fields[1] + + +__all__ = [ + "CALC_ENV", + "apply_filters", + "apply_transforms", + "build_plot_level_dataframe", + "materialize_data", + "normalize_expression", + "resolve_layer_dataframe", +] diff --git a/simplexity/visualization/data_registry.py b/simplexity/visualization/data_registry.py new file mode 100644 index 00000000..c70f44c6 --- /dev/null +++ b/simplexity/visualization/data_registry.py @@ -0,0 +1,39 @@ +"""Helpers for resolving logical visualization data sources.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Protocol + +import pandas as pd + + +class DataRegistry(Protocol): # pylint: disable=too-few-public-methods + """Protocol for registry objects that return pandas DataFrames.""" + + def get(self, source_name: str) -> pd.DataFrame: + """Return the DataFrame associated with ``source_name``.""" + ... 
# pylint: disable=unnecessary-ellipsis + + +class DictDataRegistry: # pylint: disable=too-few-public-methods + """Simple registry backed by an in-memory mapping.""" + + def __init__(self, data: Mapping[str, pd.DataFrame] | None = None) -> None: + self._data: dict[str, pd.DataFrame] = dict(data or {}) + + def get(self, source_name: str) -> pd.DataFrame: + """Get the DataFrame associated with ``source_name``.""" + try: + return self._data[source_name] + except KeyError as exc: # pragma: no cover - simple error wrapper + raise ValueError(f"Data source '{source_name}' is not registered") from exc + + +def resolve_data_source(source_name: str, data_registry: DataRegistry | Mapping[str, pd.DataFrame]) -> pd.DataFrame: + """Resolve a logical source name regardless of the registry implementation.""" + if isinstance(data_registry, Mapping): + if source_name not in data_registry: + raise ValueError(f"Data source '{source_name}' is not registered") + return data_registry[source_name] + return data_registry.get(source_name) diff --git a/simplexity/visualization/history.py b/simplexity/visualization/history.py new file mode 100644 index 00000000..e57e657b --- /dev/null +++ b/simplexity/visualization/history.py @@ -0,0 +1,105 @@ +"""Utilities for persisting visualization history for interactive controls.""" + +from __future__ import annotations + +import dataclasses +import hashlib +import json +import logging +from pathlib import Path +from typing import Any + +import pandas as pd + +from simplexity.visualization.structured_configs import PlotConfig + +LOGGER = logging.getLogger(__name__) + +HISTORY_VERSION = 1 +HISTORY_DIRNAME = "history" +HISTORY_DATA_SUFFIX = ".jsonl" +HISTORY_META_SUFFIX = ".meta.json" + + +def history_paths(root: Path, safe_name: str) -> tuple[Path, Path]: + """Return the data and metadata file paths for a visualization history entry.""" + history_dir = root / HISTORY_DIRNAME + data_path = history_dir / f"{safe_name}{HISTORY_DATA_SUFFIX}" + meta_path = 
history_dir / f"{safe_name}{HISTORY_META_SUFFIX}"
+    return data_path, meta_path
+
+
+def plot_config_signature(plot_cfg: PlotConfig) -> str:
+    """Create a stable hash of a PlotConfig to detect incompatible history files."""
+    # sort_keys makes the JSON canonical, so equal configs always hash equally.
+    serialized = json.dumps(
+        dataclasses.asdict(plot_cfg),
+        sort_keys=True,
+        default=_serialize_unknown,
+    )
+    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()
+
+
+def load_history_dataframe(data_path: Path, meta_path: Path, *, expected_signature: str) -> pd.DataFrame:
+    """Load previously saved visualization dataframe if metadata matches signature.
+
+    Missing, corrupted, or signature-mismatched files are treated as "no
+    history" (an empty DataFrame is returned) rather than raising.
+    """
+    if not data_path.exists() or not meta_path.exists():
+        return pd.DataFrame()
+
+    try:
+        with meta_path.open(encoding="utf-8") as source:
+            metadata = json.load(source)
+    except json.JSONDecodeError:
+        LOGGER.warning("Visualization history metadata at %s is corrupted; ignoring existing history.", meta_path)
+        return pd.DataFrame()
+
+    # Reject history written by a different layout version or a different PlotConfig.
+    if metadata.get("version") != HISTORY_VERSION or metadata.get("signature") != expected_signature:
+        LOGGER.info("Visualization history metadata at %s is outdated or mismatched; starting fresh.", meta_path)
+        return pd.DataFrame()
+
+    try:
+        return pd.read_json(data_path, orient="records", lines=True)
+    except ValueError:
+        LOGGER.warning("Visualization history data at %s is corrupted; ignoring existing history.", data_path)
+        return pd.DataFrame()
+
+
+def save_history_dataframe(
+    dataframe: pd.DataFrame,
+    data_path: Path,
+    meta_path: Path,
+    *,
+    signature: str,
+    analysis: str,
+    name: str,
+    backend: str,
+) -> None:
+    """Persist visualization dataframe and metadata for future accumulation."""
+    data_path.parent.mkdir(parents=True, exist_ok=True)
+    # Data is written as JSON Lines so rows can later be appended/accumulated.
+    dataframe.to_json(data_path, orient="records", lines=True)
+    metadata = {
+        "version": HISTORY_VERSION,
+        "analysis": analysis,
+        "name": name,
+        "backend": backend,
+        "signature": signature,
+        "rows": len(dataframe),
+    }
+    with meta_path.open("w", encoding="utf-8") as sink:
+        json.dump(metadata, 
sink, indent=2)
+
+
+def _serialize_unknown(value: Any) -> str:
+    """Best-effort serialization hook for dataclasses.asdict JSON dumps."""
+    # NOTE(review): both branches return str(value); the isinstance(Path)
+    # check is redundant unless Path formatting is meant to diverge later.
+    if isinstance(value, Path):
+        return str(value)
+    return str(value)
+
+
+__all__ = [
+    "HISTORY_DIRNAME",
+    "HISTORY_DATA_SUFFIX",
+    "HISTORY_META_SUFFIX",
+    "history_paths",
+    "load_history_dataframe",
+    "plot_config_signature",
+    "save_history_dataframe",
+]
diff --git a/simplexity/visualization/plotly_renderer.py b/simplexity/visualization/plotly_renderer.py
new file mode 100644
index 00000000..856bfaf7
--- /dev/null
+++ b/simplexity/visualization/plotly_renderer.py
@@ -0,0 +1,1476 @@
+"""Plotly renderer for visualization PlotConfigs."""
+
+from __future__ import annotations
+
+import logging
+import re
+from collections.abc import Mapping
+from dataclasses import dataclass
+from typing import Any, Literal
+
+import pandas as pd
+import plotly.graph_objects as go
+from plotly.colors import qualitative as qualitative_colors
+from plotly.subplots import make_subplots
+
+from simplexity.exceptions import ConfigValidationError
+from simplexity.visualization.data_pipeline import (
+    build_plot_level_dataframe,
+    resolve_layer_dataframe,
+)
+from simplexity.visualization.data_registry import DataRegistry
+from simplexity.visualization.structured_configs import (
+    AestheticsConfig,
+    ChannelAestheticsConfig,
+    FacetConfig,
+    LayerConfig,
+    PlotConfig,
+    PlotLevelGuideConfig,
+    PlotSizeConfig,
+)
+
+LOGGER = logging.getLogger(__name__)
+
+_HEX_COLOR_PATTERN = re.compile(r"^#([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$")
+
+
+def build_plotly_figure(
+    plot_cfg: PlotConfig,
+    data_registry: DataRegistry | Mapping[str, pd.DataFrame],
+    controls: Any | None = None,
+) -> go.Figure:
+    """Render a PlotConfig into a Plotly Figure (currently 3D scatter only)."""
+    # Exactly one point-geometry layer is supported for now; fail fast otherwise.
+    if not plot_cfg.layers:
+        raise ConfigValidationError("PlotConfig.layers must include at least one layer for Plotly rendering.")
+    if len(plot_cfg.layers) != 1:
+        raise 
ConfigValidationError("Plotly renderer currently supports exactly one layer.") + + layer = plot_cfg.layers[0] + if layer.geometry.type != "point": + raise ConfigValidationError("Plotly renderer currently supports point geometry.") + + plot_df = build_plot_level_dataframe(plot_cfg.data, plot_cfg.transforms, data_registry) + layer_df = resolve_layer_dataframe(layer, plot_df, data_registry) + + # Handle faceting + if plot_cfg.facet: + figure = _build_faceted_figure(layer, layer_df, plot_cfg.facet, plot_cfg.size, controls) + # Use empty size config to avoid overwriting the computed facet dimensions + empty_size = PlotSizeConfig(width=None, height=None) + figure = _apply_plot_level_properties( + figure, plot_cfg.guides, empty_size, plot_cfg.background, layer.aesthetics + ) + return figure + + has_z = bool(layer.aesthetics and layer.aesthetics.z and layer.aesthetics.z.field) + if has_z: + figure = _build_scatter3d(layer, layer_df, controls) + else: + figure = _build_scatter2d(layer, layer_df, controls) + figure = _apply_plot_level_properties(figure, plot_cfg.guides, plot_cfg.size, plot_cfg.background, layer.aesthetics) + return figure + + +def _build_faceted_figure( + layer: LayerConfig, + df: pd.DataFrame, + facet_cfg: FacetConfig, + size_cfg: PlotSizeConfig, + controls: Any | None, +): + """Build a faceted subplot figure.""" + aes = layer.aesthetics + x_field = _require_field(aes.x, "x") + y_field = _require_field(aes.y, "y") + has_z = bool(aes.z and aes.z.field) + z_field = _require_field(aes.z, "z") if has_z else None + + row_field = facet_cfg.row + col_field = facet_cfg.column + + # Resolve controls + dropdown = _resolve_layer_dropdown(df, controls) + slider_enabled = not (getattr(controls, "accumulate_steps", False)) + slider = _resolve_slider_control(df, controls if slider_enabled else None) + + # Filter by initial layer if dropdown is active + # Keep rows that don't depend on layer (e.g., ground truth from belief states with layer="_no_layer_") + layer_field, 
layer_options = dropdown if dropdown else (None, [None]) + working_df: pd.DataFrame + if layer_field is None: + working_df = df + else: + layer_independent_filter = df.loc[df[layer_field] == "_no_layer_"] + layer_dependent_filter = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_options[0])] + working_df = pd.concat([layer_dependent_filter, layer_independent_filter], ignore_index=True) + + # Get unique values for faceting dimensions + # Use dict.fromkeys to deduplicate by string representation (handles mixed int/str types) + row_values: list[str | None] + col_values: list[str | None] + if row_field: + raw_row = list(pd.unique(working_df[row_field])) + row_values = list(dict.fromkeys(str(v) for v in sorted(raw_row, key=str))) + else: + row_values = [None] + if col_field: + raw_col = list(pd.unique(working_df[col_field])) + col_values = list(dict.fromkeys(str(v) for v in sorted(raw_col, key=str))) + else: + col_values = [None] + + n_rows = len(row_values) + n_cols = len(col_values) + + # Build subplot titles + subplot_titles = [] + for row_val in row_values: + for col_val in col_values: + if row_val is not None and col_val is not None: + subplot_titles.append(f"{col_val}") + elif col_val is not None: + subplot_titles.append(str(col_val)) + elif row_val is not None: + subplot_titles.append(str(row_val)) + else: + subplot_titles.append("") + + # Create subplot grid with appropriate specs for 2D or 3D + if has_z: + specs = [[{"type": "scene"} for _ in range(n_cols)] for _ in range(n_rows)] + fig = make_subplots( + rows=n_rows, + cols=n_cols, + subplot_titles=subplot_titles, + horizontal_spacing=0.02, + vertical_spacing=0.05, + specs=specs, + row_titles=[str(v) for v in row_values] if row_field else None, + ) + else: + fig = make_subplots( + rows=n_rows, + cols=n_cols, + subplot_titles=subplot_titles, + horizontal_spacing=0.05, + vertical_spacing=0.08, + row_titles=[str(v) for v in row_values] if row_field else None, + ) + + color_field = 
_optional_field(aes.color) + size_field_name = _optional_field(aes.size) + size_value = _resolve_size_value(aes.size) + opacity_value = _resolve_opacity(aes.opacity) + hover_fields = _collect_tooltip_fields(aes.tooltip) + color_map = _build_color_discrete_map(df, color_field, aes.color) + color_specs = _build_color_group_specs(df, color_field, aes.color, color_map) + + # Helper to build traces for a given filtered dataframe + def build_facet_traces(source_df: pd.DataFrame, show_legend: bool = True): + traces_by_cell: dict[tuple[int, int], list[Any]] = {} + for row_idx, row_val in enumerate(row_values, start=1): + for col_idx, col_val in enumerate(col_values, start=1): + cell_df = source_df.copy() + if row_field: + cell_df = cell_df.loc[cell_df[row_field].astype(str) == row_val] + if col_field: + cell_df = cell_df.loc[cell_df[col_field].astype(str) == col_val] + + if cell_df.empty: + traces_by_cell[(row_idx, col_idx)] = [] + continue + + if has_z: + assert z_field is not None + traces = _scatter3d_traces( + cell_df, + x_field, + y_field, + z_field, + color_field, + size_field_name, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + scene_idx = (row_idx - 1) * n_cols + col_idx + scene_name = "scene" if scene_idx == 1 else f"scene{scene_idx}" + for trace in traces: + trace.scene = scene_name + else: + traces = _scatter2d_traces( + cell_df, + x_field, + y_field, + color_field, + size_field_name, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + + # Control legend visibility + for trace in traces: + if not show_legend or row_idx > 1 or col_idx > 1: + trace.showlegend = False + + traces_by_cell[(row_idx, col_idx)] = traces + return traces_by_cell + + # Get slider values if slider is active + slider_field, slider_values = slider if slider else (None, [None]) + + if slider and layer_field: + assert slider_field is not None + # Both slider and dropdown: complex case 
with frames per (layer, step) + # For simplicity, build frames for current layer only + initial_step = slider_values[0] if slider_values else None + initial_df = working_df + if initial_step is not None and slider_field in working_df.columns: + initial_df = working_df.loc[working_df[slider_field] == initial_step] + + traces_by_cell = build_facet_traces(initial_df) + for (row_idx, col_idx), traces in traces_by_cell.items(): + for trace in traces: + fig.add_trace(trace, row=row_idx, col=col_idx) + + # Build frames for slider animation + frames = [] + for step_val in slider_values: + step_filtered = working_df.loc[working_df[slider_field] == step_val] + frame_traces_by_cell = build_facet_traces(step_filtered, show_legend=False) + frame_traces: list[Any] = [] + for row_idx, col_idx in sorted(frame_traces_by_cell.keys()): + frame_traces.extend(frame_traces_by_cell[(row_idx, col_idx)]) + frames.append(go.Frame(name=str(step_val), data=frame_traces)) + fig.frames = frames + _add_slider_layout(fig, slider_field, slider_values) + + # Add layer dropdown + if len(layer_options) > 1: + _add_faceted_layer_dropdown( + fig, + df, + layer_field, + layer_options, + slider_field, + slider_values, + row_values, + col_values, + row_field, + col_field, + x_field, + y_field, + z_field, + color_field, + size_field_name, + hover_fields, + opacity_value, + color_specs, + layer, + has_z, + n_cols, + size_value, + ) + + elif slider: + assert slider_field is not None + # Slider only + initial_step = slider_values[0] if slider_values else None + initial_df = working_df + if initial_step is not None and slider_field in working_df.columns: + initial_df = working_df.loc[working_df[slider_field] == initial_step] + + traces_by_cell = build_facet_traces(initial_df) + for (row_idx, col_idx), traces in traces_by_cell.items(): + for trace in traces: + fig.add_trace(trace, row=row_idx, col=col_idx) + + # Build frames for slider animation + frames = [] + for step_val in slider_values: + step_filtered = 
working_df.loc[working_df[slider_field] == step_val] + frame_traces_by_cell = build_facet_traces(step_filtered, show_legend=False) + frame_traces: list[Any] = [] + for row_idx, col_idx in sorted(frame_traces_by_cell.keys()): + frame_traces.extend(frame_traces_by_cell[(row_idx, col_idx)]) + frames.append(go.Frame(name=str(step_val), data=frame_traces)) + fig.frames = frames + _add_slider_layout(fig, slider_field, slider_values) + + elif dropdown and len(layer_options) > 1: + assert layer_field is not None + # Dropdown only + traces_by_cell = build_facet_traces(working_df) + for (row_idx, col_idx), traces in traces_by_cell.items(): + for trace in traces: + fig.add_trace(trace, row=row_idx, col=col_idx) + + _add_faceted_layer_dropdown( + fig, + df, + layer_field, + layer_options, + None, + [], + row_values, + col_values, + row_field, + col_field, + x_field, + y_field, + z_field, + color_field, + size_field_name, + hover_fields, + opacity_value, + color_specs, + layer, + has_z, + n_cols, + size_value, + ) + else: + # No controls + traces_by_cell = build_facet_traces(working_df) + for (row_idx, col_idx), traces in traces_by_cell.items(): + for trace in traces: + fig.add_trace(trace, row=row_idx, col=col_idx) + + # Apply size to individual subplots if specified + subplot_width = size_cfg.width or 200 + subplot_height = size_cfg.height or 200 + total_width = subplot_width * n_cols + 100 # Extra space for margins + total_height = subplot_height * n_rows + 100 + + fig.update_layout( + width=total_width, + height=total_height, + showlegend=True, + ) + + # For 3D, set axis titles, ranges, and aspect ratio on each scene + if has_z: + x_title = _axis_title(aes.x) + y_title = _axis_title(aes.y) + z_title = _axis_title(aes.z) + x_range = _axis_domain(aes.x) + y_range = _axis_domain(aes.y) + z_range = _axis_domain(aes.z) + for row_idx in range(1, n_rows + 1): + for col_idx in range(1, n_cols + 1): + scene_idx = (row_idx - 1) * n_cols + col_idx + scene_key = "scene" if scene_idx == 
1 else f"scene{scene_idx}" + scene_update: dict[str, Any] = {"aspectmode": "cube"} + xaxis_cfg: dict[str, Any] = {} + yaxis_cfg: dict[str, Any] = {} + zaxis_cfg: dict[str, Any] = {} + if x_title: + xaxis_cfg["title"] = x_title + if y_title: + yaxis_cfg["title"] = y_title + if z_title: + zaxis_cfg["title"] = z_title + if x_range: + xaxis_cfg["range"] = x_range + if y_range: + yaxis_cfg["range"] = y_range + if z_range: + zaxis_cfg["range"] = z_range + if xaxis_cfg: + scene_update["xaxis"] = xaxis_cfg + if yaxis_cfg: + scene_update["yaxis"] = yaxis_cfg + if zaxis_cfg: + scene_update["zaxis"] = zaxis_cfg + layout_update: dict[str, Any] = {scene_key: scene_update} + fig.update_layout(**layout_update) + + return fig + + +def _add_faceted_layer_dropdown( + fig: go.Figure, + df: pd.DataFrame, + layer_field: str, + layer_options: list[Any], + slider_field: str | None, + slider_values: list[Any], + row_values: list[str | None], + col_values: list[str | None], + row_field: str | None, + col_field: str | None, + x_field: str, + y_field: str, + z_field: str | None, + color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + layer: LayerConfig, + has_z: bool, + n_cols: int, + size_value: float | None = None, +) -> None: + """Add a layer dropdown menu that rebuilds traces for faceted figures.""" + # Get layer-independent rows (e.g., ground truth from belief states) + layer_independent = df.loc[df[layer_field] == "_no_layer_"] + + buttons = [] + for layer_opt in layer_options: + # Combine layer-specific rows with layer-independent rows + layer_specific_filtered = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] + layer_df = pd.concat([layer_specific_filtered, layer_independent], ignore_index=True) + + # If there's a slider, filter to initial step + if slider_field and slider_values: + layer_df = layer_df.loc[layer_df[slider_field] == slider_values[0]] + + # Build 
traces for this layer + all_traces: list[Any] = [] + for row_idx, row_val in enumerate(row_values, start=1): + for col_idx, col_val in enumerate(col_values, start=1): + cell_df = layer_df.copy() + if row_field: + cell_df = cell_df.loc[cell_df[row_field].astype(str) == row_val] + if col_field: + cell_df = cell_df.loc[cell_df[col_field].astype(str) == col_val] + + if cell_df.empty: + continue + + if has_z: + assert z_field is not None + traces = _scatter3d_traces( + cell_df, + x_field, + y_field, + z_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + scene_idx = (row_idx - 1) * n_cols + col_idx + scene_name = "scene" if scene_idx == 1 else f"scene{scene_idx}" + for trace in traces: + trace.scene = scene_name + trace.showlegend = False + else: + traces = _scatter2d_traces( + cell_df, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + for trace in traces: + trace.showlegend = False + + all_traces.extend(traces) + + # Create button that replaces all trace data + button = { + "label": str(layer_opt), + "method": "restyle", + "args": [ + { + "x": [list(t.x) if hasattr(t, "x") else [] for t in all_traces], + "y": [list(t.y) if hasattr(t, "y") else [] for t in all_traces], + } + ], + } + if has_z: + button["args"][0]["z"] = [list(t.z) if hasattr(t, "z") else [] for t in all_traces] + + buttons.append(button) + + fig.update_layout( + updatemenus=[ + { + "buttons": buttons, + "direction": "down", + "showactive": True, + "x": 1.05, + "xanchor": "left", + "y": 1, + "yanchor": "top", + "pad": {"l": 10, "r": 10, "t": 0, "b": 0}, + } + ] + ) + + +def _build_scatter3d(layer: LayerConfig, df: pd.DataFrame, controls: Any | None): + aes = layer.aesthetics + x_field = _require_field(aes.x, "x") + y_field = _require_field(aes.y, "y") + z_field = _require_field(aes.z, "z") + + color_field = 
_optional_field(aes.color) + size_field = _optional_field(aes.size) + size_value = _resolve_size_value(aes.size) + opacity_value = _resolve_opacity(aes.opacity) + hover_fields = _collect_tooltip_fields(aes.tooltip) + + dropdown = _resolve_layer_dropdown(df, controls) + slider_enabled = not (getattr(controls, "accumulate_steps", False)) + slider = _resolve_slider_control(df, controls if slider_enabled else None) + color_map = _build_color_discrete_map(df, color_field, aes.color) + color_specs = _build_color_group_specs(df, color_field, aes.color, color_map) + + if slider: + figure = _build_slider_scatter3d( + df, + slider, + dropdown, + x_field, + y_field, + z_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + aes, + layer, + size_value, + ) + elif dropdown: + figure = _build_layer_filtered_scatter3d( + df, + dropdown, + x_field, + y_field, + z_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + aes, + layer, + size_value, + ) + else: + traces = _scatter3d_traces( + df, + x_field, + y_field, + z_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + figure = go.Figure(data=traces) + figure = _apply_constant_channels(figure, aes) + _maybe_update_trace_name(figure, layer, color_field) + + _apply_legend_visibility(figure, aes) + + return figure + + +def _build_scatter2d(layer: LayerConfig, df: pd.DataFrame, controls: Any | None): + aes = layer.aesthetics + x_field = _require_field(aes.x, "x") + y_field = _require_field(aes.y, "y") + + color_field = _optional_field(aes.color) + size_field = _optional_field(aes.size) + size_value = _resolve_size_value(aes.size) + opacity_value = _resolve_opacity(aes.opacity) + hover_fields = _collect_tooltip_fields(aes.tooltip) + + dropdown = _resolve_layer_dropdown(df, controls) + slider_enabled = not (getattr(controls, "accumulate_steps", False)) + slider = _resolve_slider_control(df, 
controls if slider_enabled else None) + color_map = _build_color_discrete_map(df, color_field, aes.color) + color_specs = _build_color_group_specs(df, color_field, aes.color, color_map) + + if slider: + figure = _build_slider_scatter2d( + df, + slider, + dropdown, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + aes, + layer, + size_value, + ) + elif dropdown: + figure = _build_layer_filtered_scatter2d( + df, + dropdown, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + aes, + layer, + size_value, + ) + else: + traces = _scatter2d_traces( + df, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + figure = go.Figure(data=traces) + figure = _apply_constant_channels(figure, aes) + _maybe_update_trace_name(figure, layer, color_field) + + _apply_legend_visibility(figure, aes) + return figure + + +def _apply_plot_level_properties( + figure, + guides: PlotLevelGuideConfig, + size: PlotSizeConfig, + background: str | None, + aes: AestheticsConfig, +): + title_lines = [guides.title] if guides.title else [] + title_lines += [text for text in (guides.subtitle, guides.caption) if text] + if title_lines: + figure.update_layout(title="
".join(title_lines))
+    if size.width or size.height:
+        figure.update_layout(width=size.width, height=size.height)
+
+    # 3D figures configure axes on the scene object; 2D figures on layout axes.
+    has_3d = any(trace.type == "scatter3d" for trace in figure.data)
+    x_title = _axis_title(aes.x)
+    y_title = _axis_title(aes.y)
+    z_title = _axis_title(aes.z)
+    if has_3d:
+        scene_updates: dict[str, Any] = {}
+        if x_title:
+            scene_updates.setdefault("xaxis", {})["title"] = x_title
+        if y_title:
+            scene_updates.setdefault("yaxis", {})["title"] = y_title
+        if z_title:
+            scene_updates.setdefault("zaxis", {})["title"] = z_title
+        if background:
+            scene_updates["bgcolor"] = background
+        if scene_updates:
+            figure.update_layout(scene=scene_updates)
+    else:
+        axis_updates: dict[str, Any] = {}
+        if x_title:
+            axis_updates.setdefault("xaxis", {})["title"] = x_title
+        if y_title:
+            axis_updates.setdefault("yaxis", {})["title"] = y_title
+        if background:
+            axis_updates["plot_bgcolor"] = background
+        if axis_updates:
+            figure.update_layout(**axis_updates)
+
+    if guides.labels:
+        LOGGER.info("Plot-level labels are not yet implemented for Plotly; skipping %s labels.", len(guides.labels))
+    return figure
+
+
+def _require_field(channel: ChannelAestheticsConfig | None, name: str) -> str:
+    """Return the channel's field, raising when the channel or its field is missing."""
+    if channel is None or not channel.field:
+        raise ConfigValidationError(f"Plotly renderer requires '{name}' channel with a field specified.")
+    return channel.field
+
+
+def _optional_field(channel: ChannelAestheticsConfig | None) -> str | None:
+    """Return the channel's field when the channel is configured, else None."""
+    if channel is None:
+        return None
+    return channel.field
+
+
+def _collect_tooltip_fields(tooltips: list[ChannelAestheticsConfig] | None) -> list[str]:
+    """Collect the referenced data fields from tooltip channel configs."""
+    if not tooltips:
+        return []
+    fields: list[str] = []
+    for tooltip in tooltips:
+        if tooltip.field is None:
+            raise ConfigValidationError("Plotly renderer tooltip entries must reference a data field.")
+        fields.append(tooltip.field)
+    return fields
+
+
+def _resolve_opacity(channel: ChannelAestheticsConfig | None) -> float | None:
+    """Validate and return a constant opacity in [0, 1], or None when unset."""
+    if channel is None:
+        return None
+    if channel.value 
is None:
+        raise ConfigValidationError("Plotly renderer opacity channel must specify a constant value.")
+    try:
+        opacity = float(channel.value)
+    except (TypeError, ValueError) as exc:
+        raise ConfigValidationError("Opacity channel must be a numeric constant.") from exc
+    if not 0.0 <= opacity <= 1.0:
+        raise ConfigValidationError("Opacity value must be between 0 and 1.")
+    return opacity
+
+
+def _resolve_size_value(channel: ChannelAestheticsConfig | None) -> float | None:
+    """Return the channel's constant size as a float, or None when unset."""
+    if channel is None or channel.value is None:
+        return None
+    try:
+        return float(channel.value)
+    except (TypeError, ValueError) as exc:
+        raise ConfigValidationError("Size channel value must be numeric.") from exc
+
+
+def _axis_title(channel: ChannelAestheticsConfig | None) -> str | None:
+    """Prefer the explicit axis title; fall back to the bound field name."""
+    if channel is None:
+        return None
+    return channel.title or channel.field
+
+
+def _axis_domain(channel: ChannelAestheticsConfig | None) -> list[Any] | None:
+    """Return the configured scale domain (axis range), if any."""
+    if channel is None or channel.scale is None:
+        return None
+    return channel.scale.domain
+
+
+def _apply_constant_channels(figure, aes: AestheticsConfig):
+    """Apply constant color/size values to all traces, including animation frames."""
+    if aes.color and aes.color.value is not None:
+        # Both 2D and 3D scatter traces must be updated via their own selectors.
+        figure.update_traces(marker={"color": aes.color.value}, selector={"type": "scatter3d"})
+        figure.update_traces(marker={"color": aes.color.value}, selector={"type": "scatter"})
+        for frame in getattr(figure, "frames", []) or []:
+            for trace in frame.data:
+                if hasattr(trace, "marker"):
+                    trace.marker = trace.marker or {}
+                    trace.marker["color"] = aes.color.value
+    if aes.size and aes.size.value is not None:
+        figure.update_traces(marker={"size": aes.size.value}, selector={"type": "scatter3d"})
+        figure.update_traces(marker={"size": aes.size.value}, selector={"type": "scatter"})
+        for frame in getattr(figure, "frames", []) or []:
+            for trace in frame.data:
+                if hasattr(trace, "marker"):
+                    trace.marker = trace.marker or {}
+                    trace.marker["size"] = aes.size.value
+    return figure
+
+
+def _apply_legend_visibility(figure, aes: AestheticsConfig):
+    """Hide legends on every trace (and frame trace) when the color legend is off."""
+    if 
not _legend_hidden(aes.color):
+        return
+    for trace in figure.data:
+        trace.showlegend = False
+    for frame in getattr(figure, "frames", []) or []:
+        for trace in frame.data:
+            trace.showlegend = False
+
+
+def _legend_hidden(color_cfg: ChannelAestheticsConfig | None) -> bool:
+    """True when the color channel explicitly disables its legend."""
+    return bool(color_cfg and color_cfg.legend and color_cfg.legend.visible is False)
+
+
+def _maybe_update_trace_name(figure, layer: LayerConfig, color_field: str | None):
+    """Name a lone trace after the layer (or color field) for legend display."""
+    # NOTE(review): only scatter3d traces are renamed here; a single 2D scatter
+    # trace keeps its default name — confirm whether that is intentional.
+    if len(figure.data) != 1:
+        return
+    trace_name = layer.name or (color_field or "3d_scatter")
+    figure.update_traces(name=trace_name, selector={"type": "scatter3d"})
+
+
+def _resolve_layer_dropdown(df: pd.DataFrame, controls: Any | None) -> tuple[str, list[Any]] | None:
+    """Resolve a 'layer' dropdown control into (field, usable option values).
+
+    Returns None when the control is absent, bound to a non-'layer' field, or
+    leaves fewer than two selectable options.
+    """
+    if not controls:
+        return None
+    dropdown = getattr(controls, "dropdown", None)
+    field_name = getattr(dropdown, "field", None) if dropdown else None
+    if field_name != "layer" or field_name not in df.columns:
+        return None
+    raw_options = getattr(dropdown, "options", None) or []
+    options = [_normalize_option(value) for value in raw_options]
+    # Filter out "_no_layer_" placeholder used for layer-independent data
+    # NOTE(review): set(df[field_name]) is rebuilt for every option; hoist it
+    # outside the comprehension for large frames.
+    valid_values = [value for value in options if value in set(df[field_name]) and value != "_no_layer_"]
+    if len(valid_values) <= 1:
+        return None
+    return field_name, valid_values
+
+
+def _resolve_slider_control(df: pd.DataFrame, controls: Any | None) -> tuple[str, list[Any]] | None:
+    """Resolve a slider control into (field, ordered option values), or None."""
+    if not controls:
+        return None
+    slider = getattr(controls, "slider", None)
+    field_name = getattr(slider, "field", None) if slider else None
+    if field_name is None or field_name not in df.columns:
+        return None
+    raw_options = getattr(slider, "options", None)
+    # Fall back to the distinct values present in the data when no explicit options.
+    option_values = raw_options or list(pd.unique(df[field_name]))
+    options = [_normalize_option(value) for value in option_values]
+    if len(options) <= 1:
+        return None
+    # Preserve order if numeric; otherwise keep as strings
+    try:
+        options = sorted(set(options), key=lambda v: float(v))
+    except 
(TypeError, ValueError):  # pragma: no cover - fallback for non-numeric
+        options = sorted(dict.fromkeys(options))
+    return field_name, options
+
+
+def _build_layer_filtered_scatter3d(
+    df: pd.DataFrame,
+    dropdown: tuple[str, list[Any]],
+    x_field: str,
+    y_field: str,
+    z_field: str,
+    color_field: str | None,
+    size_field: str | None,
+    hover_fields: list[str],
+    opacity_value: float | None,
+    color_specs: list[ColorGroupSpec],
+    aes: AestheticsConfig,
+    layer: LayerConfig,
+    size_value: float | None = None,
+):
+    """Build a 3D scatter with a dropdown that toggles per-layer trace visibility."""
+    field_name, options = dropdown
+    traces: list[Any] = []
+    # (start, end) trace-index ranges per option, consumed by the dropdown menu.
+    trace_ranges: list[tuple[int, int]] = []
+    # Options that actually matched rows in df.
+    available: list[Any] = []
+
+    for option in options:
+        subset = df.loc[df[field_name] == option]
+        if subset.empty:
+            continue
+        layer_index = len(available)
+        available.append(option)
+        subset_traces = _scatter3d_traces(
+            subset,
+            x_field,
+            y_field,
+            z_field,
+            color_field,
+            size_field,
+            hover_fields,
+            opacity_value,
+            color_specs,
+            layer_name=str(option),
+            size_value=size_value,
+        )
+        # Only the first available option starts visible.
+        for trace in subset_traces:
+            trace.visible = layer_index == 0
+        start = len(traces)
+        traces.extend(subset_traces)
+        trace_ranges.append((start, len(traces)))
+
+    if len(available) <= 1:
+        # A dropdown would be pointless; render everything as one static figure.
+        traces = _scatter3d_traces(
+            df,
+            x_field,
+            y_field,
+            z_field,
+            color_field,
+            size_field,
+            hover_fields,
+            opacity_value,
+            color_specs,
+            layer_name=layer.name,
+            size_value=size_value,
+        )
+        figure = go.Figure(data=traces)
+        figure = _apply_constant_channels(figure, aes)
+        _maybe_update_trace_name(figure, layer, color_field)
+        return figure
+
+    figure = go.Figure(data=traces)
+    figure = _apply_constant_channels(figure, aes)
+    _add_layer_dropdown_menu(figure, available, trace_ranges)
+    _maybe_update_trace_name(figure, layer, color_field)
+    return figure
+
+
+def _build_layer_filtered_scatter2d(
+    df: pd.DataFrame,
+    dropdown: tuple[str, list[Any]],
+    x_field: str,
+    y_field: str,
+    color_field: str | None,
+    size_field: str | None,
+    hover_fields: list[str],
+
opacity_value: float | None, + color_specs: list[ColorGroupSpec], + aes: AestheticsConfig, + layer: LayerConfig, + size_value: float | None = None, +): + field_name, options = dropdown + traces: list[Any] = [] + trace_ranges: list[tuple[int, int]] = [] + available: list[Any] = [] + + for option in options: + subset = df.loc[df[field_name] == option] + if subset.empty: + continue + layer_index = len(available) + available.append(option) + subset_traces = _scatter2d_traces( + subset, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + layer_name=str(option), + size_value=size_value, + ) + for trace in subset_traces: + trace.visible = layer_index == 0 + start = len(traces) + traces.extend(subset_traces) + trace_ranges.append((start, len(traces))) + + if len(available) <= 1: + traces = _scatter2d_traces( + df, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + layer_name=layer.name, + size_value=size_value, + ) + figure = go.Figure(data=traces) + figure = _apply_constant_channels(figure, aes) + _maybe_update_trace_name(figure, layer, color_field) + return figure + + figure = go.Figure(data=traces) + figure = _apply_constant_channels(figure, aes) + _add_layer_dropdown_menu(figure, available, trace_ranges) + _maybe_update_trace_name(figure, layer, color_field) + return figure + + +def _build_slider_scatter( + df: pd.DataFrame, + slider: tuple[str, list[Any]], + dropdown: tuple[str, list[Any]] | None, + x_field: str, + y_field: str, + color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + aes: AestheticsConfig, + layer: LayerConfig, + *, + z_field: str | None = None, + size_value: float | None = None, +): + """Build slider-controlled scatter plot (2D or 3D based on z_field presence).""" + slider_field, slider_values = slider + layer_field = dropdown[0] if dropdown else None + 
layer_options = dropdown[1] if dropdown else [None] + + traces: list[Any] = [] + trace_ranges: list[tuple[int, int]] = [] + available_layers: list[Any] = [] + frames_by_value: dict[str, list[Any]] = {str(value): [] for value in slider_values} + + for option in layer_options: + subset = df if option is None else df.loc[df[layer_field] == option] + if subset.empty: + continue + layer_index = len(available_layers) + available_layers.append(option) + layer_label = str(option) if option is not None else layer.name + + initial_subset = subset.loc[subset[slider_field] == slider_values[0]] + subset_traces = _scatter_traces( + initial_subset, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + z_field=z_field, + layer_name=layer_label, + keep_empty=True, + size_value=size_value, + ) + if dropdown: + for trace in subset_traces: + trace.visible = layer_index == 0 + start = len(traces) + traces.extend(subset_traces) + trace_ranges.append((start, len(traces))) + + for slider_value in slider_values: + slider_subset = subset.loc[subset[slider_field] == slider_value] + frame_traces = _scatter_traces( + slider_subset, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + z_field=z_field, + layer_name=layer_label, + keep_empty=True, + size_value=size_value, + ) + frames_by_value[str(slider_value)].extend(frame_traces) + + figure = go.Figure(data=traces) + figure.frames = _build_slider_frames(frames_by_value, slider_values) + _add_slider_layout(figure, slider_field, slider_values) + + if dropdown and available_layers: + _add_layer_dropdown_menu(figure, available_layers, trace_ranges) + else: + _maybe_update_trace_name(figure, layer, color_field) + + figure = _apply_constant_channels(figure, aes) + return figure + + +def _build_slider_scatter3d( + df: pd.DataFrame, + slider: tuple[str, list[Any]], + dropdown: tuple[str, list[Any]] | None, + x_field: str, + y_field: str, + z_field: str, + 
color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + aes: AestheticsConfig, + layer: LayerConfig, + size_value: float | None = None, +): + """Build 3D slider scatter. Wrapper around _build_slider_scatter.""" + return _build_slider_scatter( + df, + slider, + dropdown, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + aes, + layer, + z_field=z_field, + size_value=size_value, + ) + + +def _build_slider_scatter2d( + df: pd.DataFrame, + slider: tuple[str, list[Any]], + dropdown: tuple[str, list[Any]] | None, + x_field: str, + y_field: str, + color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + aes: AestheticsConfig, + layer: LayerConfig, + size_value: float | None = None, +): + """Build 2D slider scatter. Wrapper around _build_slider_scatter.""" + return _build_slider_scatter( + df, + slider, + dropdown, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + aes, + layer, + z_field=None, + size_value=size_value, + ) + + +def _add_layer_dropdown_menu( + figure, + options: list[Any], + trace_ranges: list[tuple[int, int]], +): + total_traces = len(figure.data) + buttons = [] + for option, (start, end) in zip(options, trace_ranges, strict=False): + visible = [False] * total_traces + for idx in range(start, end): + visible[idx] = True + buttons.append( + { + "label": str(option), + "method": "update", + "args": [{"visible": visible}], + } + ) + + figure.update_layout( + updatemenus=[ + { + "buttons": buttons, + "direction": "down", + "showactive": True, + "x": 1.05, + "xanchor": "left", + "y": 1, + "yanchor": "top", + "pad": {"l": 10, "r": 10, "t": 0, "b": 0}, + } + ] + ) + + +def _build_slider_frames(frames_by_value: dict[str, list[Any]], slider_values: list[Any]): + frames: list[go.Frame] = 
[] + for value in slider_values: + name = str(value) + frame_traces = frames_by_value.get(name, []) + frames.append(go.Frame(name=name, data=frame_traces)) + return frames + + +def _add_slider_layout(figure, field_name: str, slider_values: list[Any]): + if not slider_values: + return + steps = [] + for value in slider_values: + label = str(value) + steps.append( + { + "label": label, + "method": "animate", + "args": [ + [label], + { + "frame": {"duration": 0, "redraw": True}, + "mode": "immediate", + "transition": {"duration": 0}, + }, + ], + } + ) + figure.update_layout( + sliders=[ + { + "active": 0, + "currentvalue": {"prefix": f"{field_name}="}, + "pad": {"t": 40, "b": 0}, + "steps": steps, + } + ] + ) + + +@dataclass +class ColorGroupSpec: + """Specification for a color grouping in Plotly rendering.""" + + label: str | None + value: Any | None + constant_color: str | None + mode: Literal["none", "literal", "discrete", "field"] = "none" + + +def _scatter_traces( + df: pd.DataFrame, + x_field: str, + y_field: str, + color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + *, + z_field: str | None = None, + layer_name: str | None = None, + keep_empty: bool = False, + size_value: float | None = None, +) -> list[go.Scatter3d] | list[go.Scatter]: + """Build scatter traces (2D or 3D based on z_field presence).""" + is_3d = z_field is not None + traces: list[Any] = [] + + for idx, spec in enumerate(color_specs): + subset = _subset_for_spec(df, color_field, spec) + if subset.empty and not keep_empty: + continue + marker = _build_marker(subset, color_field, size_field, spec, size_value) + customdata = _build_customdata(subset, hover_fields) + + if is_3d: + assert z_field is not None + trace = go.Scatter3d( + x=subset[x_field].tolist(), + y=subset[y_field].tolist(), + z=subset[z_field].tolist(), + mode="markers", + name=_derive_trace_name(layer_name, spec, idx), + marker=marker, + 
customdata=customdata, + hovertemplate=_build_hovertemplate(hover_fields), + ) + else: + trace = go.Scatter( + x=subset[x_field].tolist(), + y=subset[y_field].tolist(), + mode="markers", + name=_derive_trace_name(layer_name, spec, idx), + marker=marker, + customdata=customdata, + hovertemplate=_build_hovertemplate(hover_fields), + ) + + if spec.mode == "literal": + trace.showlegend = False + if opacity_value is not None: + trace.opacity = opacity_value + traces.append(trace) + + if not traces: + default_name = layer_name or ("scatter3d" if is_3d else "scatter") + if is_3d: + empty_trace = go.Scatter3d(x=[], y=[], z=[], mode="markers", name=default_name) + else: + empty_trace = go.Scatter(x=[], y=[], mode="markers", name=default_name) + if opacity_value is not None: + empty_trace.opacity = opacity_value + traces.append(empty_trace) + + return traces + + +def _scatter3d_traces( + df: pd.DataFrame, + x_field: str, + y_field: str, + z_field: str, + color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + *, + layer_name: str | None = None, + keep_empty: bool = False, + size_value: float | None = None, +) -> list[go.Scatter3d]: + """Build 3D scatter traces. Wrapper around _scatter_traces for type safety.""" + return _scatter_traces( + df, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + z_field=z_field, + layer_name=layer_name, + keep_empty=keep_empty, + size_value=size_value, + ) # type: ignore[return-value] + + +def _scatter2d_traces( + df: pd.DataFrame, + x_field: str, + y_field: str, + color_field: str | None, + size_field: str | None, + hover_fields: list[str], + opacity_value: float | None, + color_specs: list[ColorGroupSpec], + *, + layer_name: str | None = None, + keep_empty: bool = False, + size_value: float | None = None, +) -> list[go.Scatter]: + """Build 2D scatter traces. 
Wrapper around _scatter_traces for type safety.""" + return _scatter_traces( + df, + x_field, + y_field, + color_field, + size_field, + hover_fields, + opacity_value, + color_specs, + z_field=None, + layer_name=layer_name, + keep_empty=keep_empty, + size_value=size_value, + ) # type: ignore[return-value] + + +def _subset_for_spec(df: pd.DataFrame, color_field: str | None, spec: ColorGroupSpec) -> pd.DataFrame: + if spec.mode != "discrete" or color_field is None: + return df + return df.loc[df[color_field] == spec.value] + + +def _build_marker( + df: pd.DataFrame, + color_field: str | None, + size_field: str | None, + spec: ColorGroupSpec, + size_value: float | None = None, +) -> dict[str, Any]: + marker: dict[str, Any] = {} + if size_field and size_field in df.columns: + marker["size"] = df[size_field].tolist() + elif size_value is not None: + marker["size"] = size_value + if spec.mode == "literal" and color_field and color_field in df.columns: + marker["color"] = df[color_field].tolist() + elif spec.mode == "discrete" and spec.constant_color is not None: + marker["color"] = spec.constant_color + elif spec.mode == "field" and color_field and color_field in df.columns: + marker["color"] = df[color_field].tolist() + return marker + + +def _build_customdata(df: pd.DataFrame, hover_fields: list[str]) -> Any: + if not hover_fields: + return None + missing = [field for field in hover_fields if field not in df.columns] + if missing: + raise ConfigValidationError(f"Tooltip field(s) {missing} are missing from dataframe.") + return df[hover_fields].to_numpy() + + +def _build_hovertemplate(hover_fields: list[str]) -> str | None: + if not hover_fields: + return None + template_parts = [f"{field}: %{{customdata[{idx}]}}" for idx, field in enumerate(hover_fields)] + return "
".join(template_parts) + "" + + +def _derive_trace_name(layer_name: str | None, spec: ColorGroupSpec, idx: int) -> str: + if spec.label and layer_name: + return f"{layer_name} - {spec.label}" + if spec.label: + return spec.label + if layer_name: + return layer_name + return f"series_{idx + 1}" + + +def _build_color_discrete_map( + df: pd.DataFrame, color_field: str | None, color_cfg: ChannelAestheticsConfig | None +) -> dict[str, str] | None: + if color_field is None or color_cfg is None: + return None + if color_cfg.type not in {"nominal", "ordinal"}: + return None + if color_field not in df.columns: + return None + series: pd.Series = df.loc[:, color_field] + if _series_is_literal_color(series): + return None + palette = qualitative_colors.Plotly + values = [_normalize_option(value) for value in pd.unique(series)] + return {value: palette[idx % len(palette)] for idx, value in enumerate(values)} + + +def _build_color_group_specs( + df: pd.DataFrame, + color_field: str | None, + color_cfg: ChannelAestheticsConfig | None, + color_map: dict[str, str] | None, +) -> list[ColorGroupSpec]: + if color_field is None or color_field not in df.columns: + return [ColorGroupSpec(label=None, value=None, constant_color=None, mode="none")] + series: pd.Series = df.loc[:, color_field] + if _series_is_literal_color(series): + return [ColorGroupSpec(label=None, value=None, constant_color=None, mode="literal")] + if color_cfg and color_cfg.type in {"nominal", "ordinal"}: + specs: list[ColorGroupSpec] = [] + for value in pd.unique(series): + normalized = _normalize_option(value) + constant_color = (color_map or {}).get(normalized) + specs.append( + ColorGroupSpec( + label=str(value), + value=value, + constant_color=constant_color, + mode="discrete", + ) + ) + return specs + if color_cfg: + return [ColorGroupSpec(label=color_cfg.title or color_field, value=None, constant_color=None, mode="field")] + return [ColorGroupSpec(label=None, value=None, constant_color=None, mode="none")] + + 
+def _normalize_option(value: Any) -> Any: + if hasattr(value, "item"): + try: + return value.item() + except AttributeError: # pragma: no cover - defensive + return value + return value + + +def _series_is_literal_color(series: pd.Series) -> bool: + if series.empty: + return False + return bool(series.dropna().map(_value_is_color_string).all()) + + +def _value_is_color_string(value: Any) -> bool: + if isinstance(value, str): + candidate = value.strip() + if _HEX_COLOR_PATTERN.match(candidate): + return True + lowered = candidate.lower() + return lowered.startswith("rgb(") or lowered.startswith("rgba(") + return False diff --git a/simplexity/visualization/structured_configs.py b/simplexity/visualization/structured_configs.py new file mode 100644 index 00000000..95d9a781 --- /dev/null +++ b/simplexity/visualization/structured_configs.py @@ -0,0 +1,238 @@ +"""Structured visualization configuration dataclasses. + +This module implements the schema described in docs/visualization.md. The +dataclasses are intentionally backend-agnostic so that Hydra configs can be +validated once and rendered by different visualization engines (Altair, +plotnine, matplotlib, etc.). 
+""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import Any + +from simplexity.exceptions import ConfigValidationError + + +def _ensure(condition: bool, message: str) -> None: + """Raise ConfigValidationError if condition is not met.""" + if not condition: + raise ConfigValidationError(message) + + +@dataclass +class DataConfig: + """Specifies the logical data source and lightweight filtering.""" + + source: str = "main" + filters: list[str] = field(default_factory=list) + columns: list[str] | None = None + + +@dataclass +class TransformConfig: # pylint: disable=too-many-instance-attributes + """Represents a single data transform stage.""" + + op: str # ["filter", "calculate", "aggregate", "bin", "window", "fold", "pivot"] + filter: str | None = None + as_field: str | None = None + expr: str | None = None + groupby: list[str] | None = None + aggregations: dict[str, str] | None = None + field: str | None = None + binned_as: str | None = None + maxbins: int | None = None + window: dict[str, str] | None = None + frame: list[int | None] | None = None + fold_fields: list[str] | None = None + as_fields: list[str] | None = None + + def __post_init__(self) -> None: + if self.op == "filter": + _ensure(bool(self.filter), "TransformConfig.filter must be provided when op='filter'") + if self.op == "calculate": + _ensure(bool(self.as_field), "TransformConfig.as_field is required for calculate transforms") + _ensure(bool(self.expr), "TransformConfig.expr is required for calculate transforms") + if self.op == "aggregate": + _ensure(bool(self.groupby), "TransformConfig.groupby is required for aggregate transforms") + _ensure( + bool(self.aggregations), + "TransformConfig.aggregations is required for aggregate transforms", + ) + if self.op == "bin": + _ensure(bool(self.field), "TransformConfig.field is required for bin transforms") + _ensure(bool(self.binned_as), "TransformConfig.binned_as is required for bin transforms") + if 
self.op == "window": + _ensure(bool(self.window), "TransformConfig.window is required for window transforms") + + +@dataclass +class ScaleConfig: + """Describes how raw data values are mapped to visual ranges.""" + + type: str | None = None # ["linear", "log", "sqrt", "pow", "symlog", "time", "utc", "ordinal", "band", "point"] + domain: list[Any] | None = None + range: list[Any] | None = None + clamp: bool | None = None + nice: bool | None = None + reverse: bool | None = None + + +@dataclass +class AxisConfig: + """Axis settings for positional channels.""" + + title: str | None = None + grid: bool | None = None + format: str | None = None + tick_count: int | None = None + label_angle: float | None = None + visible: bool = True + + +@dataclass +class LegendConfig: + """Legend settings for categorical or continuous mappings.""" + + title: str | None = None + orient: str | None = None + visible: bool = True + + +@dataclass +class ChannelAestheticsConfig: # pylint: disable=too-many-instance-attributes + """Represents one visual encoding channel (x, y, color, etc.).""" + + field: str | None = None + type: str | None = None # ["quantitative", "ordinal", "nominal", "temporal"] + value: Any | None = None + aggregate: str | None = None + bin: bool | None = None + time_unit: str | None = None + scale: ScaleConfig | None = None + axis: AxisConfig | None = None + legend: LegendConfig | None = None + sort: str | list[Any] | None = None + title: str | None = None + + def __post_init__(self) -> None: + if self.field is not None and self.value is not None: + raise ConfigValidationError( + "ChannelAestheticsConfig cannot specify both 'field' and 'value'; prefer 'field'." 
+ ) + + +@dataclass +class AestheticsConfig: # pylint: disable=too-many-instance-attributes + """Collection of channel encodings for a layer.""" + + x: ChannelAestheticsConfig | None = None + y: ChannelAestheticsConfig | None = None + z: ChannelAestheticsConfig | None = None + color: ChannelAestheticsConfig | None = None + size: ChannelAestheticsConfig | None = None + shape: ChannelAestheticsConfig | None = None + opacity: ChannelAestheticsConfig | None = None + tooltip: list[ChannelAestheticsConfig] | None = None + row: ChannelAestheticsConfig | None = None + column: ChannelAestheticsConfig | None = None + detail: ChannelAestheticsConfig | None = None + + +@dataclass +class GeometryConfig: + """Visual primitive used to draw the layer.""" + + type: str # [point, line, area, bar, rect, rule, tick, circle, square, text, boxplot, errorbar, errorband] + props: dict[str, Any] = field(default_factory=dict) + + def __post_init__(self) -> None: + _ensure(isinstance(self.props, dict), "GeometryConfig.props must be a dictionary") + + +@dataclass +class SelectionConfig: + """Interactive selection definition.""" + + name: str + type: str = "interval" # ["interval", "single", "multi"] + encodings: list[str] | None = None + fields: list[str] | None = None + bind: dict[str, Any] | None = None + + +@dataclass +class PlotSizeConfig: + """Size and layout metadata for an entire plot.""" + + width: int | None = None + height: int | None = None + autosize: str | None = None + + +@dataclass +class LabelConfig: + """Free-form labels or annotations.""" + + text: str | None = None + x: float | str | None = None + y: float | str | None = None + props: dict[str, Any] = field(default_factory=dict) + + +@dataclass +class PlotLevelGuideConfig: + """Titles and caption level guides.""" + + title: str | None = None + subtitle: str | None = None + caption: str | None = None + labels: list[LabelConfig] | None = None + title_scalars: dict[str, str] | None = None + + +@dataclass +class FacetConfig: + 
"""High-level faceting instructions.""" + + row: str | None = None + column: str | None = None + wrap: int | None = None + + +@dataclass +class LayerConfig: + """A single layer in a composed plot.""" + + name: str | None = None + data: DataConfig | None = None + transforms: list[TransformConfig] = field(default_factory=list) + geometry: GeometryConfig = field(default_factory=lambda: GeometryConfig(type="point")) + aesthetics: AestheticsConfig = field(default_factory=AestheticsConfig) + selections: list[SelectionConfig] = field(default_factory=list) + + +@dataclass +class PlotConfig: # pylint: disable=too-many-instance-attributes + """Top-level configuration for one plot.""" + + backend: str = "altair" # ["altair", "plotly"] + data: DataConfig = field(default_factory=DataConfig) + transforms: list[TransformConfig] = field(default_factory=list) + layers: list[LayerConfig] = field(default_factory=list) + facet: FacetConfig | None = None + size: PlotSizeConfig = field(default_factory=PlotSizeConfig) + guides: PlotLevelGuideConfig = field(default_factory=PlotLevelGuideConfig) + background: str | None = None + selections: list[SelectionConfig] = field(default_factory=list) + + def __post_init__(self) -> None: + _ensure(self.layers is not None, "PlotConfig.layers must be a list (can be empty)") + + +@dataclass +class GraphicsConfig: + """Root Visualization config that multiplexes multiple named plots.""" + + default_backend: str = "altair" # ["altair", "plotly"] + plots: dict[str, PlotConfig] = field(default_factory=dict) diff --git a/tests/activations/test_activation_analysis.py b/tests/activations/test_activation_analysis.py index 3d67e13c..b31c8a8a 100644 --- a/tests/activations/test_activation_analysis.py +++ b/tests/activations/test_activation_analysis.py @@ -20,6 +20,12 @@ PcaAnalysis, ) from simplexity.activations.activation_tracker import ActivationTracker, PrepareOptions, prepare_activations +from simplexity.activations.visualization.dataframe_builders import 
_build_scalar_series_dataframe +from simplexity.activations.visualization_configs import ( + ActivationVisualizationControlsConfig, + ScalarSeriesMapping, +) +from simplexity.exceptions import ConfigValidationError @pytest.fixture @@ -481,7 +487,7 @@ def test_basic_tracking(self, synthetic_data): } ) - scalars, projections = tracker.analyze( + scalars, projections, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -493,6 +499,7 @@ def test_basic_tracking(self, synthetic_data): assert "regression/layer_0_projected" in projections assert "pca/layer_0_pca" in projections + assert visualizations == {} def test_all_tokens_mode(self, synthetic_data): """Test tracker with all tokens mode.""" @@ -505,7 +512,7 @@ def test_all_tokens_mode(self, synthetic_data): } ) - scalars, projections = tracker.analyze( + scalars, projections, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -514,6 +521,7 @@ def test_all_tokens_mode(self, synthetic_data): assert "regression/layer_0_r2" in scalars assert "regression/layer_0_projected" in projections + assert visualizations == {} def test_mixed_requirements(self, synthetic_data): """Test tracker with analyses that have different requirements.""" @@ -531,7 +539,7 @@ def test_mixed_requirements(self, synthetic_data): } ) - scalars, _ = tracker.analyze( + scalars, _, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -540,6 +548,7 @@ def test_mixed_requirements(self, synthetic_data): assert "regression/layer_0_r2" in scalars assert "pca/layer_0_variance_explained" in scalars + assert visualizations == {} def test_concatenated_layers(self, synthetic_data): """Test tracker with concatenated layers.""" @@ -557,7 +566,7 @@ def test_concatenated_layers(self, synthetic_data): } ) - scalars, 
projections = tracker.analyze( + scalars, projections, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -569,6 +578,7 @@ def test_concatenated_layers(self, synthetic_data): assert "regression/concatenated_projected" in projections assert "pca/concatenated_pca" in projections + assert visualizations == {} def test_uniform_weights(self, synthetic_data): """Test tracker with uniform weights.""" @@ -582,7 +592,7 @@ def test_uniform_weights(self, synthetic_data): } ) - scalars, _ = tracker.analyze( + scalars, _, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -590,6 +600,7 @@ def test_uniform_weights(self, synthetic_data): ) assert "regression/layer_0_r2" in scalars + assert visualizations == {} def test_multiple_configs_efficiency(self, synthetic_data): """Test that tracker efficiently pre-computes only needed preprocessing modes.""" @@ -612,7 +623,7 @@ def test_multiple_configs_efficiency(self, synthetic_data): } ) - scalars, projections = tracker.analyze( + scalars, projections, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -626,6 +637,7 @@ def test_multiple_configs_efficiency(self, synthetic_data): assert "pca_all_tokens/layer_0_pca" in projections assert "pca_last_token/layer_0_pca" in projections assert "regression_concat/concatenated_projected" in projections + assert visualizations == {} def test_tracker_accepts_torch_inputs(self, synthetic_data): """ActivationTracker should handle PyTorch tensors via conversion.""" @@ -651,7 +663,7 @@ def test_tracker_accepts_torch_inputs(self, synthetic_data): name: torch.tensor(np.asarray(layer)) for name, layer in synthetic_data["activations"].items() } - scalars, projections = tracker.analyze( + scalars, projections, visualizations = tracker.analyze( 
inputs=torch_inputs, beliefs=torch_beliefs, probs=torch_probs, @@ -660,314 +672,103 @@ def test_tracker_accepts_torch_inputs(self, synthetic_data): assert "regression/layer_0_r2" in scalars assert "pca/layer_0_pca" in projections - - -class TestTupleBeliefStates: - """Test activation tracker with tuple belief states for factored processes.""" - - @pytest.fixture - def factored_belief_data(self): - """Create synthetic data with factored belief states.""" - batch_size = 4 - seq_len = 5 - d_layer0 = 8 - d_layer1 = 12 - - inputs = jnp.array( - [ - [1, 2, 3, 4, 5], - [1, 2, 3, 6, 7], - [1, 2, 8, 9, 10], - [1, 2, 3, 4, 11], - ] - ) - - # Factored beliefs: 2 factors with dimensions 3 and 2 - factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.3 - factor_1 = jnp.ones((batch_size, seq_len, 2)) * 0.7 - factored_beliefs = (factor_0, factor_1) - - probs = jnp.ones((batch_size, seq_len)) * 0.1 - - activations = { - "layer_0": jnp.ones((batch_size, seq_len, d_layer0)) * 0.3, - "layer_1": jnp.ones((batch_size, seq_len, d_layer1)) * 0.7, - } - - return { - "inputs": inputs, - "factored_beliefs": factored_beliefs, - "probs": probs, - "activations": activations, - "batch_size": batch_size, - "seq_len": seq_len, - "factor_0_dim": 3, - "factor_1_dim": 2, - "d_layer0": d_layer0, - "d_layer1": d_layer1, + assert visualizations == {} + + def test_tracker_builds_visualizations(self, synthetic_data, monkeypatch): + """Tracker should build configured visualization payloads.""" + monkeypatch.setattr( + "simplexity.activations.activation_visualizations.build_altair_chart", + lambda plot_cfg, registry, controls=None: { + "backend": "altair", + "layers": len(plot_cfg.layers), + }, + ) + monkeypatch.setattr( + "simplexity.activations.activation_visualizations.build_plotly_figure", + lambda plot_cfg, registry, controls=None: { + "backend": "plotly", + "layers": len(plot_cfg.layers), + }, + ) + viz_cfg = { + "name": "pca_projection", + "data_mapping": { + "mappings": { + "pc0": {"source": 
"projections", "key": "pca", "component": 0}, + "belief_state": {"source": "belief_states", "reducer": "argmax"}, + } + }, + "controls": {"slider": "step", "dropdown": "layer"}, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": { + "x": {"field": "pc0", "type": "quantitative"}, + "color": {"field": "belief_state", "type": "nominal"}, + }, + }, } - - def test_prepare_activations_accepts_tuple_beliefs(self, factored_belief_data): - """prepare_activations should accept and preserve tuple belief states.""" - result = prepare_activations( - factored_belief_data["inputs"], - factored_belief_data["factored_beliefs"], - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), - ) - - assert result.belief_states is not None - assert isinstance(result.belief_states, tuple) - assert len(result.belief_states) == 2 - - batch_size = factored_belief_data["batch_size"] - assert result.belief_states[0].shape == (batch_size, factored_belief_data["factor_0_dim"]) - assert result.belief_states[1].shape == (batch_size, factored_belief_data["factor_1_dim"]) - - def test_prepare_activations_tuple_beliefs_all_tokens(self, factored_belief_data): - """Tuple beliefs should work with all tokens mode.""" - result = prepare_activations( - factored_belief_data["inputs"], - factored_belief_data["factored_beliefs"], - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=False, - concat_layers=False, - use_probs_as_weights=False, - ), - ) - - assert result.belief_states is not None - assert isinstance(result.belief_states, tuple) - assert len(result.belief_states) == 2 - - # With deduplication, we expect fewer samples than batch_size * seq_len - n_prefixes = result.belief_states[0].shape[0] - assert result.belief_states[0].shape == (n_prefixes, factored_belief_data["factor_0_dim"]) - assert 
result.belief_states[1].shape == (n_prefixes, factored_belief_data["factor_1_dim"]) - assert result.activations["layer_0"].shape[0] == n_prefixes - - def test_prepare_activations_torch_tuple_beliefs(self, factored_belief_data): - """prepare_activations should accept tuple of PyTorch tensors.""" - torch = pytest.importorskip("torch") - - torch_factor_0 = torch.tensor(np.asarray(factored_belief_data["factored_beliefs"][0])) - torch_factor_1 = torch.tensor(np.asarray(factored_belief_data["factored_beliefs"][1])) - torch_beliefs = (torch_factor_0, torch_factor_1) - - result = prepare_activations( - factored_belief_data["inputs"], - torch_beliefs, - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), - ) - - assert result.belief_states is not None - assert isinstance(result.belief_states, tuple) - assert len(result.belief_states) == 2 - # Should be converted to JAX arrays - assert isinstance(result.belief_states[0], jnp.ndarray) - assert isinstance(result.belief_states[1], jnp.ndarray) - - def test_prepare_activations_numpy_tuple_beliefs(self, factored_belief_data): - """prepare_activations should accept tuple of numpy arrays.""" - np_factor_0 = np.asarray(factored_belief_data["factored_beliefs"][0]) - np_factor_1 = np.asarray(factored_belief_data["factored_beliefs"][1]) - np_beliefs = (np_factor_0, np_factor_1) - - result = prepare_activations( - factored_belief_data["inputs"], - np_beliefs, - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), - ) - - assert result.belief_states is not None - assert isinstance(result.belief_states, tuple) - assert len(result.belief_states) == 2 - # Should be converted to JAX arrays - assert isinstance(result.belief_states[0], jnp.ndarray) - assert 
isinstance(result.belief_states[1], jnp.ndarray) - - def test_linear_regression_with_to_factors_true(self, factored_belief_data): - """LinearRegressionAnalysis with to_factors=True should regress to each factor separately.""" - analysis = LinearRegressionAnalysis(to_factors=True) - - prepared = prepare_activations( - factored_belief_data["inputs"], - factored_belief_data["factored_beliefs"], - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), - ) - - scalars, projections = analysis.analyze( - activations=prepared.activations, - belief_states=prepared.belief_states, - weights=prepared.weights, - ) - - # Should have separate metrics for each factor - # Format is: layer_name_factor_idx/metric_name - assert "layer_0_factor_0/r2" in scalars - assert "layer_0_factor_1/r2" in scalars - assert "layer_0_factor_0/rmse" in scalars - assert "layer_0_factor_1/rmse" in scalars - assert "layer_0_factor_0/mae" in scalars - assert "layer_0_factor_1/mae" in scalars - assert "layer_0_factor_0/dist" in scalars - assert "layer_0_factor_1/dist" in scalars - - assert "layer_1_factor_0/r2" in scalars - assert "layer_1_factor_1/r2" in scalars - - # Should have separate projections for each factor - assert "layer_0_factor_0/projected" in projections - assert "layer_0_factor_1/projected" in projections - assert "layer_1_factor_0/projected" in projections - assert "layer_1_factor_1/projected" in projections - - # Check projection shapes - batch_size = factored_belief_data["batch_size"] - assert projections["layer_0_factor_0/projected"].shape == (batch_size, factored_belief_data["factor_0_dim"]) - assert projections["layer_0_factor_1/projected"].shape == (batch_size, factored_belief_data["factor_1_dim"]) - - def test_linear_regression_svd_with_to_factors_true(self, factored_belief_data): - """LinearRegressionSVDAnalysis with to_factors=True should regress to 
each factor separately.""" - analysis = LinearRegressionSVDAnalysis(to_factors=True, rcond_values=[1e-10]) - - prepared = prepare_activations( - factored_belief_data["inputs"], - factored_belief_data["factored_beliefs"], - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), - ) - - scalars, projections = analysis.analyze( - activations=prepared.activations, - belief_states=prepared.belief_states, - weights=prepared.weights, - ) - - # Should have separate metrics for each factor including best_rcond - assert "layer_0_factor_0/r2" in scalars - assert "layer_0_factor_1/r2" in scalars - assert "layer_0_factor_0/best_rcond" in scalars - assert "layer_0_factor_1/best_rcond" in scalars - - # Should have separate projections for each factor - assert "layer_0_factor_0/projected" in projections - assert "layer_0_factor_1/projected" in projections - - def test_tracker_with_factored_beliefs(self, factored_belief_data): - """ActivationTracker should work with tuple belief states.""" tracker = ActivationTracker( { - "regression": LinearRegressionAnalysis( - last_token_only=True, - concat_layers=False, - to_factors=True, - ), "pca": PcaAnalysis( - n_components=2, - last_token_only=True, + n_components=1, + last_token_only=False, concat_layers=False, ), - } + }, + visualizations={"pca": [viz_cfg]}, ) - scalars, projections = tracker.analyze( - inputs=factored_belief_data["inputs"], - beliefs=factored_belief_data["factored_beliefs"], - probs=factored_belief_data["probs"], - activations=factored_belief_data["activations"], + _, _, visualizations = tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], ) - # Regression should have per-factor metrics - assert "regression/layer_0_factor_0/r2" in scalars - assert "regression/layer_0_factor_1/r2" 
in scalars + key = "pca/pca_projection" + assert key in visualizations + payload = visualizations[key] + assert not payload.dataframe.empty + assert payload.controls is not None + assert payload.controls.slider is not None + assert payload.controls.slider.field == "step" + assert set(payload.dataframe["layer"]) == {"layer_0", "layer_1"} - # PCA should still work (doesn't use belief states) - assert "pca/layer_0_variance_explained" in scalars + def test_controls_accumulate_steps_conflict(self): + """Controls should forbid accumulate_steps with slider targeting step.""" + with pytest.raises(ConfigValidationError): + ActivationVisualizationControlsConfig(slider="step", accumulate_steps=True) - # Projections should be present - assert "regression/layer_0_factor_0/projected" in projections - assert "regression/layer_0_factor_1/projected" in projections - assert "pca/layer_0_pca" in projections - def test_single_factor_tuple(self, synthetic_data): - """Test with a single-factor tuple (edge case).""" - # Create single-factor tuple - single_factor = (synthetic_data["beliefs"],) +class TestScalarSeriesMapping: + """Tests for scalar_series dataframe construction.""" - result = prepare_activations( - synthetic_data["inputs"], - single_factor, - synthetic_data["probs"], - synthetic_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), + def test_infers_indices_when_not_provided(self): + mapping = ScalarSeriesMapping( + key_template="{layer}_metric_{index}", + index_field="component", + value_field="score", ) + metadata_columns = {"step": np.array([0])} + scalars = { + "test_analysis/layer_0_metric_1": 0.1, + "test_analysis/layer_0_metric_2": 0.2, + "test_analysis/layer_1_metric_1": 0.3, + } + df = _build_scalar_series_dataframe(mapping, metadata_columns, scalars, ["layer_0", "layer_1"], "test_analysis") - assert result.belief_states is not None - assert isinstance(result.belief_states, tuple) - 
assert len(result.belief_states) == 1 - assert result.belief_states[0].shape == (synthetic_data["batch_size"], synthetic_data["belief_dim"]) - - def test_three_factor_tuple(self, factored_belief_data): - """Test with three factors to ensure generalization.""" - batch_size = factored_belief_data["batch_size"] - seq_len = factored_belief_data["seq_len"] - - # Add a third factor - factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.3 - factor_1 = jnp.ones((batch_size, seq_len, 2)) * 0.5 - factor_2 = jnp.ones((batch_size, seq_len, 4)) * 0.7 - three_factor_beliefs = (factor_0, factor_1, factor_2) + assert set(df["component"]) == {1, 2} + assert set(df[df["layer"] == "layer_0"]["component"]) == {1, 2} + assert set(df[df["layer"] == "layer_1"]["component"]) == {1} - result = prepare_activations( - factored_belief_data["inputs"], - three_factor_beliefs, - factored_belief_data["probs"], - factored_belief_data["activations"], - prepare_options=PrepareOptions( - last_token_only=True, - concat_layers=False, - use_probs_as_weights=False, - ), + def test_infer_indices_errors_when_missing(self): + mapping = ScalarSeriesMapping( + key_template="{layer}_metric_{index}", + index_field="k", + value_field="value", ) + metadata_columns = {"step": np.array([0])} + scalars = {} - assert result.belief_states is not None - assert isinstance(result.belief_states, tuple) - assert len(result.belief_states) == 3 - assert result.belief_states[0].shape == (batch_size, 3) - assert result.belief_states[1].shape == (batch_size, 2) - assert result.belief_states[2].shape == (batch_size, 4) + with pytest.raises(ConfigValidationError): + _build_scalar_series_dataframe(mapping, metadata_columns, scalars, ["layer_0"], "test_analysis") diff --git a/tests/activations/test_activation_tracker.py b/tests/activations/test_activation_tracker.py new file mode 100644 index 00000000..1a80d9a0 --- /dev/null +++ b/tests/activations/test_activation_tracker.py @@ -0,0 +1,364 @@ +"""Tests for ActivationTracker class.""" 
+ +# pylint: disable=all +# Temporarily disable all pylint checkers during AST traversal to prevent crash. +# pylint: enable=all + +from unittest.mock import MagicMock, patch + +import jax.numpy as jnp +import numpy as np +import pandas as pd +import pytest + +from simplexity.activations.activation_analyses import PcaAnalysis +from simplexity.activations.activation_tracker import ( + ActivationTracker, + PrepareOptions, + _get_uniform_weights, + _to_jax_array, + prepare_activations, +) + + +class TestGetUniformWeights: + """Tests for _get_uniform_weights helper.""" + + def test_returns_uniform_weights(self): + """Test that uniform weights sum to 1.""" + weights = _get_uniform_weights(5, jnp.float32) + assert weights.shape == (5,) + assert np.isclose(float(weights.sum()), 1.0) + + def test_each_weight_equal(self): + """Test that each weight is equal.""" + weights = _get_uniform_weights(4, jnp.float32) + expected = 0.25 + for w in weights: + assert np.isclose(float(w), expected) + + +class TestToJaxArray: + """Tests for _to_jax_array helper.""" + + def test_numpy_array(self): + """Test conversion from numpy array.""" + arr = np.array([1, 2, 3]) + result = _to_jax_array(arr) + assert isinstance(result, jnp.ndarray) + assert list(result) == [1, 2, 3] + + def test_jax_array_passthrough(self): + """Test that JAX arrays pass through unchanged.""" + arr = jnp.array([1, 2, 3]) + result = _to_jax_array(arr) + assert result is arr + + +class TestPrepareActivations: + """Tests for prepare_activations function.""" + + @pytest.fixture + def basic_data(self): + """Create basic test data.""" + batch_size = 2 + seq_len = 3 + belief_dim = 2 + d_model = 4 + + inputs = jnp.array([[1, 2, 3], [1, 2, 4]]) + beliefs = jnp.ones((batch_size, seq_len, belief_dim)) * 0.5 + probs = jnp.ones((batch_size, seq_len)) * 0.1 + activations = {"layer_0": jnp.ones((batch_size, seq_len, d_model)) * 0.3} + + return { + "inputs": inputs, + "beliefs": beliefs, + "probs": probs, + "activations": 
activations, + } + + def test_uses_probs_as_weights(self, basic_data): + """Test that probs are used as weights when specified.""" + result = prepare_activations( + basic_data["inputs"], + basic_data["beliefs"], + basic_data["probs"], + basic_data["activations"], + PrepareOptions(last_token_only=False, concat_layers=False, use_probs_as_weights=True), + ) + assert result.weights is not None + + def test_uses_uniform_weights(self, basic_data): + """Test that uniform weights are used when probs not used.""" + result = prepare_activations( + basic_data["inputs"], + basic_data["beliefs"], + basic_data["probs"], + basic_data["activations"], + PrepareOptions(last_token_only=False, concat_layers=False, use_probs_as_weights=False), + ) + assert result.weights is not None + assert np.isclose(float(result.weights.sum()), 1.0) + + def test_concat_layers(self, basic_data): + """Test layer concatenation.""" + basic_data["activations"]["layer_1"] = jnp.ones((2, 3, 6)) * 0.5 + result = prepare_activations( + basic_data["inputs"], + basic_data["beliefs"], + basic_data["probs"], + basic_data["activations"], + PrepareOptions(last_token_only=False, concat_layers=True, use_probs_as_weights=False), + ) + assert "concatenated" in result.activations + assert len(result.activations) == 1 + + def test_tuple_beliefs(self, basic_data): + """Test handling of tuple belief states (factored processes).""" + beliefs_tuple = ( + jnp.ones((2, 3, 2)) * 0.3, + jnp.ones((2, 3, 3)) * 0.7, + ) + result = prepare_activations( + basic_data["inputs"], + beliefs_tuple, + basic_data["probs"], + basic_data["activations"], + PrepareOptions(last_token_only=False, concat_layers=False, use_probs_as_weights=False), + ) + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + + +class TestActivationTrackerScalarHistory: + """Tests for ActivationTracker scalar history methods.""" + + @pytest.fixture + def tracker_with_history(self): + """Create tracker with some scalar history.""" 
+ tracker = ActivationTracker( + analyses={"pca": PcaAnalysis(n_components=1, last_token_only=False, concat_layers=False)}, + ) + # Manually populate scalar history + tracker._scalar_history = { + "pca/layer_0_r2": [(0, 0.5), (1, 0.6), (2, 0.7)], + "pca/layer_1_r2": [(0, 0.4), (1, 0.5), (2, 0.6)], + "pca/layer_0_rmse": [(0, 0.3), (1, 0.2), (2, 0.1)], + "pca/layer_1_rmse": [(0, 0.4), (1, 0.3), (2, 0.2)], + } + return tracker + + def test_get_scalar_history_no_pattern(self, tracker_with_history): + """Test getting all scalar history without pattern.""" + history = tracker_with_history.get_scalar_history() + assert len(history) == 4 + assert "pca/layer_0_r2" in history + assert "pca/layer_1_r2" in history + + def test_get_scalar_history_exact_match(self, tracker_with_history): + """Test getting scalar history with exact match (no wildcards).""" + history = tracker_with_history.get_scalar_history("pca/layer_0_r2") + assert len(history) == 1 + assert "pca/layer_0_r2" in history + + def test_get_scalar_history_exact_match_not_found(self, tracker_with_history): + """Test exact match returns empty when not found.""" + history = tracker_with_history.get_scalar_history("nonexistent") + assert len(history) == 0 + + def test_get_scalar_history_star_pattern(self, tracker_with_history): + """Test getting scalar history with * wildcard.""" + history = tracker_with_history.get_scalar_history("pca/layer_*_r2") + assert len(history) == 2 + assert "pca/layer_0_r2" in history + assert "pca/layer_1_r2" in history + + def test_get_scalar_history_star_pattern_all(self, tracker_with_history): + """Test * wildcard matching all metrics.""" + history = tracker_with_history.get_scalar_history("pca/*") + assert len(history) == 4 + + def test_get_scalar_history_range_pattern(self, tracker_with_history): + """Test getting scalar history with range pattern.""" + history = tracker_with_history.get_scalar_history("pca/layer_0...2_r2") + assert len(history) == 2 + assert "pca/layer_0_r2" in history 
+ assert "pca/layer_1_r2" in history + + def test_get_scalar_history_range_partial_match(self, tracker_with_history): + """Test range pattern with partial matches.""" + history = tracker_with_history.get_scalar_history("pca/layer_0...1_r2") + assert len(history) == 1 + assert "pca/layer_0_r2" in history + + def test_get_scalar_history_df_empty(self): + """Test get_scalar_history_df with empty history.""" + tracker = ActivationTracker(analyses={}) + df = tracker.get_scalar_history_df() + assert isinstance(df, pd.DataFrame) + assert list(df.columns) == ["metric", "step", "value"] + assert len(df) == 0 + + def test_get_scalar_history_df_with_data(self, tracker_with_history): + """Test get_scalar_history_df with data.""" + df = tracker_with_history.get_scalar_history_df() + assert isinstance(df, pd.DataFrame) + assert "metric" in df.columns + assert "step" in df.columns + assert "value" in df.columns + assert len(df) == 12 # 4 metrics * 3 steps each + + def test_get_scalar_history_df_structure(self, tracker_with_history): + """Test that DataFrame has correct structure.""" + df = tracker_with_history.get_scalar_history_df() + pca_layer0_r2 = df[df["metric"] == "pca/layer_0_r2"] + assert len(pca_layer0_r2) == 3 + assert list(pca_layer0_r2["step"]) == [0, 1, 2] + assert list(pca_layer0_r2["value"]) == [0.5, 0.6, 0.7] + + +class TestActivationTrackerVisualizationHandling: + """Tests for visualization handling in ActivationTracker.""" + + @pytest.fixture + def synthetic_data(self): + """Create synthetic data for testing.""" + batch_size = 2 + seq_len = 3 + belief_dim = 2 + d_model = 4 + + inputs = jnp.array([[1, 2, 3], [1, 2, 4]]) + beliefs = jnp.ones((batch_size, seq_len, belief_dim)) * 0.5 + probs = jnp.ones((batch_size, seq_len)) * 0.1 + activations = {"layer_0": jnp.ones((batch_size, seq_len, d_model)) * 0.3} + + return { + "inputs": inputs, + "beliefs": beliefs, + "probs": probs, + "activations": activations, + } + + def test_analyze_without_visualizations(self, 
synthetic_data): + """Test analyze works without visualizations configured.""" + tracker = ActivationTracker( + analyses={"pca": PcaAnalysis(n_components=1, last_token_only=False, concat_layers=False)}, + ) + scalars, projections, visualizations = tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + ) + assert len(scalars) > 0 + assert len(projections) > 0 + assert len(visualizations) == 0 + + def test_analyze_records_scalar_history(self, synthetic_data): + """Test that analyze records scalar history when step is provided.""" + tracker = ActivationTracker( + analyses={"pca": PcaAnalysis(n_components=1, last_token_only=False, concat_layers=False)}, + ) + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=0, + ) + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=1, + ) + history = tracker.get_scalar_history() + assert len(history) > 0 + for _key, values in history.items(): + assert len(values) == 2 + assert values[0][0] == 0 # First step + assert values[1][0] == 1 # Second step + + def test_analyze_with_tuple_beliefs_creates_stacked_array(self, synthetic_data): + """Test that tuple beliefs are stacked correctly for visualization.""" + # Tuple beliefs must have same shape for stacking + beliefs_tuple = ( + jnp.ones((2, 3, 2)) * 0.3, + jnp.ones((2, 3, 2)) * 0.7, + ) + viz_cfg = { + "name": "test_viz", + "data_mapping": { + "mappings": { + "pc0": {"source": "projections", "key": "pca", "component": 0}, + }, + }, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": { + "x": {"field": "pc0", "type": "quantitative"}, + }, + }, + } + tracker = ActivationTracker( + analyses={"pca": PcaAnalysis(n_components=1, 
last_token_only=False, concat_layers=False)}, + visualizations={"pca": [viz_cfg]}, + ) + scalars, projections, visualizations = tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=beliefs_tuple, + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=0, + ) + assert len(visualizations) > 0 + + def test_analyze_with_none_beliefs(self, synthetic_data): + """Test visualization when beliefs are None.""" + viz_cfg = { + "name": "test_viz", + "data_mapping": { + "mappings": { + "pc0": {"source": "projections", "key": "pca", "component": 0}, + }, + }, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": { + "x": {"field": "pc0", "type": "quantitative"}, + }, + }, + } + tracker = ActivationTracker( + analyses={"pca": PcaAnalysis(n_components=1, last_token_only=False, concat_layers=False)}, + visualizations={"pca": [viz_cfg]}, + ) + # PCA doesn't require beliefs, so this should work + scalars, projections, visualizations = tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=0, + ) + assert len(visualizations) > 0 + + +class TestActivationTrackerSaveVisualizations: + """Tests for save_visualizations method.""" + + def test_save_visualizations_delegates_to_persistence(self, tmp_path): + """Test that save_visualizations calls the persistence function.""" + tracker = ActivationTracker(analyses={}) + mock_payload = MagicMock() + mock_payload.name = "test_viz" + visualizations = {"pca/test_viz": mock_payload} + + with patch("simplexity.activations.activation_tracker.save_visualization_payloads") as mock_save: + mock_save.return_value = {"pca/test_viz": str(tmp_path / "test.html")} + result = tracker.save_visualizations(visualizations, tmp_path, step=0) + mock_save.assert_called_once_with(visualizations, tmp_path, 0) + assert "pca/test_viz" in result diff --git a/tests/activations/test_activation_visualizations.py 
b/tests/activations/test_activation_visualizations.py new file mode 100644 index 00000000..8a23619f --- /dev/null +++ b/tests/activations/test_activation_visualizations.py @@ -0,0 +1,298 @@ +"""Tests for activation visualization functions.""" + +# pylint: disable=all +# Temporarily disable all pylint checkers during AST traversal to prevent crash. +# pylint: enable=all + +import numpy as np +import pytest + +from simplexity.activations.activation_visualizations import ( + PreparedMetadata, + _compute_aggregation, + _parse_scalar_expression, + _render_title_template, + build_visualization_payloads, +) +from simplexity.activations.visualization_configs import build_activation_visualization_config +from simplexity.exceptions import ConfigValidationError + + +class TestParseScalarExpression: + """Tests for _parse_scalar_expression function.""" + + def test_simple_key(self): + """Test parsing a simple scalar key without aggregation.""" + key, agg = _parse_scalar_expression("my_scalar") + assert key == "my_scalar" + assert agg is None + + def test_min_aggregation(self): + """Test parsing min aggregation.""" + key, agg = _parse_scalar_expression("min(my_scalar)") + assert key == "my_scalar" + assert agg == "min" + + def test_max_aggregation(self): + """Test parsing max aggregation.""" + key, agg = _parse_scalar_expression("max(my_scalar)") + assert key == "my_scalar" + assert agg == "max" + + def test_avg_aggregation(self): + """Test parsing avg aggregation.""" + key, agg = _parse_scalar_expression("avg(my_scalar)") + assert key == "my_scalar" + assert agg == "avg" + + def test_mean_aggregation(self): + """Test parsing mean aggregation.""" + key, agg = _parse_scalar_expression("mean(my_scalar)") + assert key == "my_scalar" + assert agg == "mean" + + def test_latest_aggregation(self): + """Test parsing latest aggregation.""" + key, agg = _parse_scalar_expression("latest(my_scalar)") + assert key == "my_scalar" + assert agg == "latest" + + def test_first_aggregation(self): 
+ """Test parsing first aggregation.""" + key, agg = _parse_scalar_expression("first(my_scalar)") + assert key == "my_scalar" + assert agg == "first" + + def test_last_aggregation(self): + """Test parsing last aggregation.""" + key, agg = _parse_scalar_expression("last(my_scalar)") + assert key == "my_scalar" + assert agg == "last" + + def test_strips_whitespace(self): + """Test that whitespace is stripped.""" + key, agg = _parse_scalar_expression(" min( my_scalar ) ") + assert key == "my_scalar" + assert agg == "min" + + +class TestComputeAggregation: + """Tests for _compute_aggregation function.""" + + def test_min(self): + """Test min aggregation.""" + history = [(0, 5.0), (1, 3.0), (2, 7.0)] + result = _compute_aggregation(history, "min") + assert result == 3.0 + + def test_max(self): + """Test max aggregation.""" + history = [(0, 5.0), (1, 3.0), (2, 7.0)] + result = _compute_aggregation(history, "max") + assert result == 7.0 + + def test_avg(self): + """Test avg aggregation.""" + history = [(0, 3.0), (1, 6.0), (2, 9.0)] + result = _compute_aggregation(history, "avg") + assert result == 6.0 + + def test_mean(self): + """Test mean aggregation (alias for avg).""" + history = [(0, 3.0), (1, 6.0), (2, 9.0)] + result = _compute_aggregation(history, "mean") + assert result == 6.0 + + def test_latest(self): + """Test latest aggregation.""" + history = [(0, 5.0), (1, 3.0), (2, 7.0)] + result = _compute_aggregation(history, "latest") + assert result == 7.0 + + def test_last(self): + """Test last aggregation (alias for latest).""" + history = [(0, 5.0), (1, 3.0), (2, 7.0)] + result = _compute_aggregation(history, "last") + assert result == 7.0 + + def test_first(self): + """Test first aggregation.""" + history = [(0, 5.0), (1, 3.0), (2, 7.0)] + result = _compute_aggregation(history, "first") + assert result == 5.0 + + def test_empty_history_raises(self): + """Test that empty history raises error.""" + with pytest.raises(ConfigValidationError, match="empty history"): + 
_compute_aggregation([], "min") + + def test_unknown_function_raises(self): + """Test that unknown function raises error.""" + history = [(0, 5.0)] + with pytest.raises(ConfigValidationError, match="Unknown aggregation"): + _compute_aggregation(history, "unknown_func") + + +class TestRenderTitleTemplate: + """Tests for _render_title_template function.""" + + def test_none_title_returns_none(self): + """Test that None title returns None.""" + result = _render_title_template(None, None, {}, {}) + assert result is None + + def test_simple_title_no_substitution(self): + """Test title without placeholders.""" + result = _render_title_template("My Title", None, {}, {}) + assert result == "My Title" + + def test_title_with_scalar_substitution(self): + """Test title with scalar value substitution.""" + title = "Loss: {loss:.3f}" + title_scalars = {"loss": "test/loss"} + scalars = {"test/loss": 0.12345} + result = _render_title_template(title, title_scalars, scalars, {}) + assert result == "Loss: 0.123" + + def test_title_with_history_aggregation(self): + """Test title with scalar history aggregation.""" + title = "Min Loss: {min_loss:.2f}" + title_scalars = {"min_loss": "min(test/loss)"} + scalars = {} + scalar_history = {"test/loss": [(0, 0.5), (1, 0.3), (2, 0.4)]} + result = _render_title_template(title, title_scalars, scalars, scalar_history) + assert result == "Min Loss: 0.30" + + +class TestBuildVisualizationPayloads: + """Tests for build_visualization_payloads function.""" + + @pytest.fixture + def basic_metadata(self): + """Create basic metadata for testing.""" + return PreparedMetadata( + sequences=[(1, 2), (1, 3)], + steps=np.array([2, 2]), + select_last_token=False, + ) + + @pytest.fixture + def basic_viz_config(self): + """Create a basic visualization config.""" + return build_activation_visualization_config( + { + "name": "test_viz", + "data_mapping": { + "mappings": { + "pc0": {"source": "projections", "key": "pca", "component": 0}, + }, + }, + "layer": { + 
"geometry": {"type": "point"}, + "aesthetics": { + "x": {"field": "pc0", "type": "quantitative"}, + }, + }, + } + ) + + def test_builds_payload_with_projections(self, basic_metadata, basic_viz_config): + """Test building a payload with projection data.""" + projections = {"layer_0_pca": np.array([[1.0, 2.0], [3.0, 4.0]])} + payloads = build_visualization_payloads( + analysis_name="test", + viz_cfgs=[basic_viz_config], + default_backend="altair", + prepared_metadata=basic_metadata, + weights=np.array([0.5, 0.5]), + belief_states=None, + projections=projections, + scalars={}, + scalar_history={}, + scalar_history_step=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + assert len(payloads) == 1 + payload = payloads[0] + assert payload.name == "test_viz" + assert not payload.dataframe.empty + + def test_builds_payload_with_belief_states(self, basic_metadata): + """Test building a payload with belief state data.""" + viz_config = build_activation_visualization_config( + { + "name": "belief_viz", + "data_mapping": { + "mappings": { + "belief_0": {"source": "belief_states", "component": 0}, + }, + }, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": { + "x": {"field": "belief_0", "type": "quantitative"}, + }, + }, + } + ) + belief_states = np.array([[0.5, 0.5], [0.3, 0.7]]) + payloads = build_visualization_payloads( + analysis_name="test", + viz_cfgs=[viz_config], + default_backend="altair", + prepared_metadata=basic_metadata, + weights=np.array([0.5, 0.5]), + belief_states=belief_states, + projections={}, + scalars={}, + scalar_history={}, + scalar_history_step=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + assert len(payloads) == 1 + assert "belief_0" in payloads[0].dataframe.columns + + def test_handles_multiple_configs(self, basic_metadata): + """Test building multiple payloads from multiple configs.""" + configs = [ + build_activation_visualization_config( + { + "name": "viz_1", + "data_mapping": {"mappings": 
{"pc0": {"source": "projections", "key": "pca", "component": 0}}}, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": {"x": {"field": "pc0", "type": "quantitative"}}, + }, + } + ), + build_activation_visualization_config( + { + "name": "viz_2", + "data_mapping": {"mappings": {"pc1": {"source": "projections", "key": "pca", "component": 1}}}, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": {"x": {"field": "pc1", "type": "quantitative"}}, + }, + } + ), + ] + projections = {"layer_0_pca": np.array([[1.0, 2.0], [3.0, 4.0]])} + payloads = build_visualization_payloads( + analysis_name="test", + viz_cfgs=configs, + default_backend="altair", + prepared_metadata=basic_metadata, + weights=np.array([0.5, 0.5]), + belief_states=None, + projections=projections, + scalars={}, + scalar_history={}, + scalar_history_step=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + assert len(payloads) == 2 + assert payloads[0].name == "viz_1" + assert payloads[1].name == "viz_2" diff --git a/tests/activations/test_dataframe_integration.py b/tests/activations/test_dataframe_integration.py new file mode 100644 index 00000000..4ca363fc --- /dev/null +++ b/tests/activations/test_dataframe_integration.py @@ -0,0 +1,349 @@ +"""Integration tests for projection DataFrame building with factor patterns.""" + +import jax.numpy as jnp +import numpy as np +import pandas as pd +import pytest + +from simplexity.activations.visualization.dataframe_builders import ( + _build_dataframe, + _build_dataframe_for_mappings, +) +from simplexity.activations.visualization_configs import ( + ActivationVisualizationConfig, + ActivationVisualizationDataMapping, + ActivationVisualizationFieldRef, + CombinedMappingSection, +) +from simplexity.analysis.linear_regression import layer_linear_regression_svd +from simplexity.exceptions import ConfigValidationError + + +class TestProjectionDataframeIntegration: + """Integration tests for projection DataFrame building with factor 
patterns.""" + + def test_factored_projection_dataframe_values_match(self): + """Test that factored projection values are correctly associated with each factor. + + This is a regression test for the bug where projections looked 'random' + when visualizing factored linear regression results. + """ + # Simulate projection keys as produced by LayerwiseAnalysis with to_factors=True + # Keys format: layer_name_factor_idx/projected + factor_0_data = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1], [0.3, 0.6, 0.1]]) + factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6], [0.3, 0.7]]) + + projections = { + "layer_0_factor_0/projected": factor_0_data, + "layer_0_factor_1/projected": factor_1_data, + } + + # Metadata columns with 3 samples + metadata_columns = { + "step": np.array([3, 3, 3]), + "sample_index": np.array([0, 1, 2]), + } + + # Create mappings that use key patterns (as in user's YAML config) + # Note: Each mapping is for a SPECIFIC component, not a wildcard + mappings = { + "factor_*_prob_0": ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=0, + group_as="factor", + ), + "factor_*_prob_1": ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=1, + group_as="factor", + ), + } + + # Build the DataFrame + df = _build_dataframe_for_mappings( + mappings=mappings, + metadata_columns=metadata_columns, + projections=projections, + scalars={}, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + # Verify structure: should have 2 groups (factor_0 and factor_1) * 3 samples = 6 rows + assert len(df) == 6, f"Expected 6 rows, got {len(df)}" + + # Verify factor column exists + assert "factor" in df.columns, "Missing 'factor' column" + + # Check factor_0 data + factor_0_rows = df[df["factor"] == "0"] + assert len(factor_0_rows) == 3, f"Expected 3 rows for factor_0, got {len(factor_0_rows)}" + + # Verify factor_0 prob_0 values match the source data + 
np.testing.assert_array_almost_equal( + np.asarray(factor_0_rows["prob_0"]), + factor_0_data[:, 0], + err_msg="Factor 0 prob_0 values don't match source data", + ) + + # Verify factor_0 prob_1 values match the source data + np.testing.assert_array_almost_equal( + np.asarray(factor_0_rows["prob_1"]), + factor_0_data[:, 1], + err_msg="Factor 0 prob_1 values don't match source data", + ) + + # Check factor_1 data + factor_1_rows = df[df["factor"] == "1"] + assert len(factor_1_rows) == 3, f"Expected 3 rows for factor_1, got {len(factor_1_rows)}" + + # Verify factor_1 prob_0 values match the source data + np.testing.assert_array_almost_equal( + np.asarray(factor_1_rows["prob_0"]), + factor_1_data[:, 0], + err_msg="Factor 1 prob_0 values don't match source data", + ) + + # Verify factor_1 prob_1 values match the source data + np.testing.assert_array_almost_equal( + np.asarray(factor_1_rows["prob_1"]), + factor_1_data[:, 1], + err_msg="Factor 1 prob_1 values don't match source data", + ) + + def test_factored_projection_different_component_counts(self): + """Test that factors with different numbers of components are handled correctly. + + Factor 0 has 3 components (states), factor 1 has 2 components. + Requesting component 2 should work for factor 0 but raise an error for factor 1. 
+ """ + factor_0_data = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1]]) # 3 components + factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6]]) # 2 components + + projections = { + "layer_0_factor_0/projected": factor_0_data, + "layer_0_factor_1/projected": factor_1_data, + } + + metadata_columns = { + "step": np.array([1, 1]), + "sample_index": np.array([0, 1]), + } + + # Request component 2 - this should fail for factor_1 which only has 2 components + mappings = { + "factor_*_prob_2": ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=2, + group_as="factor", + ), + } + + # Should raise an error because factor_1 doesn't have component 2 + with pytest.raises(ConfigValidationError, match="out of bounds"): + _build_dataframe_for_mappings( + mappings=mappings, + metadata_columns=metadata_columns, + projections=projections, + scalars={}, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + def test_combined_projections_and_beliefs_data_integrity(self): + """Test combined mode with projections and belief states.""" + n_samples = 4 + n_factors = 2 + n_states = 3 + + belief_states = np.array( + [ + [[0.1, 0.2, 0.7], [0.3, 0.4, 0.3]], + [[0.2, 0.3, 0.5], [0.5, 0.3, 0.2]], + [[0.3, 0.4, 0.3], [0.2, 0.6, 0.2]], + [[0.4, 0.5, 0.1], [0.1, 0.1, 0.8]], + ] + ) + + noise = np.random.default_rng(42).standard_normal((n_samples, n_factors, n_states)) * 0.01 + projected_values = belief_states + noise + + projections = { + "layer_0_factor_0/projected": projected_values[:, 0, :], + "layer_0_factor_1/projected": projected_values[:, 1, :], + } + + metadata_columns = { + "analysis": np.array(["test"] * n_samples), + "step": np.array([10] * n_samples), + "sample_index": np.arange(n_samples), + } + + config = ActivationVisualizationConfig( + name="test_combined", + backend="altair", + plot=None, + data_mapping=ActivationVisualizationDataMapping( + combined=[ + CombinedMappingSection( + label="prediction", + 
mappings={ + f"factor_*_prob_{i}": ActivationVisualizationFieldRef( + source="projections", key="factor_*/projected", component=i, group_as="factor" + ) + for i in range(n_states) + }, + ), + CombinedMappingSection( + label="ground_truth", + mappings={ + f"factor_*_prob_{i}": ActivationVisualizationFieldRef( + source="belief_states", factor="*", component=i, group_as="factor" + ) + for i in range(n_states) + }, + ), + ], + combine_as="data_type", + ), + ) + + df = _build_dataframe( + viz_cfg=config, + metadata_columns=metadata_columns, + projections=projections, + scalars={}, + scalar_history={}, + scalar_history_step=None, + belief_states=belief_states, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + assert "data_type" in df.columns + assert "factor" in df.columns + assert len(df) == 2 * n_factors * n_samples + + def test_combined_mode_multiple_layers(self): + """Test that multiple layers appear correctly in combined mode.""" + n_samples = 3 + n_layers = 4 + n_factors = 2 + n_states = 3 + + belief_states = np.random.rand(n_samples, n_factors, n_states) + projections = { + f"layer_{layer_idx}_factor_{factor_idx}/projected": np.random.rand(n_samples, n_states) + for layer_idx in range(n_layers) + for factor_idx in range(n_factors) + } + + metadata_columns = { + "analysis": np.array(["test"] * n_samples), + "step": np.array([10] * n_samples), + "sample_index": np.arange(n_samples), + } + + config = ActivationVisualizationConfig( + name="test_multilayer", + backend="altair", + plot=None, + data_mapping=ActivationVisualizationDataMapping( + combined=[ + CombinedMappingSection( + label="prediction", + mappings={ + "factor_*_prob_0": ActivationVisualizationFieldRef( + source="projections", key="factor_*/projected", component=0, group_as="factor" + ), + }, + ), + CombinedMappingSection( + label="ground_truth", + mappings={ + "factor_*_prob_0": ActivationVisualizationFieldRef( + source="belief_states", factor="*", component=0, group_as="factor" + ), + }, 
+ ), + ], + combine_as="data_type", + ), + ) + + df = _build_dataframe( + viz_cfg=config, + metadata_columns=metadata_columns, + projections=projections, + scalars={}, + scalar_history={}, + scalar_history_step=None, + belief_states=belief_states, + analysis_concat_layers=False, + layer_names=[f"layer_{i}" for i in range(n_layers)], + ) + + pred_df = df[df["data_type"] == "prediction"] + gt_df = df[df["data_type"] == "ground_truth"] + assert set(np.unique(np.asarray(pred_df["layer"]))) == {f"layer_{i}" for i in range(n_layers)} + assert set(np.unique(np.asarray(gt_df["layer"]))) == {"_no_layer_"} + + def test_full_visualization_pipeline_factored_vs_nonfactored(self): + """Test that factored and non-factored projections produce same results for single factor.""" + projection_data = np.array([[0.7, 0.2, 0.1], [0.1, 0.8, 0.1], [0.2, 0.2, 0.6]]) + metadata = {"step": np.array([1, 1, 1]), "sample_index": np.arange(3)} + + nf_df = _build_dataframe_for_mappings( + mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="projected", component=0)}, + metadata_columns=metadata, + projections={"layer_0_projected": projection_data}, + scalars={}, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + f_df = _build_dataframe_for_mappings( + mappings={ + "factor_*_prob_0": ActivationVisualizationFieldRef( + source="projections", key="factor_*/projected", component=0, group_as="factor" + ) + }, + metadata_columns=metadata, + projections={"layer_0_factor_0/projected": projection_data}, + scalars={}, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + nf_sorted = pd.DataFrame(nf_df).sort_values(by="sample_index") + f_filtered = pd.DataFrame(f_df[f_df["factor"] == "0"]).sort_values(by="sample_index") + np.testing.assert_array_almost_equal( + np.asarray(nf_sorted["prob_0"]), + np.asarray(f_filtered["prob_0"]), + ) + + def test_linear_regression_projections_match_beliefs(self): + """Test that 
linear regression projections closely match original beliefs.""" + n_samples, n_features, n_factors, n_states = 50, 10, 3, 3 + + rng = np.random.default_rng(42) + ds = rng.standard_normal((n_samples, n_features)).astype(np.float32) + beliefs_combined = ds @ rng.standard_normal((n_features, n_factors * n_states)).astype(np.float32) * 0.1 + beliefs_softmax = np.exp(beliefs_combined.reshape(n_samples, n_factors, n_states)) + beliefs_softmax = beliefs_softmax / beliefs_softmax.sum(axis=2, keepdims=True) + + belief_states = tuple(jnp.array(beliefs_softmax[:, f, :]) for f in range(n_factors)) + scalars, projections = layer_linear_regression_svd( + jnp.array(ds), jnp.ones(n_samples) / n_samples, belief_states, to_factors=True + ) + + for f in range(n_factors): + assert scalars[f"factor_{f}/r2"] > 0.8, f"Factor {f} R² too low" + diff = np.abs(np.asarray(projections[f"factor_{f}/projected"]) - np.asarray(belief_states[f])) + assert diff.max() < 0.2, f"Factor {f} projections differ too much from beliefs" diff --git a/tests/activations/test_field_expansion.py b/tests/activations/test_field_expansion.py new file mode 100644 index 00000000..7eabf844 --- /dev/null +++ b/tests/activations/test_field_expansion.py @@ -0,0 +1,984 @@ +"""Tests for field expansion and pattern parsing in activation visualizations.""" + +import numpy as np +import pytest + +from simplexity.activations.visualization.dataframe_builders import ( + _extract_base_column_name, +) +from simplexity.activations.visualization.field_resolution import ( + _resolve_belief_states, +) +from simplexity.activations.visualization.pattern_expansion import ( + _expand_belief_factor_mapping, + _expand_field_mapping, + _expand_projection_key_pattern, + _get_component_count, + _has_field_pattern, + _has_key_pattern, + _parse_component_spec, +) +from simplexity.activations.visualization.preprocessing import ( + _expand_preprocessing_fields, +) +from simplexity.activations.visualization_configs import ( + 
ActivationVisualizationDataMapping, + ActivationVisualizationFieldRef, + CombinedMappingSection, +) +from simplexity.exceptions import ConfigValidationError + + +class TestPatternParsing: + """Test pattern detection and parsing.""" + + def test_parse_wildcard(self): + """Test parsing of wildcard component pattern.""" + spec_type, start, end = _parse_component_spec("*") + assert spec_type == "wildcard" + assert start is None + assert end is None + + def test_parse_range(self): + """Test parsing of range component pattern.""" + spec_type, start, end = _parse_component_spec("0...10") + assert spec_type == "range" + assert start == 0 + assert end == 10 + + def test_parse_range_non_zero_start(self): + """Test parsing of range component pattern with non-zero start.""" + spec_type, start, end = _parse_component_spec("5...20") + assert spec_type == "range" + assert start == 5 + assert end == 20 + + def test_parse_single_component(self): + """Test parsing of single component pattern.""" + spec_type, start, end = _parse_component_spec(5) + assert spec_type == "single" + assert start == 5 + assert end is None + + def test_parse_none(self): + """Test parsing of None component pattern.""" + spec_type, start, end = _parse_component_spec(None) + assert spec_type == "none" + assert start is None + assert end is None + + def test_parse_invalid_range_wrong_order(self): + """Test parsing of invalid range with start greater than end.""" + with pytest.raises(ConfigValidationError, match="start must be < end"): + _parse_component_spec("10...5") + + def test_parse_invalid_range_equal(self): + """Test parsing of invalid range with start equal to end.""" + with pytest.raises(ConfigValidationError, match="start must be < end"): + _parse_component_spec("5...5") + + def test_parse_invalid_range_format(self): + """Test parsing of invalid range format.""" + with pytest.raises(ConfigValidationError, match="Unrecognized component pattern"): + _parse_component_spec("0..10") + + def 
test_parse_invalid_range_single_value(self): + """Test parsing of invalid range with single value.""" + with pytest.raises(ConfigValidationError, match="Invalid range"): + _parse_component_spec("10...") + + def test_parse_invalid_range_non_numeric(self): + """Test parsing of invalid range with non-numeric values.""" + with pytest.raises(ConfigValidationError, match="Invalid range"): + _parse_component_spec("a...b") + + def test_parse_invalid_pattern(self): + """Test parsing of completely invalid component pattern.""" + with pytest.raises(ConfigValidationError, match="Unrecognized component pattern"): + _parse_component_spec("invalid") + + def test_is_expansion_pattern_star(self): + """Test detection of wildcard expansion patterns.""" + assert _has_field_pattern("prob_*") + assert _has_field_pattern("*_prob") + assert _has_field_pattern("prob_*_normalized") + + def test_is_expansion_pattern_range(self): + """Test detection of range expansion patterns.""" + assert _has_field_pattern("prob_0...10") + assert _has_field_pattern("pc_5...20") + + def test_is_expansion_pattern_no_pattern(self): + """Test detection of non-expansion patterns.""" + assert not _has_field_pattern("prob_0") + assert not _has_field_pattern("probability") + assert not _has_field_pattern("pc_component") + + def test_is_expansion_pattern_multiple_patterns(self): + """Test detection of invalid multiple patterns in field name.""" + with pytest.raises(ConfigValidationError, match="multiple patterns"): + _has_field_pattern("prob_*_layer_*") + + with pytest.raises(ConfigValidationError, match="multiple patterns"): + _has_field_pattern("prob_*_0...5") + + +class TestComponentCount: + """Test component count determination.""" + + def test_get_component_count_projections_2d(self): + """Test getting component count from 2D projections.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca") + projections = {"layer_0_pca": np.random.randn(100, 10)} + count = _get_component_count(ref, 
"layer_0", projections, None, False) + assert count == 10 + + def test_get_component_count_projections_different_sizes(self): + """Test getting component count from 2D projections with different sizes.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca") + projections = {"layer_0_pca": np.random.randn(50, 15)} + count = _get_component_count(ref, "layer_0", projections, None, False) + assert count == 15 + + def test_get_component_count_projections_concat_layers(self): + """Test getting component count from concatenated layer projections.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca") + projections = {"pca": np.random.randn(200, 20)} + count = _get_component_count(ref, "any_layer", projections, None, True) + assert count == 20 + + def test_get_component_count_projections_1d_raises(self): + """Test that 1D projections raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca") + projections = {"layer_0_pca": np.random.randn(100)} + with pytest.raises(ConfigValidationError, match="1D projection"): + _get_component_count(ref, "layer_0", projections, None, False) + + def test_get_component_count_projections_3d_raises(self): + """Test that 3D projections raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca") + projections = {"layer_0_pca": np.random.randn(10, 10, 10)} + with pytest.raises(ConfigValidationError, match="1D or 2D"): + _get_component_count(ref, "layer_0", projections, None, False) + + def test_get_component_count_belief_states(self): + """Test getting component count from belief states.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + belief_states = np.random.randn(100, 3) + count = _get_component_count(ref, "layer_0", {}, belief_states, False) + assert count == 3 + + def test_get_component_count_belief_states_different_size(self): + """Test getting component 
count from belief states with different size.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + belief_states = np.random.randn(50, 7) + count = _get_component_count(ref, "layer_0", {}, belief_states, False) + assert count == 7 + + def test_get_component_count_belief_states_none_raises(self): + """Test that None belief states raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + with pytest.raises(ConfigValidationError, match="not available"): + _get_component_count(ref, "layer_0", {}, None, False) + + def test_get_component_count_belief_states_1d_raises(self): + """Test that 1D belief states raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + belief_states = np.random.randn(100) + with pytest.raises(ConfigValidationError, match="2D"): + _get_component_count(ref, "layer_0", {}, belief_states, False) + + def test_get_component_count_unsupported_source(self): + """Test that unsupported sources raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="scalars", key="some_scalar") + with pytest.raises(ConfigValidationError, match="not supported"): + _get_component_count(ref, "layer_0", {}, None, False) + + +class TestFieldExpansion: + """Test field mapping expansion.""" + + def test_wildcard_expansion_projections(self): + """Test detection of wildcard expansion patterns.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") + projections = {"layer_0_pca": np.random.randn(50, 3)} + + expanded = _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 3 + assert "pc_0" in expanded + assert "pc_1" in expanded + assert "pc_2" in expanded + assert expanded["pc_0"].component == 0 + assert expanded["pc_1"].component == 1 + assert expanded["pc_2"].component == 2 + assert all(r.key == "pca" for r in expanded.values()) + 
assert all(r.source == "projections" for r in expanded.values()) + + def test_wildcard_expansion_belief_states(self): + """Test detection of wildcard expansion patterns.""" + ref = ActivationVisualizationFieldRef(source="belief_states", component="*") + belief_states = np.random.randn(50, 4) + + expanded = _expand_field_mapping("belief_*", ref, "layer_0", {}, {}, belief_states, False) + + assert len(expanded) == 4 + assert "belief_0" in expanded + assert "belief_3" in expanded + assert expanded["belief_0"].component == 0 + assert expanded["belief_3"].component == 3 + assert all(r.source == "belief_states" for r in expanded.values()) + + def test_range_expansion(self): + """Test detection of range expansion patterns.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...5") + projections = {"layer_0_pca": np.random.randn(50, 10)} + + expanded = _expand_field_mapping("pc_0...5", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 5 + assert "pc_0" in expanded + assert "pc_4" in expanded + assert "pc_5" not in expanded + assert expanded["pc_0"].component == 0 + assert expanded["pc_4"].component == 4 + + def test_range_expansion_with_offset(self): + """Test detection of range expansion patterns with offset.""" + ref = ActivationVisualizationFieldRef(source="projections", key="projected", component="2...5") + projections = {"layer_0_projected": np.random.randn(50, 10)} + + expanded = _expand_field_mapping("prob_2...5", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 3 + assert "prob_2" in expanded + assert "prob_3" in expanded + assert "prob_4" in expanded + assert "prob_5" not in expanded + assert expanded["prob_2"].component == 2 + assert expanded["prob_4"].component == 4 + + def test_wildcard_in_middle_of_name(self): + """Test detection of wildcard expansion patterns.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") + projections = 
{"layer_0_pca": np.random.randn(50, 3)} + + expanded = _expand_field_mapping("component_*_normalized", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 3 + assert "component_0_normalized" in expanded + assert "component_1_normalized" in expanded + assert "component_2_normalized" in expanded + + def test_no_expansion_needed(self): + """Test that no expansion occurs when component is a specific integer.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component=0) + projections = {"layer_0_pca": np.random.randn(50, 5)} + + expanded = _expand_field_mapping("pc_0", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 1 + assert "pc_0" in expanded + assert expanded["pc_0"].component == 0 + + def test_no_expansion_none_component(self): + """Test that no expansion occurs when component is None.""" + ref = ActivationVisualizationFieldRef(source="metadata", key="step") + projections = {} + + expanded = _expand_field_mapping("step", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 1 + assert "step" in expanded + assert expanded["step"].component is None + + def test_field_pattern_without_component_pattern_raises(self): + """Test that a field pattern without a component pattern raises an error.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component=0) + projections = {"layer_0_pca": np.random.randn(50, 5)} + + with pytest.raises(ConfigValidationError, match="has pattern but component is not"): + _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, False) + + def test_component_pattern_without_field_pattern_raises(self): + """Test that a component pattern without a field pattern raises an error.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") + projections = {"layer_0_pca": np.random.randn(50, 5)} + + with pytest.raises(ConfigValidationError, match="requires field name pattern"): + 
_expand_field_mapping("pc_0", ref, "layer_0", projections, {}, None, False) + + def test_range_exceeds_available_components(self): + """Test that a range exceeding available components raises an error.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...20") + projections = {"layer_0_pca": np.random.randn(50, 10)} + + with pytest.raises(ConfigValidationError, match="exceeds available components"): + _expand_field_mapping("pc_0...20", ref, "layer_0", projections, {}, None, False) + + def test_range_partially_exceeds_available_components(self): + """Test that a range partially exceeding available components raises an error.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="5...15") + projections = {"layer_0_pca": np.random.randn(50, 10)} + + with pytest.raises(ConfigValidationError, match="exceeds available components"): + _expand_field_mapping("pc_5...15", ref, "layer_0", projections, {}, None, False) + + def test_expansion_preserves_reducer(self): + """Test that expansion preserves the reducer attribute.""" + ref = ActivationVisualizationFieldRef(source="belief_states", component="*", reducer="l2_norm") + belief_states = np.random.randn(50, 3) + + expanded = _expand_field_mapping("belief_*", ref, "layer_0", {}, {}, belief_states, False) + + assert all(r.reducer == "l2_norm" for r in expanded.values()) + + def test_expansion_with_concat_layers(self): + """Test expansion when projections are concatenated across layers.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") + projections = {"pca": np.random.randn(50, 5)} + + expanded = _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, True) + + assert len(expanded) == 5 + assert all(f"pc_{i}" in expanded for i in range(5)) + + +class TestFieldRefValidation: + """Test ActivationVisualizationFieldRef validation.""" + + def test_valid_wildcard_projections(self): + """Test that wildcard 
patterns in projections are valid.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") + assert ref.component == "*" + + def test_valid_range_projections(self): + """Test that range patterns in projections are valid.""" + ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...10") + assert ref.component == "0...10" + + def test_valid_wildcard_belief_states(self): + """Test that wildcard patterns in belief_states are valid.""" + ref = ActivationVisualizationFieldRef(source="belief_states", component="*") + assert ref.component == "*" + + def test_invalid_pattern_format(self): + """Test that invalid pattern formats raise a ConfigValidationError.""" + with pytest.raises(ConfigValidationError, match="invalid"): + ActivationVisualizationFieldRef(source="projections", key="pca", component="invalid_pattern") + + def test_invalid_range_wrong_separator(self): + """Test that invalid range separators raise a ConfigValidationError.""" + with pytest.raises(ConfigValidationError, match="invalid"): + ActivationVisualizationFieldRef(source="projections", key="pca", component="0..10") + + def test_pattern_on_unsupported_source_scalars(self): + """Test that pattern expansion is not supported for scalars source.""" + with pytest.raises(ConfigValidationError, match="only supported for projections/belief_states"): + ActivationVisualizationFieldRef(source="scalars", key="some_scalar", component="*") + + def test_pattern_on_unsupported_source_metadata(self): + """Test that pattern expansion is not supported for metadata source.""" + with pytest.raises(ConfigValidationError, match="only supported for projections/belief_states"): + ActivationVisualizationFieldRef(source="metadata", key="step", component="*") + + def test_pattern_on_unsupported_source_weights(self): + """Test that pattern expansion is not supported for weights source.""" + with pytest.raises(ConfigValidationError, match="only supported for 
projections/belief_states"): + ActivationVisualizationFieldRef(source="weights", component="*") + + +class TestPreprocessingFieldExpansion: + """Test wildcard expansion for preprocessing input_fields.""" + + def test_wildcard_expansion(self): + """Test that wildcard patterns in preprocessing fields are expanded correctly.""" + columns = ["belief_0", "belief_1", "belief_2", "belief_3", "step", "layer"] + patterns = ["belief_*"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["belief_0", "belief_1", "belief_2", "belief_3"] + + def test_range_expansion(self): + """Test that range patterns in preprocessing fields are expanded correctly.""" + columns = ["prob_0", "prob_1", "prob_2", "prob_3", "prob_4", "prob_5"] + patterns = ["prob_0...3"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["prob_0", "prob_1", "prob_2"] + + def test_range_expansion_with_offset(self): + """Test that range patterns with offsets in preprocessing fields are expanded correctly.""" + columns = ["pc_0", "pc_1", "pc_2", "pc_3", "pc_4", "pc_5", "pc_6"] + patterns = ["pc_2...5"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["pc_2", "pc_3", "pc_4"] + + def test_mixed_patterns_and_literals(self): + """Test that mixed wildcard patterns and literal fields are expanded correctly.""" + columns = ["belief_0", "belief_1", "belief_2", "prob_0", "prob_1", "step"] + patterns = ["belief_*", "step"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["belief_0", "belief_1", "belief_2", "step"] + + def test_multiple_wildcards(self): + """Test that multiple wildcard patterns in preprocessing fields are expanded correctly.""" + columns = ["belief_0", "belief_1", "prob_0", "prob_1", "prob_2"] + patterns = ["belief_*", "prob_*"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["belief_0", "belief_1", "prob_0", "prob_1", "prob_2"] 
+ + def test_wildcard_no_matches_raises(self): + """Test that a wildcard pattern with no matches raises a ConfigValidationError.""" + columns = ["step", "layer", "sequence"] + patterns = ["belief_*"] + + with pytest.raises(ConfigValidationError, match="did not match any columns"): + _expand_preprocessing_fields(patterns, columns) + + def test_range_missing_column_raises(self): + """Test that a range pattern with missing columns raises a ConfigValidationError.""" + columns = ["prob_0", "prob_1"] # Missing prob_2 + patterns = ["prob_0...3"] + + with pytest.raises(ConfigValidationError, match="column not found"): + _expand_preprocessing_fields(patterns, columns) + + def test_literal_fields_preserved(self): + """Test that literal fields in preprocessing fields are preserved.""" + columns = ["field_a", "field_b", "field_c"] + patterns = ["field_a", "field_c"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["field_a", "field_c"] + + def test_wildcard_sorts_numerically(self): + """Test that wildcard patterns in preprocessing fields are sorted numerically.""" + columns = ["item_10", "item_2", "item_1", "item_20"] + patterns = ["item_*"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + # Should be sorted by numeric value, not lexicographic + assert expanded == ["item_1", "item_2", "item_10", "item_20"] + + def test_pattern_in_middle_of_name(self): + """Test that patterns in the middle of field names are expanded correctly.""" + columns = ["component_0_norm", "component_1_norm", "component_2_norm"] + patterns = ["component_*_norm"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["component_0_norm", "component_1_norm", "component_2_norm"] + + def test_empty_patterns_list(self): + """Test that an empty patterns list returns an empty list.""" + columns = ["field_a", "field_b"] + patterns = [] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert not expanded + + 
def test_range_pattern_in_middle(self): + """Test that range patterns in the middle of field names are expanded correctly.""" + columns = ["feature_0_scaled", "feature_1_scaled", "feature_2_scaled"] + patterns = ["feature_0...2_scaled"] + + expanded = _expand_preprocessing_fields(patterns, columns) + + assert expanded == ["feature_0_scaled", "feature_1_scaled"] + + +class TestKeyPatternExpansion: + """Test projection key pattern expansion (e.g., factor_*/projected).""" + + def test_has_key_pattern_wildcard(self): + """Test that _has_key_pattern detects wildcard patterns correctly.""" + assert _has_key_pattern("factor_*/projected") + assert _has_key_pattern("*/projected") + assert _has_key_pattern("factor_*") + + def test_has_key_pattern_range(self): + """Test that _has_key_pattern detects range patterns correctly.""" + assert _has_key_pattern("factor_0...3/projected") + assert _has_key_pattern("0...5/projected") + + def test_has_key_pattern_none(self): + """Test that _has_key_pattern returns False for non-pattern keys.""" + assert not _has_key_pattern(None) + assert not _has_key_pattern("projected") + assert not _has_key_pattern("factor_0/projected") + + def test_has_key_pattern_multiple_raises(self): + """Test that _has_key_pattern raises an error for multiple patterns.""" + with pytest.raises(ConfigValidationError, match="multiple patterns"): + _has_key_pattern("factor_*/layer_*/projected") + + def test_expand_projection_key_pattern_wildcard(self): + """Test that _expand_projection_key_pattern expands wildcard patterns correctly.""" + projections = { + "layer_0_factor_0/projected": np.random.randn(10, 3), + "layer_0_factor_1/projected": np.random.randn(10, 3), + "layer_0_factor_2/projected": np.random.randn(10, 3), + } + + result = _expand_projection_key_pattern("factor_*/projected", "layer_0", projections, False) + + assert len(result) == 3 + assert result["0"] == "factor_0/projected" + assert result["1"] == "factor_1/projected" + assert result["2"] == 
"factor_2/projected" + + def test_expand_projection_key_pattern_range(self): + """Test that _expand_projection_key_pattern expands range patterns correctly.""" + projections = { + "layer_0_factor_0/projected": np.random.randn(10, 3), + "layer_0_factor_1/projected": np.random.randn(10, 3), + "layer_0_factor_2/projected": np.random.randn(10, 3), + } + + result = _expand_projection_key_pattern("factor_0...2/projected", "layer_0", projections, False) + + assert len(result) == 2 + assert result["0"] == "factor_0/projected" + assert result["1"] == "factor_1/projected" + + def test_expand_projection_key_pattern_concat_layers(self): + """Test that _expand_projection_key_pattern works with concatenated layers.""" + projections = { + "factor_0/projected": np.random.randn(10, 3), + "factor_1/projected": np.random.randn(10, 3), + } + + result = _expand_projection_key_pattern("factor_*/projected", "any_layer", projections, True) + + assert len(result) == 2 + assert result["0"] == "factor_0/projected" + assert result["1"] == "factor_1/projected" + + def test_expand_projection_key_pattern_no_matches_raises(self): + """Test that _expand_projection_key_pattern raises an error when no keys match.""" + projections = {"layer_0_pca": np.random.randn(10, 3)} + + with pytest.raises(ConfigValidationError, match="No projection keys found"): + _expand_projection_key_pattern("factor_*/projected", "layer_0", projections, False) + + def test_field_mapping_with_key_pattern(self): + """Test that field mappings with key patterns are expanded correctly.""" + ref = ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=0, + group_as="factor", + ) + projections = { + "layer_0_factor_0/projected": np.random.randn(10, 3), + "layer_0_factor_1/projected": np.random.randn(10, 3), + } + + expanded = _expand_field_mapping("factor_*_prob", ref, "layer_0", projections, {}, None, False) + + assert len(expanded) == 2 + assert "factor_0_prob" in expanded + assert 
"factor_1_prob" in expanded + assert expanded["factor_0_prob"].key == "factor_0/projected" + assert expanded["factor_1_prob"].key == "factor_1/projected" + assert expanded["factor_0_prob"]._group_value == "0" # pylint: disable=protected-access + assert expanded["factor_1_prob"]._group_value == "1" # pylint: disable=protected-access + assert expanded["factor_0_prob"].group_as == "factor" + + def test_field_mapping_with_key_and_component_patterns(self): + """Test that field mappings with key and component patterns are expanded correctly.""" + ref = ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component="*", + group_as="factor", + ) + projections = { + "layer_0_factor_0/projected": np.random.randn(10, 3), + "layer_0_factor_1/projected": np.random.randn(10, 3), + } + + expanded = _expand_field_mapping("factor_*_prob_*", ref, "layer_0", projections, {}, None, False) + + # Cross-product: 2 factors * 3 components = 6 expanded fields + assert len(expanded) == 6 + assert "factor_0_prob_0" in expanded + assert "factor_0_prob_1" in expanded + assert "factor_0_prob_2" in expanded + assert "factor_1_prob_0" in expanded + assert "factor_1_prob_1" in expanded + assert "factor_1_prob_2" in expanded + + # Check that components are correct + assert expanded["factor_0_prob_0"].component == 0 + assert expanded["factor_0_prob_1"].component == 1 + assert expanded["factor_1_prob_2"].component == 2 + + # Check that keys and group values are correct + assert expanded["factor_0_prob_0"].key == "factor_0/projected" + assert expanded["factor_1_prob_0"].key == "factor_1/projected" + assert expanded["factor_0_prob_0"]._group_value == "0" # pylint: disable=protected-access + assert expanded["factor_1_prob_0"]._group_value == "1" # pylint: disable=protected-access + + def test_key_pattern_without_field_pattern_raises(self): + """Test that a key pattern without a field pattern raises an error.""" + ref = ActivationVisualizationFieldRef( + 
source="projections", + key="factor_*/projected", + component=0, + group_as="factor", + ) + projections = {"layer_0_factor_0/projected": np.random.randn(10, 3)} + + with pytest.raises(ConfigValidationError, match="requires field name pattern"): + _expand_field_mapping("prob_0", ref, "layer_0", projections, {}, None, False) + + +class TestGroupAsValidation: + """Test group_as parameter validation.""" + + def test_key_pattern_requires_group_as(self): + """Test that a key pattern requires the group_as parameter.""" + with pytest.raises(ConfigValidationError, match="requires `group_as`"): + ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=0, + ) + + def test_group_as_only_for_projections(self): + """Test that group_as is only valid for projections source.""" + with pytest.raises(ConfigValidationError, match="only supported for projections"): + ActivationVisualizationFieldRef( + source="scalars", + key="some_key", + group_as="factor", + ) + + def test_valid_key_pattern_with_group_as(self): + """Test that a valid key pattern with group_as is accepted.""" + ref = ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=0, + group_as="factor", + ) + assert ref.group_as == "factor" + assert ref.key == "factor_*/projected" + + def test_valid_key_pattern_with_list_group_as(self): + """Test that a valid key pattern with list group_as is accepted.""" + ref = ActivationVisualizationFieldRef( + source="projections", + key="factor_*/projected", + component=0, + group_as=["factor", "layer"], + ) + assert ref.group_as == ["factor", "layer"] + + +class TestExtractBaseColumnName: + """Test base column name extraction for group expansion.""" + + def test_extract_prefix_pattern(self): + """Test that base column names are correctly extracted from prefixed patterns.""" + assert _extract_base_column_name("factor_0_prob_0", "0") == "prob_0" + assert _extract_base_column_name("factor_1_prob_0", "1") == 
"prob_0" + assert _extract_base_column_name("factor_2_belief", "2") == "belief" + + def test_extract_suffix_only_pattern_returns_original(self): + """Test that base column names are unchanged when no prefix pattern is present.""" + # Columns without a _N_suffix pattern are returned unchanged + # This ensures we don't strip meaningful parts of column names + assert _extract_base_column_name("factor_0", "0") == "factor_0" + assert _extract_base_column_name("group_1", "1") == "group_1" + + def test_no_pattern_match_returns_original(self): + """Test that base column names are unchanged when no pattern match is found.""" + assert _extract_base_column_name("prob_0", "0") == "prob_0" + assert _extract_base_column_name("some_column", "1") == "some_column" + + +class TestCombinedMappingSection: + """Test CombinedMappingSection validation.""" + + def test_valid_combined_section(self): + """Test that a valid CombinedMappingSection is accepted.""" + section = CombinedMappingSection( + label="prediction", + mappings={ + "prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0), + }, + ) + assert section.label == "prediction" + assert len(section.mappings) == 1 + + def test_empty_mappings_raises(self): + """Test that an empty mappings dictionary raises a ConfigValidationError.""" + with pytest.raises(ConfigValidationError, match="must have at least one mapping"): + CombinedMappingSection(label="empty", mappings={}) + + +class TestCombinedDataMapping: + """Test ActivationVisualizationDataMapping with combined sections.""" + + def test_valid_combined_mapping(self): + """Test that a valid ActivationVisualizationDataMapping with combined sections is accepted.""" + mapping = ActivationVisualizationDataMapping( + combined=[ + CombinedMappingSection( + label="prediction", + mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0)}, + ), + CombinedMappingSection( + label="ground_truth", + mappings={"prob_0": 
ActivationVisualizationFieldRef(source="belief_states", component=0)}, + ), + ], + combine_as="data_type", + ) + assert mapping.combined is not None + assert len(mapping.combined) == 2 + assert mapping.combine_as == "data_type" + + def test_combined_without_combine_as_raises(self): + """Test that an ActivationVisualizationDataMapping without 'combine_as' raises a ConfigValidationError.""" + with pytest.raises(ConfigValidationError, match="'combine_as' is required"): + ActivationVisualizationDataMapping( + combined=[ + CombinedMappingSection( + label="prediction", + mappings={ + "prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0) + }, + ), + ], + ) + + def test_combined_with_mappings_raises(self): + """Test that a DataMapping with both 'mappings' and 'combined' raises ConfigValidationError.""" + with pytest.raises(ConfigValidationError, match="Cannot use both"): + ActivationVisualizationDataMapping( + mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0)}, + combined=[ + CombinedMappingSection( + label="prediction", + mappings={ + "prob_1": ActivationVisualizationFieldRef(source="projections", key="proj", component=1) + }, + ), + ], + combine_as="data_type", + ) + + +class TestBeliefStateFactorPatterns: + """Test belief state factor pattern expansion for 3D belief states.""" + + def test_factor_field_only_for_belief_states(self): + """Test that factor field is only supported for belief_states source.""" + with pytest.raises(ConfigValidationError, match="only supported for belief_states"): + ActivationVisualizationFieldRef( + source="projections", + key="proj", + factor=0, + ) + + def test_factor_pattern_requires_group_as(self): + """Test that factor patterns require the group_as parameter.""" + with pytest.raises(ConfigValidationError, match="requires `group_as`"): + ActivationVisualizationFieldRef( + source="belief_states", + factor="*", + component=0, + ) + + def 
test_valid_factor_with_group_as(self): + """Test that a valid factor pattern with group_as is accepted.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor="*", + component=0, + group_as="factor", + ) + assert ref.factor == "*" + assert ref.group_as == "factor" + + def test_valid_single_factor(self): + """Test that a valid single factor is accepted.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor=0, + component=0, + ) + assert ref.factor == 0 + + def test_expand_belief_factor_mapping_wildcard(self): + """Test expanding belief factor mapping with wildcard pattern.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor="*", + component=0, + group_as="factor", + ) + # 3D beliefs: (samples, factors, states) + beliefs = np.random.randn(10, 3, 4) + + expanded = _expand_belief_factor_mapping("factor_*_prob", ref, beliefs) + + assert len(expanded) == 3 + assert "factor_0_prob" in expanded + assert "factor_1_prob" in expanded + assert "factor_2_prob" in expanded + assert expanded["factor_0_prob"].factor == 0 + assert expanded["factor_1_prob"].factor == 1 + assert expanded["factor_2_prob"].factor == 2 + assert expanded["factor_0_prob"]._group_value == "0" # pylint: disable=protected-access + assert expanded["factor_1_prob"]._group_value == "1" # pylint: disable=protected-access + + def test_expand_belief_factor_mapping_range(self): + """Test expanding belief factor mapping with range pattern.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor="0...2", + component=0, + group_as="factor", + ) + beliefs = np.random.randn(10, 5, 4) + + expanded = _expand_belief_factor_mapping("factor_0...2_prob", ref, beliefs) + + assert len(expanded) == 2 + assert "factor_0_prob" in expanded + assert "factor_1_prob" in expanded + assert "factor_2_prob" not in expanded + + def test_expand_belief_factor_and_component_patterns(self): + """Test expanding belief factor mapping with both 
factor and component patterns.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor="*", + component="*", + group_as="factor", + ) + beliefs = np.random.randn(10, 2, 3) + + expanded = _expand_belief_factor_mapping("factor_*_state_*", ref, beliefs) + + # Cross-product: 2 factors * 3 states = 6 + assert len(expanded) == 6 + assert "factor_0_state_0" in expanded + assert "factor_0_state_1" in expanded + assert "factor_0_state_2" in expanded + assert "factor_1_state_0" in expanded + assert "factor_1_state_1" in expanded + assert "factor_1_state_2" in expanded + assert expanded["factor_0_state_0"].factor == 0 + assert expanded["factor_0_state_0"].component == 0 + assert expanded["factor_1_state_2"].factor == 1 + assert expanded["factor_1_state_2"].component == 2 + + def test_expand_belief_factor_mapping_2d_raises(self): + """Test that expanding belief factor mapping with 2D beliefs raises an error.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor="*", + component=0, + group_as="factor", + ) + beliefs = np.random.randn(10, 4) # 2D, not 3D + + with pytest.raises(ConfigValidationError, match="require 3D beliefs"): + _expand_belief_factor_mapping("factor_*_prob", ref, beliefs) + + def test_expand_belief_factor_range_exceeds_raises(self): + """Test that expanding belief factor mapping with out-of-bounds range raises an error.""" + ref = ActivationVisualizationFieldRef( + source="belief_states", + factor="0...10", + component=0, + group_as="factor", + ) + beliefs = np.random.randn(10, 3, 4) + + with pytest.raises(ConfigValidationError, match="exceeds available factors"): + _expand_belief_factor_mapping("factor_0...10_prob", ref, beliefs) + + +class TestResolveBeliefStates: + """Test belief state resolution with factor dimension.""" + + def test_resolve_2d_belief_states(self): + """Test resolving 2D belief states without factor dimension.""" + ref = ActivationVisualizationFieldRef(source="belief_states", component=1) + 
beliefs = np.array([[0.1, 0.2, 0.7], [0.3, 0.4, 0.3]]) + + result = _resolve_belief_states(beliefs, ref) + + np.testing.assert_array_almost_equal(result, [0.2, 0.4]) + + def test_resolve_3d_belief_states_with_factor(self): + """Test resolving 3D belief states with specified factor.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=1, component=2) + # Shape: (samples=2, factors=3, states=4) + beliefs = np.random.randn(2, 3, 4) + + result = _resolve_belief_states(beliefs, ref) + + # Should select factor 1, component 2 + np.testing.assert_array_almost_equal(result, beliefs[:, 1, 2]) + + def test_resolve_3d_without_factor_raises(self): + """Test resolving 3D belief states without specifying factor raises an error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", component=0) + beliefs = np.random.randn(10, 3, 4) + + with pytest.raises(ConfigValidationError, match="no `factor` was specified"): + _resolve_belief_states(beliefs, ref) + + def test_resolve_2d_with_factor_raises(self): + """Test resolving 2D belief states with factor specified raises an error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, component=0) + beliefs = np.random.randn(10, 4) + + with pytest.raises(ConfigValidationError, match="2D but `factor=0` was specified"): + _resolve_belief_states(beliefs, ref) + + def test_resolve_3d_factor_out_of_bounds_raises(self): + """Test resolving 3D belief states with out-of-bounds factor raises an error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=5, component=0) + beliefs = np.random.randn(10, 3, 4) + + with pytest.raises(ConfigValidationError, match="out of bounds"): + _resolve_belief_states(beliefs, ref) + + def test_resolve_3d_with_reducer_argmax(self): + """Test resolving 3D belief states with argmax reducer.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, reducer="argmax") + beliefs = np.array([[[0.1, 0.2, 0.7], [0.3, 0.4, 0.3]], 
[[0.8, 0.1, 0.1], [0.2, 0.6, 0.2]]]) + + result = _resolve_belief_states(beliefs, ref) + + # Factor 0: [[0.1, 0.2, 0.7], [0.8, 0.1, 0.1]] -> argmax = [2, 0] + np.testing.assert_array_equal(result, [2, 0]) + + def test_resolve_3d_with_reducer_l2_norm(self): + """Test resolving 3D belief states with l2_norm reducer.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, reducer="l2_norm") + beliefs = np.array([[[3.0, 4.0, 0.0]], [[1.0, 0.0, 0.0]]]) + + result = _resolve_belief_states(beliefs, ref) + + np.testing.assert_array_almost_equal(result, [5.0, 1.0]) diff --git a/tests/activations/test_scalar_history.py b/tests/activations/test_scalar_history.py new file mode 100644 index 00000000..f95c6615 --- /dev/null +++ b/tests/activations/test_scalar_history.py @@ -0,0 +1,405 @@ +"""Tests for step-wise scalar history tracking in ActivationTracker.""" + +import jax.numpy as jnp +import numpy as np +import pytest + +from simplexity.activations.activation_analyses import PcaAnalysis +from simplexity.activations.activation_tracker import ActivationTracker +from simplexity.activations.visualization.dataframe_builders import _build_dataframe +from simplexity.activations.visualization_configs import ( + ActivationVisualizationConfig, + ActivationVisualizationDataMapping, + ActivationVisualizationFieldRef, +) +from simplexity.exceptions import ConfigValidationError + + +@pytest.fixture +def synthetic_data(): + """Create synthetic data for testing.""" + batch_size = 4 + seq_len = 5 + belief_dim = 3 + d_layer0 = 8 + d_layer1 = 12 + + inputs = jnp.array( + [ + [1, 2, 3, 4, 5], + [1, 2, 3, 6, 7], + [1, 2, 8, 9, 10], + [1, 2, 3, 4, 11], + ] + ) + + beliefs = jnp.ones((batch_size, seq_len, belief_dim)) * 0.5 + probs = jnp.ones((batch_size, seq_len)) * 0.1 + + activations = { + "layer_0": jnp.ones((batch_size, seq_len, d_layer0)) * 0.3, + "layer_1": jnp.ones((batch_size, seq_len, d_layer1)) * 0.7, + } + + return { + "inputs": inputs, + "beliefs": beliefs, + 
"probs": probs, + "activations": activations, + } + + +class TestScalarHistory: + """Tests for scalar history tracking functionality.""" + + def test_scalar_history_empty_on_init(self): + """Scalar history should be empty when tracker is initialized.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = ActivationTracker({"pca": analysis}) + + df = tracker.get_scalar_history_df() + assert df.empty + assert list(df.columns) == ["metric", "step", "value"] + + def test_scalar_history_without_step_parameter(self, synthetic_data): + """Calling analyze without step parameter should not accumulate history.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = ActivationTracker({"pca": analysis}) + + scalars, _, _ = tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + ) + + assert scalars # Scalars should still be returned + df = tracker.get_scalar_history_df() + assert df.empty # But history should remain empty + + def test_scalar_history_single_step(self, synthetic_data): + """Scalar history should record values when step is provided.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = ActivationTracker({"pca": analysis}) + + scalars, _, _ = tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=0, + ) + + df = tracker.get_scalar_history_df() + assert not df.empty + assert list(df.columns) == ["metric", "step", "value"] + assert len(df) == len(scalars) + assert all(df["step"] == 0) + + for metric_name, scalar_value in scalars.items(): + metric_rows = df[df["metric"] == metric_name] + assert len(metric_rows) == 1 + assert metric_rows.iloc[0]["value"] == float(scalar_value) + + def 
test_scalar_history_multiple_steps(self, synthetic_data): + """Scalar history should accumulate across multiple steps.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = ActivationTracker({"pca": analysis}) + + steps = [0, 10, 20, 30] + for step in steps: + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=step, + ) + + df = tracker.get_scalar_history_df() + assert not df.empty + assert df["step"].nunique() == len(steps) + assert sorted(df["step"].unique()) == steps + + # Each scalar metric should have one entry per step + for metric_name in df["metric"].unique(): + metric_rows = df[df["metric"] == metric_name] + assert len(metric_rows) == len(steps) + + def test_scalar_history_mixed_with_and_without_step(self, synthetic_data): + """Mixing calls with and without step parameter should work correctly.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = ActivationTracker({"pca": analysis}) + + # Call without step + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + ) + + df = tracker.get_scalar_history_df() + assert df.empty + + # Call with step + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=5, + ) + + df = tracker.get_scalar_history_df() + assert not df.empty + assert all(df["step"] == 5) + + # Call without step again + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + ) + + df = tracker.get_scalar_history_df() + assert not df.empty + assert all(df["step"] == 5) # History should still only 
contain step 5 + + def test_scalar_history_multiple_analyses(self, synthetic_data): + """Scalar history should track metrics from multiple analyses separately.""" + pca_analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + pca_analysis2 = PcaAnalysis(n_components=5, last_token_only=False, concat_layers=False) + + tracker = ActivationTracker({"pca": pca_analysis, "pca_alt": pca_analysis2}) + + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=0, + ) + + df = tracker.get_scalar_history_df() + assert not df.empty + + # Check that metrics from both analyses are present + pca_metrics = df[df["metric"].str.startswith("pca/")] + pca_alt_metrics = df[df["metric"].str.startswith("pca_alt/")] + + assert len(pca_metrics) > 0 + assert len(pca_alt_metrics) > 0 + + def test_scalar_history_dataframe_structure(self, synthetic_data): + """Validate the structure of the scalar history DataFrame.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = ActivationTracker({"pca": analysis}) + + for step in [0, 1, 2]: + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=step, + ) + + df = tracker.get_scalar_history_df() + + # Check column types + assert df["metric"].dtype == object + assert df["step"].dtype in [int, "int64", "int32"] + assert df["value"].dtype in [float, "float64", "float32"] + + # Check no null values + assert bool(df["metric"].notnull().all()) + assert bool(df["step"].notnull().all()) + assert bool(df["value"].notnull().all()) + + def test_scalar_history_preserves_order(self, synthetic_data): + """Scalar history should preserve the order of steps.""" + analysis = PcaAnalysis(n_components=3, last_token_only=True, concat_layers=False) + tracker = 
ActivationTracker({"pca": analysis}) + + # Add steps in non-sequential order + steps = [5, 1, 10, 3, 7] + for step in steps: + tracker.analyze( + inputs=synthetic_data["inputs"], + beliefs=synthetic_data["beliefs"], + probs=synthetic_data["probs"], + activations=synthetic_data["activations"], + step=step, + ) + + df = tracker.get_scalar_history_df() + + # For each metric, steps should appear in the order they were added + for metric_name in df["metric"].unique(): + metric_df = df[df["metric"] == metric_name] + recorded_steps = metric_df["step"].tolist() + assert recorded_steps == steps + + +class TestScalarHistoryVisualizations: + """Ensure scalar_history visualizations leverage step-aware accumulation.""" + + def _viz_cfg(self) -> ActivationVisualizationConfig: + return ActivationVisualizationConfig( + name="history", + data_mapping=ActivationVisualizationDataMapping( + mappings={ + "rmse": ActivationVisualizationFieldRef( + source="scalar_pattern", + key="layer_*_rmse", + ) + } + ), + ) + + def _metadata(self) -> dict[str, np.ndarray]: + return { + "analysis": np.asarray(["analysis"], dtype=object), + "step": np.asarray([1], dtype=np.int32), + } + + def test_scalar_history_dataframe_requires_step(self): + """Test that scalar_history source requires step parameter in analyze().""" + viz_cfg = self._viz_cfg() + with pytest.raises( + ConfigValidationError, + match=r"scalar_pattern/scalar_history source but analyze\(\) was called without the `step` parameter", + ): + _build_dataframe( + viz_cfg, + self._metadata(), + projections={}, + scalars={"analysis/layer_0_rmse": 0.1}, + scalar_history={}, + scalar_history_step=None, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + def test_scalar_history_dataframe_uses_current_step(self): + """Test that scalar_history dataframe uses the current step value.""" + viz_cfg = self._viz_cfg() + df = _build_dataframe( + viz_cfg, + self._metadata(), + projections={}, + 
scalars={"analysis/layer_0_rmse": 0.42}, + scalar_history={}, + scalar_history_step=7, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + assert list(df["step"]) == [7] + assert list(df["layer"]) == ["layer_0"] + assert list(df["rmse"]) == [0.42] + assert list(df["metric"]) == ["analysis/layer_0_rmse"] + + def test_scalar_history_pattern_matches_complex_keys(self): + """Test that scalar_pattern matches complex layer hook keys.""" + viz_cfg = ActivationVisualizationConfig( + name="history", + data_mapping=ActivationVisualizationDataMapping( + mappings={ + "rmse": ActivationVisualizationFieldRef( + source="scalar_pattern", + key="blocks.*.hook_resid_*_rmse", + ) + } + ), + ) + scalars = { + "analysis/blocks.0.hook_resid_pre_rmse": 0.1, + "analysis/blocks.0.hook_resid_mid_rmse": 0.2, + "analysis/blocks.1.hook_resid_post_rmse": 0.3, + } + + df = _build_dataframe( + viz_cfg, + self._metadata(), + projections={}, + scalars=scalars, + scalar_history={}, + scalar_history_step=11, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + sorted_rows = df.sort_values("metric").reset_index(drop=True) + ordered_keys = sorted(scalars.keys()) + assert list(sorted_rows["metric"]) == ordered_keys + assert list(sorted_rows["rmse"]) == [scalars[key] for key in ordered_keys] + assert sorted(df["layer"].unique()) == [ + "blocks.0.hook_resid_mid_rmse", + "blocks.0.hook_resid_pre_rmse", + "blocks.1.hook_resid_post_rmse", + ] + + def test_scalar_history_pattern_matches_without_prefix(self): + """Test that scalar_pattern matches keys without analysis prefix.""" + viz_cfg = ActivationVisualizationConfig( + name="history", + data_mapping=ActivationVisualizationDataMapping( + mappings={ + "rmse": ActivationVisualizationFieldRef( + source="scalar_pattern", + key="blocks.*.hook_resid_*_rmse", + ) + } + ), + ) + scalars = { + "blocks.0.hook_resid_pre_rmse": 0.5, + "blocks.1.hook_resid_pre_rmse": 0.6, + } + + df = 
_build_dataframe( + viz_cfg, + self._metadata(), + projections={}, + scalars=scalars, + scalar_history={}, + scalar_history_step=3, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) + + assert sorted(df["metric"]) == sorted(scalars.keys()) + + def test_scalar_history_pattern_requires_match(self): + """Test that scalar_pattern raises error when no scalars match.""" + viz_cfg = ActivationVisualizationConfig( + name="history", + data_mapping=ActivationVisualizationDataMapping( + mappings={ + "rmse": ActivationVisualizationFieldRef( + source="scalar_pattern", + key="blocks.*.hook_resid_*_rmse", + ) + } + ), + ) + + with pytest.raises(ConfigValidationError, match="No scalar pattern keys found matching pattern"): + _build_dataframe( + viz_cfg, + self._metadata(), + projections={}, + scalars={"analysis/other_metric": 0.1}, + scalar_history={}, + scalar_history_step=0, + belief_states=None, + analysis_concat_layers=False, + layer_names=["layer_0"], + ) diff --git a/tests/activations/test_scalar_wildcard_expansion.py b/tests/activations/test_scalar_wildcard_expansion.py new file mode 100644 index 00000000..b01a6156 --- /dev/null +++ b/tests/activations/test_scalar_wildcard_expansion.py @@ -0,0 +1,182 @@ +"""Tests for scalar wildcard expansion in activation visualizations.""" + +import pytest + +from simplexity.activations.visualization.pattern_expansion import _expand_scalar_keys +from simplexity.exceptions import ConfigValidationError + + +class TestScalarWildcardExpansion: + """Tests for _expand_scalar_keys function.""" + + def test_scalar_no_pattern_returns_identity(self): + """Scalars without patterns should return as-is.""" + scalars = {"layer_0_rmse": 0.5} + result = _expand_scalar_keys("rmse", "layer_0_rmse", "layer_0", scalars) + + assert result == {"rmse": "layer_0_rmse"} + + def test_scalar_wildcard_expansion(self): + """Wildcard in scalar key should expand to all matching keys.""" + scalars = { + "cumvar_0": 0.8, + "cumvar_1": 
0.9, + "cumvar_2": 0.95, + "cumvar_3": 0.99, + "other_metric": 1.0, + } + result = _expand_scalar_keys("cumvar_*", "cumvar_*", "layer_0", scalars) + + assert len(result) == 4 + assert result == { + "cumvar_0": "cumvar_0", + "cumvar_1": "cumvar_1", + "cumvar_2": "cumvar_2", + "cumvar_3": "cumvar_3", + } + + def test_scalar_wildcard_with_prefix_suffix(self): + """Wildcard pattern with prefix and suffix should match correctly.""" + scalars = { + "layer_0_cumvar_0": 0.8, + "layer_0_cumvar_1": 0.9, + "layer_0_cumvar_2": 0.95, + "layer_1_cumvar_0": 0.7, + "other": 1.0, + } + result = _expand_scalar_keys("cv_*", "layer_0_cumvar_*", "layer_0", scalars) + + assert len(result) == 3 + assert result == { + "cv_0": "layer_0_cumvar_0", + "cv_1": "layer_0_cumvar_1", + "cv_2": "layer_0_cumvar_2", + } + + def test_scalar_range_expansion(self): + """Range pattern should expand to specified indices.""" + scalars = { + "cumvar_0": 0.8, + "cumvar_1": 0.9, + "cumvar_2": 0.95, + "cumvar_3": 0.99, + "cumvar_4": 0.995, + } + result = _expand_scalar_keys("cumvar_1...4", "cumvar_1...4", "layer_0", scalars) + + assert len(result) == 3 + assert result == { + "cumvar_1": "cumvar_1", + "cumvar_2": "cumvar_2", + "cumvar_3": "cumvar_3", + } + + def test_scalar_wildcard_no_matches_raises_error(self): + """Wildcard with no matches should raise an error.""" + scalars = {"other_metric": 1.0} + + with pytest.raises(ConfigValidationError, match="No keys found matching pattern"): + _expand_scalar_keys("cumvar_*", "cumvar_*", "layer_0", scalars) + + def test_scalar_wildcard_requires_key_pattern(self): + """Wildcard expansion without a key should raise an error.""" + scalars = {"metric": 1.0} + + with pytest.raises(ConfigValidationError, match="Scalar wildcard expansion requires a key pattern"): + _expand_scalar_keys("field_*", None, "layer_0", scalars) + + def test_scalar_expansion_sorts_indices(self): + """Expanded scalar keys should be sorted by index.""" + scalars = { + "var_5": 0.5, + "var_1": 0.1, + 
"var_3": 0.3, + "var_2": 0.2, + } + result = _expand_scalar_keys("v_*", "var_*", "layer_0", scalars) + + # Check that keys are in sorted order + keys = list(result.keys()) + assert keys == ["v_1", "v_2", "v_3", "v_5"] + + def test_scalar_wildcard_field_name_pattern_mismatch(self): + """Field pattern but no key pattern should be handled in parent function.""" + # This test verifies that _expand_scalar_keys expects both patterns together + # The validation happens in _expand_field_mapping, not here + scalars = {"metric": 1.0} + + # _expand_scalar_keys just returns identity if no pattern in key + result = _expand_scalar_keys("field_*", "metric", "layer_0", scalars) + assert result == {"field_*": "metric"} + + def test_scalar_range_invalid_format_returns_identity(self): + """Invalid range format (two dots instead of three) should return as-is.""" + scalars = {"metric_1..4": 1.0} + + # Two dots instead of three - not a valid range pattern, returns identity + result = _expand_scalar_keys("field_1..4", "metric_1..4", "layer_0", scalars) + assert result == {"field_1..4": "metric_1..4"} + + def test_scalar_wildcard_with_non_numeric_ignored(self): + """Keys with non-numeric wildcards should be ignored.""" + scalars = { + "metric_0": 0.0, + "metric_1": 0.1, + "metric_abc": 0.2, + "metric_xyz": 0.3, + } + result = _expand_scalar_keys("m_*", "metric_*", "layer_0", scalars) + + # Only numeric indices should be included + assert len(result) == 2 + assert result == { + "m_0": "metric_0", + "m_1": "metric_1", + } + + def test_scalar_expansion_deduplicates_indices(self): + """Duplicate indices should be deduplicated.""" + # In practice this wouldn't happen with scalar keys, but test for robustness + scalars = { + "var_1": 0.1, + "var_01": 0.1, # This would match as index 1 if not carefully handled + } + # This test verifies basic behavior - exact matching prevents this issue + result = _expand_scalar_keys("v_*", "var_*", "layer_0", scalars) + + # Should only match exact numeric 
patterns + assert "v_1" in result + + def test_scalar_range_expansion_with_field_pattern(self): + """Range in both field and key should expand correctly.""" + scalars = { + "metric_0": 0.0, + "metric_1": 0.1, + "metric_2": 0.2, + "metric_3": 0.3, + } + result = _expand_scalar_keys("m_0...3", "metric_0...3", "layer_0", scalars) + + assert len(result) == 3 + assert result == { + "m_0": "metric_0", + "m_1": "metric_1", + "m_2": "metric_2", + } + + def test_scalar_wildcard_complex_key_pattern(self): + """Complex patterns with multiple underscores should work.""" + scalars = { + "layer_0_pca_cumvar_0": 0.8, + "layer_0_pca_cumvar_1": 0.9, + "layer_0_pca_cumvar_2": 0.95, + "layer_1_pca_cumvar_0": 0.7, + } + result = _expand_scalar_keys("pc_cv_*", "layer_0_pca_cumvar_*", "layer_0", scalars) + + assert len(result) == 3 + assert result == { + "pc_cv_0": "layer_0_pca_cumvar_0", + "pc_cv_1": "layer_0_pca_cumvar_1", + "pc_cv_2": "layer_0_pca_cumvar_2", + } diff --git a/tests/activations/test_visualization_modules.py b/tests/activations/test_visualization_modules.py new file mode 100644 index 00000000..9fd566f9 --- /dev/null +++ b/tests/activations/test_visualization_modules.py @@ -0,0 +1,974 @@ +"""Tests for visualization submodules to improve coverage.""" + +from typing import Any, cast + +import numpy as np +import pandas as pd +import pytest + +from simplexity.activations.visualization.data_structures import PreparedMetadata +from simplexity.activations.visualization.dataframe_builders import ( + _apply_sampling, + _build_dataframe, + _build_dataframe_for_mappings, + _build_metadata_columns, + _build_scalar_dataframe, + _build_scalar_series_dataframe, + _extract_base_column_name, + _infer_scalar_series_indices, + _scalar_series_metadata, +) +from simplexity.activations.visualization.field_resolution import ( + _lookup_projection_array, + _lookup_scalar_value, + _maybe_component, + _resolve_belief_states, + _resolve_field, +) +from 
simplexity.activations.visualization.pattern_expansion import ( + _expand_belief_factor_mapping, + _expand_field_mapping, + _expand_pattern_to_indices, + _expand_projection_key_pattern, + _expand_scalar_pattern_ranges, + _get_component_count, + _parse_component_spec, +) +from simplexity.activations.visualization.preprocessing import ( + _apply_preprocessing, + _combine_rgb, + _expand_preprocessing_fields, + _project_to_simplex, +) +from simplexity.activations.visualization_configs import ( + ActivationVisualizationConfig, + ActivationVisualizationDataMapping, + ActivationVisualizationFieldRef, + ActivationVisualizationPreprocessStep, + CombinedMappingSection, + SamplingConfig, + ScalarSeriesMapping, +) +from simplexity.exceptions import ConfigValidationError + + +# pylint: disable=too-many-public-methods +class TestFieldResolution: + """Tests for field_resolution.py functions.""" + + def test_lookup_projection_array_none_key(self): + """Test that None key raises error.""" + with pytest.raises(ConfigValidationError, match="must supply a `key` value"): + _lookup_projection_array({}, "layer_0", None, False) + + def test_lookup_projection_array_not_found(self): + """Test that missing projection raises error.""" + projections = {"layer_0_other": np.array([1, 2, 3])} + with pytest.raises(ConfigValidationError, match="not available for layer"): + _lookup_projection_array(projections, "layer_0", "missing", False) + + def test_lookup_projection_array_concat_layers_exact_match(self): + """Test exact key match with concat_layers.""" + projections = {"my_key": np.array([1, 2, 3])} + result = _lookup_projection_array(projections, "layer_0", "my_key", True) + np.testing.assert_array_equal(result, [1, 2, 3]) + + def test_lookup_projection_array_concat_layers_suffix_match(self): + """Test suffix match with concat_layers.""" + projections = {"prefix_my_key": np.array([4, 5, 6])} + result = _lookup_projection_array(projections, "layer_0", "my_key", True) + 
np.testing.assert_array_equal(result, [4, 5, 6]) + + def test_lookup_scalar_value_concat_layers_exact(self): + """Test scalar lookup with concat_layers exact match.""" + scalars = {"my_scalar": 0.5} + result = _lookup_scalar_value(scalars, "layer_0", "my_scalar", True) + assert result == 0.5 + + def test_lookup_scalar_value_concat_layers_suffix(self): + """Test scalar lookup with concat_layers suffix match.""" + scalars = {"prefix_my_scalar": 0.7} + result = _lookup_scalar_value(scalars, "layer_0", "my_scalar", True) + assert result == 0.7 + + def test_lookup_scalar_value_not_found(self): + """Test that missing scalar raises error.""" + with pytest.raises(ConfigValidationError, match="not available for layer"): + _lookup_scalar_value({"other": 1.0}, "layer_0", "missing", False) + + def test_maybe_component_1d_with_component(self): + """Test that 1D array with component raises error.""" + with pytest.raises(ConfigValidationError, match="invalid for 1D"): + _maybe_component(np.array([1, 2, 3]), 0) + + def test_maybe_component_wrong_dim(self): + """Test that 3D array raises error.""" + with pytest.raises(ConfigValidationError, match="must be 1D or 2D"): + _maybe_component(np.ones((2, 3, 4)), None) + + def test_maybe_component_2d_no_component(self): + """Test that 2D array without component raises error.""" + with pytest.raises(ConfigValidationError, match="must specify `component`"): + _maybe_component(np.ones((3, 4)), None) + + def test_maybe_component_out_of_bounds(self): + """Test that out of bounds component raises error.""" + with pytest.raises(ConfigValidationError, match="out of bounds"): + _maybe_component(np.ones((3, 4)), 10) + + def test_resolve_belief_states_wrong_dim(self): + """Test that 1D belief states raise error.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + with pytest.raises(ConfigValidationError, match="must be 2D or 3D"): + _resolve_belief_states(np.array([1, 2, 3]), ref) + + def 
test_resolve_belief_states_3d_no_factor(self): + """Test that 3D beliefs without factor raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=None) + with pytest.raises(ConfigValidationError, match="no `factor` was specified"): + _resolve_belief_states(np.ones((5, 3, 4)), ref) + + def test_resolve_belief_states_2d_with_factor(self): + """Test that 2D beliefs with factor raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=0) + with pytest.raises(ConfigValidationError, match="Factor selection requires 3D"): + _resolve_belief_states(np.ones((5, 4)), ref) + + def test_resolve_belief_states_factor_out_of_bounds(self): + """Test that out of bounds factor raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=10) + with pytest.raises(ConfigValidationError, match="out of bounds"): + _resolve_belief_states(np.ones((5, 3, 4)), ref) + + def test_resolve_belief_states_component_out_of_bounds(self): + """Test that out of bounds component raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", component=10) + with pytest.raises(ConfigValidationError, match="out of bounds"): + _resolve_belief_states(np.ones((5, 4)), ref) + + def test_resolve_field_metadata_existing_key(self): + """Test metadata source with existing key.""" + ref = ActivationVisualizationFieldRef(source="metadata", key="sample_index") + metadata = {"sample_index": np.array([0, 1, 2])} + result = _resolve_field(ref, "layer_0", {}, {}, None, False, 3, metadata) + np.testing.assert_array_equal(result, [0, 1, 2]) + + def test_resolve_field_metadata_layer(self): + """Test metadata source with layer key.""" + ref = ActivationVisualizationFieldRef(source="metadata", key="layer") + result = _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) + assert list(result) == ["layer_0", "layer_0", "layer_0"] + + def test_resolve_field_metadata_missing_key(self): + """Test metadata source with 
missing key.""" + ref = ActivationVisualizationFieldRef(source="metadata", key="missing") + with pytest.raises(ConfigValidationError, match="not available"): + _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) + + def test_resolve_field_weights_missing(self): + """Test weights source when not available.""" + ref = ActivationVisualizationFieldRef(source="weights") + with pytest.raises(ConfigValidationError, match="unavailable"): + _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) + + def test_resolve_field_belief_states_missing(self): + """Test belief_states source when not available.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + with pytest.raises(ConfigValidationError, match="were not retained"): + _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) + + def test_resolve_field_scalars_success(self): + """Test scalars source returns repeated value.""" + ref = ActivationVisualizationFieldRef(source="scalars", key="my_scalar") + scalars = {"layer_0_my_scalar": 0.42} + result = _resolve_field(ref, "layer_0", {}, scalars, None, False, 3, {}) + np.testing.assert_array_equal(result, [0.42, 0.42, 0.42]) + + def test_resolve_field_unsupported_source(self): + """Test unsupported source raises error.""" + ref = ActivationVisualizationFieldRef(source=cast(Any, "unknown")) + with pytest.raises(ConfigValidationError, match="Unsupported field source"): + _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) + + +# pylint: disable=too-many-public-methods +class TestPatternExpansion: + """Tests for pattern_expansion.py functions.""" + + def test_parse_component_spec_invalid_range_parts(self): + """Test that malformed range raises error.""" + with pytest.raises(ConfigValidationError, match="Invalid range"): + _parse_component_spec("1...2...3") + + def test_parse_component_spec_range_not_ascending(self): + """Test that descending range raises error.""" + with pytest.raises(ConfigValidationError, match="start must be < end"): + 
_parse_component_spec("5...3") + + def test_parse_component_spec_non_numeric_range(self): + """Test that non-numeric range raises error.""" + with pytest.raises(ConfigValidationError, match="Invalid range"): + _parse_component_spec("a...b") + + def test_parse_component_spec_unrecognized(self): + """Test that unrecognized pattern raises error.""" + with pytest.raises(ConfigValidationError, match="Unrecognized component pattern"): + _parse_component_spec("invalid") + + def test_expand_pattern_to_indices_no_pattern(self): + """Test that pattern without wildcards raises error.""" + with pytest.raises(ConfigValidationError, match="has no wildcard or range"): + _expand_pattern_to_indices("plain_key", ["key_0", "key_1"]) + + def test_expand_pattern_to_indices_no_matches(self): + """Test that no matches raises error.""" + with pytest.raises(ConfigValidationError, match="No keys found"): + _expand_pattern_to_indices("missing_*", ["key_0", "key_1"]) + + def test_expand_pattern_to_indices_non_numeric_ignored(self): + """Test that non-numeric matches are ignored.""" + keys = ["item_0", "item_1", "item_abc"] + result = _expand_pattern_to_indices("item_*", keys) + assert result == [0, 1] + + def test_get_component_count_projection_success(self): + """Test getting component count from 2D projection.""" + ref = ActivationVisualizationFieldRef(source="projections", key="proj", component="*") + projections = {"layer_0_proj": np.ones((10, 5))} + result = _get_component_count(ref, "layer_0", projections, None, False) + assert result == 5 + + def test_get_component_count_1d_projection(self): + """Test that 1D projection raises error for expansion.""" + ref = ActivationVisualizationFieldRef(source="projections", key="proj") + projections = {"layer_0_proj": np.array([1, 2, 3])} + with pytest.raises(ConfigValidationError, match="Cannot expand 1D"): + _get_component_count(ref, "layer_0", projections, None, False) + + def test_get_component_count_belief_states_missing(self): + """Test that 
missing belief states raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + with pytest.raises(ConfigValidationError, match="not available"): + _get_component_count(ref, "layer_0", {}, None, False) + + def test_get_component_count_belief_states_wrong_dim(self): + """Test that non-2D belief states raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states") + with pytest.raises(ConfigValidationError, match="must be 2D"): + _get_component_count(ref, "layer_0", {}, np.ones((2, 3, 4)), False) + + def test_get_component_count_unsupported_source(self): + """Test that unsupported source raises error.""" + ref = ActivationVisualizationFieldRef(source="metadata", key="test") + with pytest.raises(ConfigValidationError, match="not supported"): + _get_component_count(ref, "layer_0", {}, None, False) + + def test_expand_projection_key_pattern_invalid(self): + """Test that invalid key pattern raises error.""" + with pytest.raises(ConfigValidationError, match="Invalid key pattern"): + _expand_projection_key_pattern("plain_key", "layer_0", {}, False) + + def test_expand_projection_key_pattern_invalid_range(self): + """Test that invalid range in key pattern raises error.""" + with pytest.raises(ConfigValidationError, match="Invalid range"): + _expand_projection_key_pattern("key_5...3", "layer_0", {}, False) + + def test_expand_projection_key_pattern_no_matches(self): + """Test that no matching projections raises error.""" + projections = {"layer_0_other": np.ones((3, 4))} + with pytest.raises(ConfigValidationError, match="No projection keys found"): + _expand_projection_key_pattern("key_*", "layer_0", projections, False) + + def test_expand_belief_factor_mapping_wrong_dim(self): + """Test that non-3D beliefs for factor expansion raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, component=0) + # Manually set factor to pattern string to bypass validation + object.__setattr__(ref, "factor", "*") 
+ with pytest.raises(ConfigValidationError, match="require 3D beliefs"): + _expand_belief_factor_mapping("field_*", ref, np.ones((5, 4))) + + def test_expand_belief_factor_mapping_invalid_factor(self): + """Test that invalid factor pattern raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, component=0) + # Manually set factor to invalid string to bypass validation + object.__setattr__(ref, "factor", "invalid") + with pytest.raises(ConfigValidationError, match="Invalid factor pattern"): + _expand_belief_factor_mapping("field_*", ref, np.ones((5, 3, 4))) + + def test_expand_belief_factor_mapping_factor_out_of_bounds(self): + """Test that out of bounds factor range raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor="0...10", group_as="factor") + with pytest.raises(ConfigValidationError, match="exceeds available factors"): + _expand_belief_factor_mapping("field_*", ref, np.ones((5, 3, 4))) + + def test_expand_belief_factor_mapping_component_out_of_bounds(self): + """Test that out of bounds component range raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor="*", component="0...10", group_as="factor") + with pytest.raises(ConfigValidationError, match="exceeds states"): + _expand_belief_factor_mapping("f_*_c_*", ref, np.ones((5, 2, 4))) + + def test_expand_scalar_pattern_ranges_invalid(self): + """Test that invalid range in scalar pattern raises error.""" + with pytest.raises(ConfigValidationError, match="Invalid range pattern"): + _expand_scalar_pattern_ranges("metric_5...3") + + def test_expand_field_mapping_projection_no_field_pattern(self): + """Test projection key pattern without field pattern raises error.""" + ref = ActivationVisualizationFieldRef(source="projections", key="factor_*", group_as="factor") + with pytest.raises(ConfigValidationError, match="requires field name pattern"): + _expand_field_mapping("plain_field", ref, "layer_0", {}, {}, None, 
False) + + def test_expand_field_mapping_projection_too_many_patterns(self): + """Test projection with too many field patterns raises error.""" + ref = ActivationVisualizationFieldRef(source="projections", key="factor_*", group_as="factor") + with pytest.raises(ConfigValidationError, match="too many patterns"): + _expand_field_mapping("f_*_g_*_h_*", ref, "layer_0", {}, {}, None, False) + + def test_expand_field_mapping_belief_no_field_pattern(self): + """Test belief factor pattern without field pattern raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor="*", group_as="factor") + beliefs = np.ones((5, 3, 4)) + with pytest.raises(ConfigValidationError, match="requires field name pattern"): + _expand_field_mapping("plain_field", ref, "layer_0", {}, {}, beliefs, False) + + def test_expand_field_mapping_belief_too_many_patterns(self): + """Test belief with too many field patterns raises error.""" + ref = ActivationVisualizationFieldRef(source="belief_states", factor="*", group_as="factor") + beliefs = np.ones((5, 3, 4)) + with pytest.raises(ConfigValidationError, match="too many patterns"): + _expand_field_mapping("f_*_g_*_h_*", ref, "layer_0", {}, {}, beliefs, False) + + def test_expand_field_mapping_scalar_field_pattern_no_key_pattern(self): + """Test scalar with field pattern but no key pattern raises error.""" + ref = ActivationVisualizationFieldRef(source="scalars", key="plain_key") + with pytest.raises(ConfigValidationError, match="has pattern but scalar key has no pattern"): + _expand_field_mapping("field_*", ref, "layer_0", {}, {"plain_key": 1.0}, None, False) + + def test_expand_field_mapping_scalar_key_pattern_no_field_pattern(self): + """Test scalar with key pattern but no field pattern raises error.""" + ref = ActivationVisualizationFieldRef(source="scalars", key="metric_*") + with pytest.raises(ConfigValidationError, match="requires field name pattern"): + _expand_field_mapping("plain_field", ref, "layer_0", {}, 
{"metric_0": 1.0}, None, False) + + +class TestPreprocessing: + """Tests for preprocessing.py functions.""" + + def test_expand_preprocessing_fields_no_matches(self): + """Test that wildcard with no matches raises error.""" + with pytest.raises(ConfigValidationError, match="did not match any columns"): + _expand_preprocessing_fields(["missing_*"], ["col_a", "col_b"]) + + def test_expand_preprocessing_fields_range_missing_column(self): + """Test that range expanding to missing column raises error.""" + with pytest.raises(ConfigValidationError, match="column not found"): + _expand_preprocessing_fields(["col_0...3"], ["col_0", "col_1"]) + + def test_apply_preprocessing_output_pattern_error(self): + """Test that output field with pattern raises error.""" + df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}) + step = ActivationVisualizationPreprocessStep( + type="project_to_simplex", input_fields=["a", "b", "c"], output_fields=["out_*", "out_y"] + ) + with pytest.raises(ConfigValidationError, match="cannot contain patterns"): + _apply_preprocessing(df, [step]) + + def test_apply_preprocessing_output_range_pattern_error(self): + """Test that output field with range pattern raises error.""" + df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}) + step = ActivationVisualizationPreprocessStep( + type="project_to_simplex", input_fields=["a", "b", "c"], output_fields=["out_0...3", "out_y"] + ) + with pytest.raises(ConfigValidationError, match="cannot contain patterns"): + _apply_preprocessing(df, [step]) + + def test_project_to_simplex_missing_column(self): + """Test that missing column raises error.""" + df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) + step = ActivationVisualizationPreprocessStep( + type="project_to_simplex", input_fields=["a", "b", "missing"], output_fields=["x", "y"] + ) + with pytest.raises(ConfigValidationError, match="missing from the dataframe"): + _project_to_simplex(df, step) + + def 
test_project_to_simplex_success(self): + """Test successful simplex projection.""" + df = pd.DataFrame({"p0": [0.5, 0.3], "p1": [0.3, 0.4], "p2": [0.2, 0.3]}) + step = ActivationVisualizationPreprocessStep( + type="project_to_simplex", input_fields=["p0", "p1", "p2"], output_fields=["x", "y"] + ) + result = _project_to_simplex(df, step) + assert "x" in result.columns + assert "y" in result.columns + # x = p1 + 0.5 * p2 + np.testing.assert_allclose(result["x"], [0.3 + 0.1, 0.4 + 0.15]) + # y = sqrt(3)/2 * p2 + np.testing.assert_allclose(result["y"], [0.2 * np.sqrt(3) / 2, 0.3 * np.sqrt(3) / 2]) + + def test_combine_rgb_wrong_output_count(self): + """Test that combine_rgb with wrong output count raises error.""" + df = pd.DataFrame({"r": [0.5], "g": [0.5], "b": [0.5]}) + # Create step manually to bypass validation + step = ActivationVisualizationPreprocessStep.__new__(ActivationVisualizationPreprocessStep) + object.__setattr__(step, "type", "combine_rgb") + object.__setattr__(step, "input_fields", ["r", "g", "b"]) + object.__setattr__(step, "output_fields", ["color1", "color2"]) + with pytest.raises(ConfigValidationError, match="exactly one output_field"): + _combine_rgb(df, step) + + def test_combine_rgb_too_few_inputs(self): + """Test that combine_rgb with <3 inputs raises error.""" + df = pd.DataFrame({"r": [0.5], "g": [0.5]}) + # Create step manually to bypass validation + step = ActivationVisualizationPreprocessStep.__new__(ActivationVisualizationPreprocessStep) + object.__setattr__(step, "type", "combine_rgb") + object.__setattr__(step, "input_fields", ["r", "g"]) + object.__setattr__(step, "output_fields", ["color"]) + with pytest.raises(ConfigValidationError, match="at least three"): + _combine_rgb(df, step) + + def test_combine_rgb_missing_column(self): + """Test that missing column raises error.""" + df = pd.DataFrame({"r": [0.5], "g": [0.5]}) + step = ActivationVisualizationPreprocessStep( + type="combine_rgb", input_fields=["r", "g", "missing"], 
output_fields=["color"] + ) + with pytest.raises(ConfigValidationError, match="missing from the dataframe"): + _combine_rgb(df, step) + + def test_combine_rgb_3_inputs(self): + """Test combine_rgb with exactly 3 inputs. + + Note: combine_rgb performs per-column min-max normalization, so to get + expected colors we need data where each column spans [0, 1]. + """ + df = pd.DataFrame({"r": [0.0, 1.0, 0.5], "g": [0.0, 1.0, 0.5], "b": [0.0, 1.0, 0.5]}) + step = ActivationVisualizationPreprocessStep( + type="combine_rgb", input_fields=["r", "g", "b"], output_fields=["color"] + ) + result = _combine_rgb(df, step) + assert result["color"].iloc[0] == "#000000" # black + assert result["color"].iloc[1] == "#ffffff" # white + assert result["color"].iloc[2] == "#808080" # gray + + def test_combine_rgb_more_than_3_inputs_pca(self): + """Test combine_rgb with >3 inputs triggers PCA path.""" + # Create data with 4 features + np.random.seed(42) + df = pd.DataFrame( + {"f0": np.random.rand(10), "f1": np.random.rand(10), "f2": np.random.rand(10), "f3": np.random.rand(10)} + ) + step = ActivationVisualizationPreprocessStep( + type="combine_rgb", input_fields=["f0", "f1", "f2", "f3"], output_fields=["color"] + ) + result = _combine_rgb(df, step) + assert "color" in result.columns + # All colors should be valid hex colors + for color in result["color"]: + assert color.startswith("#") + assert len(color) == 7 + + def test_combine_rgb_pca_few_samples(self): + """Test combine_rgb PCA path with fewer samples than components.""" + # Create 2 samples with 4 features - PCA will have <3 components + df = pd.DataFrame({"f0": [0.1, 0.9], "f1": [0.2, 0.8], "f2": [0.3, 0.7], "f3": [0.4, 0.6]}) + step = ActivationVisualizationPreprocessStep( + type="combine_rgb", input_fields=["f0", "f1", "f2", "f3"], output_fields=["color"] + ) + result = _combine_rgb(df, step) + assert "color" in result.columns + assert len(result) == 2 + + def test_apply_preprocessing_project_to_simplex(self): + """Test full 
preprocessing pipeline with project_to_simplex.""" + df = pd.DataFrame({"p0": [0.5, 0.3], "p1": [0.3, 0.4], "p2": [0.2, 0.3]}) + steps = [ + ActivationVisualizationPreprocessStep( + type="project_to_simplex", input_fields=["p0", "p1", "p2"], output_fields=["x", "y"] + ) + ] + result = _apply_preprocessing(df, steps) + assert "x" in result.columns + assert "y" in result.columns + + def test_apply_preprocessing_combine_rgb(self): + """Test full preprocessing pipeline with combine_rgb.""" + df = pd.DataFrame({"r": [0.5], "g": [0.5], "b": [0.5]}) + steps = [ + ActivationVisualizationPreprocessStep( + type="combine_rgb", input_fields=["r", "g", "b"], output_fields=["color"] + ) + ] + result = _apply_preprocessing(df, steps) + assert "color" in result.columns + + def test_apply_preprocessing_with_pattern_expansion(self): + """Test preprocessing with pattern expansion in input fields.""" + df = pd.DataFrame({"val_0": [0.2], "val_1": [0.3], "val_2": [0.5]}) + steps = [ + ActivationVisualizationPreprocessStep( + type="project_to_simplex", input_fields=["val_*"], output_fields=["x", "y"] + ) + ] + result = _apply_preprocessing(df, steps) + assert "x" in result.columns + assert "y" in result.columns + + +# pylint: disable=too-many-public-methods +class TestDataframeBuilders: + """Tests for dataframe_builders.py functions.""" + + def test_extract_base_column_name_with_group_pattern(self): + """Test extracting base column name with group value pattern.""" + result = _extract_base_column_name("factor_0_projected", "0") + assert result == "projected" + + def test_extract_base_column_name_no_pattern(self): + """Test extracting base column name when no pattern.""" + result = _extract_base_column_name("my_column", "0") + assert result == "my_column" + + def test_extract_base_column_name_no_match(self): + """Test extracting base column name when pattern doesn't match.""" + result = _extract_base_column_name("other_column", "0") + assert result == "other_column" + + def 
test_scalar_series_metadata_with_arrays(self): + """Test extracting metadata from arrays.""" + metadata = {"step": np.array([10]), "name": np.array(["test"])} + result = _scalar_series_metadata(metadata) + assert result["step"] == 10 + assert result["name"] == "test" + + def test_scalar_series_metadata_with_empty_array(self): + """Test that empty arrays are skipped.""" + metadata = {"step": np.array([10]), "empty": np.array([])} + result = _scalar_series_metadata(metadata) + assert result["step"] == 10 + assert "empty" not in result + + def test_scalar_series_metadata_with_scalar(self): + """Test extracting metadata from scalar values.""" + metadata = {"step": 10, "name": "test"} + result = _scalar_series_metadata(metadata) + assert result["step"] == 10 + assert result["name"] == "test" + + def test_infer_scalar_series_indices_success(self): + """Test inferring scalar series indices from available keys.""" + mapping = ScalarSeriesMapping( + key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + ) + scalars = { + "analysis/layer_0_cumvar_0": 0.5, + "analysis/layer_0_cumvar_1": 0.7, + "analysis/layer_0_cumvar_2": 0.9, + } + result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") + assert result == [0, 1, 2] + + def test_infer_scalar_series_indices_empty_body(self): + """Test that empty body between prefix and suffix is skipped.""" + mapping = ScalarSeriesMapping( + key_template="{layer}_pc{index}_var", index_field="component", value_field="variance" + ) + # Key that matches prefix and suffix but has empty body + scalars = { + "analysis/layer_0_pc_var": 0.5, # Empty between pc and _var + "analysis/layer_0_pc0_var": 0.3, + } + result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") + assert result == [0] # Only numeric index included + + def test_infer_scalar_series_indices_no_matches(self): + """Test that no matching indices raises error.""" + mapping = ScalarSeriesMapping( + 
key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + ) + scalars = {"analysis/other_metric": 1.0} + with pytest.raises(ConfigValidationError, match="could not infer indices"): + _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") + + def test_infer_scalar_series_indices_with_suffix(self): + """Test inferring indices when template has suffix after index.""" + mapping = ScalarSeriesMapping( + key_template="{layer}_pc{index}_var", index_field="component", value_field="variance" + ) + scalars = { + "analysis/layer_0_pc0_var": 0.5, + "analysis/layer_0_pc1_var": 0.3, + "analysis/layer_0_pc2_var": 0.2, + "analysis/layer_0_other": 1.0, # Should not match + } + result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") + assert result == [0, 1, 2] + + def test_infer_scalar_series_indices_non_numeric_skipped(self): + """Test that non-numeric values are skipped.""" + mapping = ScalarSeriesMapping(key_template="{layer}_item_{index}", index_field="idx", value_field="val") + scalars = { + "analysis/layer_0_item_0": 0.5, + "analysis/layer_0_item_abc": 0.7, # Non-numeric, should be skipped + "analysis/layer_0_item_1": 0.9, + } + result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") + assert result == [0, 1] + + def test_build_scalar_series_dataframe_success(self): + """Test building scalar series dataframe.""" + mapping = ScalarSeriesMapping( + key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + ) + metadata = {"step": np.array([10]), "analysis": np.array(["pca"])} + scalars = { + "analysis/layer_0_cumvar_0": 0.5, + "analysis/layer_0_cumvar_1": 0.7, + "analysis/layer_1_cumvar_0": 0.6, + } + result = _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0", "layer_1"], "analysis") + assert len(result) == 3 + assert "component" in result.columns + assert "cumvar" in result.columns + assert "layer" in result.columns + + def 
test_build_scalar_series_dataframe_no_matches(self): + """Test that no matching scalars raises error.""" + mapping = ScalarSeriesMapping( + key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + ) + metadata = {"step": np.array([10])} + scalars = {"analysis/other_metric": 1.0} + # Error comes from _infer_scalar_series_indices when no indices are found + with pytest.raises(ConfigValidationError, match="could not infer indices"): + _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0"], "analysis") + + def test_build_scalar_series_dataframe_with_explicit_indices(self): + """Test building scalar series dataframe with explicit index_values.""" + mapping = ScalarSeriesMapping( + key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar", index_values=[0, 1] + ) + metadata = {"step": np.array([10])} + scalars = { + "analysis/layer_0_cumvar_0": 0.5, + "analysis/layer_0_cumvar_1": 0.7, + "analysis/layer_0_cumvar_2": 0.9, # Not in index_values, should be skipped + } + result = _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0"], "analysis") + assert len(result) == 2 + assert list(result["component"]) == [0, 1] + + def test_build_scalar_dataframe_scalar_pattern(self): + """Test building scalar dataframe with scalar_pattern source.""" + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_rmse")} + scalars = { + "analysis/layer_0_rmse": 0.1, + "analysis/layer_1_rmse": 0.2, + } + result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) + assert len(result) == 2 + assert "step" in result.columns + assert "rmse" in result.columns + assert all(result["step"] == 5) + + def test_build_scalar_dataframe_scalar_history(self): + """Test building scalar dataframe with scalar_history source.""" + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_history", key="metric")} + scalars = {} + scalar_history = {"analysis/metric": [(0, 
0.5), (10, 0.3), (20, 0.1)]} + result = _build_scalar_dataframe(mappings, scalars, scalar_history, "analysis", 20) + assert len(result) == 3 + assert list(result["step"]) == [0, 10, 20] + + def test_build_scalar_dataframe_scalar_history_fallback(self): + """Test scalar_history falls back to current scalars when no history.""" + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_history", key="metric")} + scalars = {"analysis/metric": 0.42} + scalar_history = {} + result = _build_scalar_dataframe(mappings, scalars, scalar_history, "analysis", 5) + assert len(result) == 1 + assert result["step"].iloc[0] == 5 + assert result["rmse"].iloc[0] == 0.42 + + def test_build_scalar_dataframe_no_matches(self): + """Test that no matching scalars raises error.""" + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="missing_*")} + scalars = {"analysis/other": 1.0} + with pytest.raises(ConfigValidationError, match="No scalar pattern keys found"): + _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) + + def test_build_scalar_dataframe_non_scalar_source_skipped(self): + """Test that non-scalar sources are skipped.""" + mappings = { + "proj": ActivationVisualizationFieldRef(source="projections", key="my_proj"), + "rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_rmse"), + } + scalars = {"analysis/layer_0_rmse": 0.1} + result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) + # Only scalar_pattern should be in result + assert "rmse" in result.columns + assert len(result) == 1 + + def test_build_scalar_dataframe_simple_key(self): + """Test scalar_pattern with non-pattern key.""" + # Use field name "value" to avoid conflict with hardcoded "metric" column + mappings = {"value": ActivationVisualizationFieldRef(source="scalar_pattern", key="my_metric")} + scalars = {"analysis/my_metric": 0.42} + result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 10) + assert len(result) == 
1 + assert result["value"].iloc[0] == 0.42 + assert result["metric"].iloc[0] == "analysis/my_metric" # Check the metric key column + + def test_build_scalar_dataframe_key_none(self): + """Test that scalar_pattern with key=None raises error.""" + ref = ActivationVisualizationFieldRef(source="scalar_pattern", key="placeholder") + # Bypass validation to set key to None + object.__setattr__(ref, "key", None) + mappings = {"value": ref} + with pytest.raises(ConfigValidationError, match="must specify a key"): + _build_scalar_dataframe(mappings, {"analysis/test": 1.0}, {}, "analysis", 5) + + def test_build_scalar_dataframe_no_matching_values(self): + """Test that no matching values raises error with pattern.""" + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_missing")} + # Scalars exist but don't match the pattern + scalars = {"analysis/layer_0_other": 0.1, "analysis/something_else": 0.2} + with pytest.raises(ConfigValidationError, match="No scalar pattern keys found"): + _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) + + def test_build_metadata_columns(self): + """Test building metadata columns.""" + + sequences: list[tuple[int, ...]] = [(1, 2, 3), (4, 5)] + steps = np.array([3, 2]) + metadata = PreparedMetadata(sequences=sequences, steps=steps, select_last_token=False) + weights = np.array([1.0, 0.5]) + result = _build_metadata_columns("my_analysis", metadata, weights) + assert "analysis" in result + assert "step" in result + assert "sequence_length" in result + assert "sequence" in result + assert "sample_index" in result + assert "weight" in result + assert list(result["analysis"]) == ["my_analysis", "my_analysis"] + assert list(result["step"]) == [3, 2] + assert list(result["weight"]) == [1.0, 0.5] + + def test_build_dataframe_for_mappings_simple(self): + """Test _build_dataframe_for_mappings with simple projection mapping.""" + mappings = {"x": ActivationVisualizationFieldRef(source="projections", key="pca", 
component=0)} + metadata = {"step": np.array([1, 2]), "analysis": np.array(["test", "test"])} + projections = {"layer_0_pca": np.array([[0.1, 0.2], [0.3, 0.4]])} + result = _build_dataframe_for_mappings(mappings, metadata, projections, {}, None, False, ["layer_0"]) + assert "x" in result.columns + assert "layer" in result.columns + assert len(result) == 2 + + def test_build_dataframe_for_mappings_belief_only(self): + """Test _build_dataframe_for_mappings with belief_states only (no layer iteration).""" + mappings = {"belief": ActivationVisualizationFieldRef(source="belief_states", component=0)} + metadata = {"step": np.array([1, 2])} + beliefs = np.array([[0.8, 0.2], [0.6, 0.4]]) + result = _build_dataframe_for_mappings(mappings, metadata, {}, {}, beliefs, False, ["layer_0"]) + assert "belief" in result.columns + assert len(result) == 2 + # Belief-only mode uses "_no_layer_" placeholder + assert result["layer"].iloc[0] == "_no_layer_" + + def test_build_dataframe_for_mappings_with_groups(self): + """Test _build_dataframe_for_mappings with group expansion.""" + # Use belief_states with factor pattern to trigger group expansion + # field_name has one *, factor has one *, so component expansion happens + mappings = { + "prob_*": ActivationVisualizationFieldRef( + source="belief_states", factor="*", component=0, group_as="factor" + ) + } + metadata = {"step": np.array([1])} + # 3D beliefs: (samples, factors, states) + beliefs = np.array([[[0.8, 0.2], [0.6, 0.4]]]) # 1 sample, 2 factors, 2 states + result = _build_dataframe_for_mappings(mappings, metadata, {}, {}, beliefs, False, ["layer_0"]) + assert "factor" in result.columns + # Factor expansion creates separate prob_0 and prob_1 columns + assert "prob_0" in result.columns or "prob_1" in result.columns + # Should have 2 rows (one per factor group) + assert len(result) == 2 + + def test_build_dataframe_for_mappings_error_wrapping(self): + """Test that errors from _expand_field_mapping are wrapped with context.""" + # 
Create a mapping with a key pattern that will fail expansion due to no matching projections + # The key "factor_*" is a pattern that needs expansion, which fails when no projections match + mappings = {"x_*": ActivationVisualizationFieldRef(source="projections", key="factor_*", group_as="factor")} + metadata = {"step": np.array([1])} + with pytest.raises(ConfigValidationError, match="Error expanding 'x_\\*' for layer"): + _build_dataframe_for_mappings(mappings, metadata, {}, {}, None, False, ["layer_0"]) + + def test_build_dataframe_with_scalar_pattern(self): + """Test _build_dataframe with scalar_pattern source.""" + data_mapping = ActivationVisualizationDataMapping( + mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_rmse")} + ) + viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) + metadata = {"step": np.array([1]), "analysis": np.array(["test"])} + scalars = {"test/layer_0_rmse": 0.1, "test/layer_1_rmse": 0.2} + result = _build_dataframe(viz_cfg, metadata, {}, scalars, {}, 10, None, False, ["layer_0", "layer_1"]) + assert "rmse" in result.columns + assert len(result) == 2 + + def test_build_dataframe_with_scalar_series(self): + """Test _build_dataframe with scalar_series source.""" + scalar_series = ScalarSeriesMapping( + key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + ) + data_mapping = ActivationVisualizationDataMapping(mappings={}, scalar_series=scalar_series) + viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) + metadata = {"step": np.array([1]), "analysis": np.array(["test"])} + scalars = {"test/layer_0_cumvar_0": 0.5, "test/layer_0_cumvar_1": 0.7} + result = _build_dataframe(viz_cfg, metadata, {}, scalars, {}, None, None, False, ["layer_0"]) + assert "component" in result.columns + assert "cumvar" in result.columns + + def test_build_dataframe_combined_mappings(self): + """Test _build_dataframe with combined mappings.""" 
+ combined = [ + CombinedMappingSection( + label="projected", + mappings={"x": ActivationVisualizationFieldRef(source="projections", key="pca", component=0)}, + ), + CombinedMappingSection( + label="raw", + mappings={"x": ActivationVisualizationFieldRef(source="projections", key="raw", component=0)}, + ), + ] + data_mapping = ActivationVisualizationDataMapping(mappings={}, combined=combined, combine_as="source") + viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) + metadata = {"step": np.array([1])} + projections = { + "layer_0_pca": np.array([[0.1, 0.2]]), + "layer_0_raw": np.array([[0.5, 0.6]]), + } + result = _build_dataframe(viz_cfg, metadata, projections, {}, {}, None, None, False, ["layer_0"]) + assert "source" in result.columns + assert set(result["source"]) == {"projected", "raw"} + assert len(result) == 2 + + def test_build_dataframe_scalar_pattern_no_step(self): + """Test that scalar_pattern without step raises error.""" + data_mapping = ActivationVisualizationDataMapping( + mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="metric")} + ) + viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) + metadata = {"step": np.array([1]), "analysis": np.array(["test"])} + with pytest.raises(ConfigValidationError, match="without the `step` parameter"): + _build_dataframe(viz_cfg, metadata, {}, {"test/metric": 0.1}, {}, None, None, False, []) + + def test_build_dataframe_scalar_pattern_no_analysis(self): + """Test that scalar_pattern without analysis metadata raises error.""" + data_mapping = ActivationVisualizationDataMapping( + mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="metric")} + ) + viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) + metadata = {"step": np.array([1])} # No "analysis" key + with pytest.raises(ConfigValidationError, match="requires 'analysis'"): + _build_dataframe(viz_cfg, metadata, {}, 
{"test/metric": 0.1}, {}, 10, None, False, []) + + def test_build_dataframe_scalar_series_no_analysis(self): + """Test that scalar_series without analysis metadata raises error.""" + scalar_series = ScalarSeriesMapping( + key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + ) + data_mapping = ActivationVisualizationDataMapping(mappings={}, scalar_series=scalar_series) + viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) + metadata = {"step": np.array([1])} # No "analysis" key + with pytest.raises(ConfigValidationError, match="requires 'analysis'"): + _build_dataframe(viz_cfg, metadata, {}, {}, {}, None, None, False, ["layer_0"]) + + +class TestSampling: + """Tests for DataFrame sampling functionality.""" + + def test_sampling_reduces_size_no_facets(self): + """Test that sampling reduces DataFrame size when no facet columns present.""" + df = pd.DataFrame({"a": range(100), "b": range(100)}) + config = SamplingConfig(max_points=20, seed=42) + result = _apply_sampling(df, config, facet_columns=[]) + assert len(result) == 20 + + def test_sampling_no_reduction_when_under_limit(self): + """Test that sampling returns original DataFrame when size <= max_points.""" + df = pd.DataFrame({"a": range(10), "b": range(10)}) + config = SamplingConfig(max_points=20, seed=42) + result = _apply_sampling(df, config, facet_columns=[]) + assert len(result) == 10 + pd.testing.assert_frame_equal(result, df) + + def test_sampling_per_facet_group(self): + """Test that sampling applies per facet group.""" + df = pd.DataFrame( + { + "factor": ["0"] * 50 + ["1"] * 50 + ["2"] * 50, + "value": range(150), + } + ) + config = SamplingConfig(max_points=10, seed=42) + result = _apply_sampling(df, config, facet_columns=["factor"]) + + assert len(result) == 30 # 10 per factor * 3 factors + for factor in ["0", "1", "2"]: + factor_count = len(result[result["factor"] == factor]) + assert factor_count == 10 + + def 
test_sampling_multiple_facet_columns(self): + """Test sampling with multiple facet columns.""" + df = pd.DataFrame( + { + "layer": ["layer_0"] * 40 + ["layer_1"] * 40, + "factor": (["0"] * 20 + ["1"] * 20) * 2, + "value": range(80), + } + ) + config = SamplingConfig(max_points=5, seed=42) + result = _apply_sampling(df, config, facet_columns=["layer", "factor"]) + + # Should have 4 groups (2 layers * 2 factors), each with max 5 points + assert len(result) == 20 + for layer in ["layer_0", "layer_1"]: + for factor in ["0", "1"]: + group_count = len(result[(result["layer"] == layer) & (result["factor"] == factor)]) + assert group_count == 5 + + def test_sampling_ignores_missing_facet_columns(self): + """Test that non-existent facet columns are ignored.""" + df = pd.DataFrame({"a": range(100), "value": range(100)}) + config = SamplingConfig(max_points=20, seed=42) + # facet_columns includes "factor" which doesn't exist + result = _apply_sampling(df, config, facet_columns=["factor", "layer"]) + # Should sample globally since no facet columns exist + assert len(result) == 20 + + def test_sampling_seed_reproducibility(self): + """Test that seed produces reproducible results.""" + df = pd.DataFrame({"a": range(100), "b": range(100)}) + config = SamplingConfig(max_points=20, seed=42) + + result1 = _apply_sampling(df, config, facet_columns=[]) + result2 = _apply_sampling(df, config, facet_columns=[]) + + pd.testing.assert_frame_equal(result1.reset_index(drop=True), result2.reset_index(drop=True)) + + def test_sampling_none_max_points_returns_original(self): + """Test that None max_points returns DataFrame unchanged.""" + df = pd.DataFrame({"a": range(100), "b": range(100)}) + config = SamplingConfig(max_points=None) + result = _apply_sampling(df, config, facet_columns=[]) + pd.testing.assert_frame_equal(result, df) + + def test_sampling_config_validation_negative(self): + """Test that negative max_points raises error.""" + with pytest.raises(ConfigValidationError, 
match="positive integer"): + SamplingConfig(max_points=-1) + + def test_sampling_config_validation_zero(self): + """Test that zero max_points raises error.""" + with pytest.raises(ConfigValidationError, match="positive integer"): + SamplingConfig(max_points=0) diff --git a/tests/activations/test_visualization_persistence.py b/tests/activations/test_visualization_persistence.py new file mode 100644 index 00000000..58f782f9 --- /dev/null +++ b/tests/activations/test_visualization_persistence.py @@ -0,0 +1,89 @@ +"""Tests for visualization persistence helpers.""" + +from __future__ import annotations + +import pandas as pd + +from simplexity.activations.activation_visualizations import ( + ActivationVisualizationPayload, + VisualizationControlDetail, + VisualizationControlsState, + render_visualization, +) +from simplexity.activations.visualization_persistence import save_visualization_payloads +from simplexity.visualization.history import history_paths +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + ChannelAestheticsConfig, + DataConfig, + GeometryConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, +) + + +def _plot_config() -> PlotConfig: + layer = LayerConfig( + geometry=GeometryConfig(type="line"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="step", type="quantitative"), + y=ChannelAestheticsConfig(field="value", type="quantitative"), + ), + ) + return PlotConfig( + backend="altair", + data=DataConfig(source="main"), + layers=[layer], + size=PlotSizeConfig(), + guides=PlotLevelGuideConfig(), + ) + + +def _payload(dataframe: pd.DataFrame) -> ActivationVisualizationPayload: + cfg = _plot_config() + controls = VisualizationControlsState( + slider=VisualizationControlDetail( + type="slider", + field="step", + options=list(pd.unique(dataframe["step"])) if "step" in dataframe else [], + ) + ) + figure = render_visualization(cfg, dataframe, controls) + return ActivationVisualizationPayload( 
+ analysis="analysis", + name="viz", + backend="altair", + figure=figure, + dataframe=dataframe, + controls=controls, + plot_config=cfg, + ) + + +def test_save_visualization_payloads_accumulates_step_history(tmp_path): + """Test that visualization payloads accumulate history across steps.""" + df_first = pd.DataFrame({"step": [0, 0], "value": [0.1, 0.2]}) + payload_one = _payload(df_first) + + save_visualization_payloads({"analysis/viz": payload_one}, tmp_path, step=1) + + data_path, _ = history_paths(tmp_path, "analysis_viz") + assert data_path.exists() + history_df = pd.read_json(data_path, orient="records", lines=True) + assert len(history_df) == len(df_first) + assert set(history_df["step"]) == {1} + assert set(history_df["sequence_step"]) == {0} + assert (tmp_path / "step_00001" / "analysis" / "viz.html").exists() + + df_second = pd.DataFrame({"step": [1], "value": [0.5]}) + payload_two = _payload(df_second) + + save_visualization_payloads({"analysis/viz": payload_two}, tmp_path, step=2) + + history_df = pd.read_json(data_path, orient="records", lines=True) + assert len(history_df) == len(df_first) + len(df_second) + assert set(history_df["step"]) == {1, 2} + assert set(history_df["sequence_step"]) == {0, 1} + assert (tmp_path / "step_00002" / "analysis" / "viz.html").exists() diff --git a/tests/analysis/test_layerwise_analysis.py b/tests/analysis/test_layerwise_analysis.py index 7524647d..c0d1a839 100644 --- a/tests/analysis/test_layerwise_analysis.py +++ b/tests/analysis/test_layerwise_analysis.py @@ -1,6 +1,5 @@ """Tests for the LayerwiseAnalysis orchestrator.""" -import jax import jax.numpy as jnp import pytest @@ -8,7 +7,7 @@ @pytest.fixture -def analysis_inputs() -> tuple[dict[str, jax.Array], jax.Array, jax.Array]: +def analysis_inputs() -> tuple[dict[str, jnp.ndarray], jnp.ndarray, jnp.ndarray]: """Provides sample activations, weights, and belief states for analysis tests.""" activations = { "layer_a": jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]), 
@@ -162,30 +161,3 @@ def test_layerwise_analysis_property_accessors() -> None: assert analysis.concat_layers assert not analysis.use_probs_as_weights assert not analysis.requires_belief_states - - -def test_linear_regression_accepts_to_factors() -> None: - """linear_regression validator should accept to_factors parameter.""" - validator = ANALYSIS_REGISTRY["linear_regression"].validator - params = validator({"fit_intercept": False, "to_factors": True}) - - assert params["fit_intercept"] is False - assert params["to_factors"] is True - - -def test_linear_regression_svd_accepts_to_factors() -> None: - """linear_regression_svd validator should accept to_factors parameter.""" - validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator - params = validator({"fit_intercept": True, "to_factors": True, "rcond_values": [1e-3]}) - - assert params["fit_intercept"] is True - assert params["to_factors"] is True - assert params["rcond_values"] == (0.001,) - - -def test_linear_regression_to_factors_defaults_false() -> None: - """to_factors should default to False when not provided.""" - validator = ANALYSIS_REGISTRY["linear_regression"].validator - params = validator({"fit_intercept": True}) - - assert params["to_factors"] is False diff --git a/tests/analysis/test_linear_regression.py b/tests/analysis/test_linear_regression.py index bca9f6bf..f6bfe084 100644 --- a/tests/analysis/test_linear_regression.py +++ b/tests/analysis/test_linear_regression.py @@ -321,3 +321,68 @@ def test_layer_linear_regression_to_factors_false_works() -> None: assert "r2" in scalars assert "projected" in projections assert projections["projected"].shape == (3, 5) + + +def test_factored_regression_perfect_linear_fit() -> None: + """Test factored regression with perfectly linear targets achieves perfect fit. + + Uses targets that are exact linear combinations of features to verify + the regression machinery works correctly for the factored case. 
+ """ + # 5 samples, 4 features + x = jnp.array( + [ + [1.0, 2.0, 3.0, 4.0], + [2.0, 3.0, 4.0, 5.0], + [3.0, 4.0, 5.0, 6.0], + [4.0, 5.0, 6.0, 7.0], + [5.0, 6.0, 7.0, 8.0], + ] + ) + weights = jnp.ones(5) / 5.0 + + # Factor 0: 3 states, exact linear combination (with intercept) + # y0 = [x0 + 1, x1 + 2, x2 + 3] + factor_0 = jnp.stack([x[:, 0] + 1, x[:, 1] + 2, x[:, 2] + 3], axis=1) + + # Factor 1: 2 states, exact linear combination + # y1 = [x1, x3] + factor_1 = jnp.stack([x[:, 1], x[:, 3]], axis=1) + + scalars, projections = layer_linear_regression(x, weights, (factor_0, factor_1), to_factors=True) + + # Should achieve perfect R² since targets are exact linear combinations + assert scalars["factor_0/r2"] > 0.99, f"factor_0 R² too low: {scalars['factor_0/r2']}" + assert scalars["factor_1/r2"] > 0.99, f"factor_1 R² too low: {scalars['factor_1/r2']}" + + # Projections should match targets very closely + chex.assert_trees_all_close(projections["factor_0/projected"], factor_0, atol=1e-4) + chex.assert_trees_all_close(projections["factor_1/projected"], factor_1, atol=1e-4) + + +def test_factored_regression_different_state_counts() -> None: + """Test factored regression with factors having different numbers of states. + + This reproduces a scenario where factors have different dimensionality, + which is common in factored generative processes. 
+ """ + x = jnp.arange(24.0).reshape(6, 4) # 6 samples, 4 features + weights = jnp.ones(6) / 6.0 + + # Factor 0: 3 states (like "mess3") + factor_0_raw = x[:, :3] + factor_0 = factor_0_raw / factor_0_raw.sum(axis=1, keepdims=True) + + # Factor 1: 2 states (like "tom quantum") + factor_1_raw = x[:, :2] + factor_1 = factor_1_raw / factor_1_raw.sum(axis=1, keepdims=True) + + scalars, projections = layer_linear_regression(x, weights, (factor_0, factor_1), to_factors=True) + + # Verify shapes are correct + assert projections["factor_0/projected"].shape == (6, 3) + assert projections["factor_1/projected"].shape == (6, 2) + + # Both should achieve reasonable fit + assert scalars["factor_0/r2"] > 0.5, f"factor_0 R² too low: {scalars['factor_0/r2']}" + assert scalars["factor_1/r2"] > 0.5, f"factor_1 R² too low: {scalars['factor_1/r2']}" diff --git a/tests/end_to_end/configs/activation_tracker/default.yaml b/tests/end_to_end/configs/activation_tracker/default.yaml index 5158ba6d..1950e7ee 100644 --- a/tests/end_to_end/configs/activation_tracker/default.yaml +++ b/tests/end_to_end/configs/activation_tracker/default.yaml @@ -17,4 +17,4 @@ instance: last_token_only: false concat_layers: true use_probs_as_weights: true - rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] \ No newline at end of file + rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] diff --git a/tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml b/tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml new file mode 100644 index 00000000..784c45ff --- /dev/null +++ b/tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml @@ -0,0 +1,37 @@ +# Example: Plot RMSE value over training steps, split by layer +# This demonstrates the scalar_pattern feature for temporal metric visualization + +name: tracker_with_rmse_tracking +instance: + _target_: simplexity.activations.activation_tracker.ActivationTracker + analyses: + regression: + instance: + _target_: 
simplexity.activations.activation_analyses.LinearRegressionSVDAnalysis + last_token_only: false + concat_layers: false + use_probs_as_weights: true + rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] + visualizations: + # Temporal visualization: RMSE over training steps + - name: rmse_over_time + controls: + accumulate_steps: true + dropdown: layer # User can filter by layer in UI + data_mapping: + mappings: + rmse: {source: scalar_pattern, key: "blocks.*.hook_resid_post_rmse"} + backend: altair + layer: + geometry: + type: line + props: {} + aesthetics: + x: {field: step, type: quantitative, title: "Training Step"} + y: {field: rmse, type: quantitative, title: "RMSE"} + color: {field: layer, type: nominal, title: "Layer"} + strokeDash: {field: layer, type: nominal} + size: {width: 800, height: 400} + guides: + title: "RMSE Evolution Across Training" + subtitle: "Tracking model convergence by layer" diff --git a/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml new file mode 100644 index 00000000..8a9156c5 --- /dev/null +++ b/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml @@ -0,0 +1,162 @@ +name: tracker_with_factor_projections +instance: + _target_: simplexity.activations.activation_tracker.ActivationTracker + analyses: + # Linear regression with to_factors=true produces per-factor projections + # Keys are namespaced as: {layer}_factor_{idx}/projected + regression_factored: + instance: + _target_: simplexity.activations.activation_analyses.LinearRegressionAnalysis + last_token_only: false + concat_layers: false + use_probs_as_weights: false + to_factors: true # Enable per-factor projections + skip_first_token: true + visualizations: + # 2x5 grid: Top row = projections, Bottom row = ground truth beliefs + # Uses combined mappings to merge two data sources with a data_type column + - name: prediction_vs_truth_grid + controls: + slider: step + dropdown: 
layer + data_mapping: + # Combined mappings allow merging projections + belief states + sampling: + max_points: 2000 + seed: 42 + combined: + - label: prediction + mappings: + factor_*_prob_0: + source: projections + key: "factor_*/projected" + component: 0 + group_as: factor + factor_*_prob_1: + source: projections + key: "factor_*/projected" + component: 1 + group_as: factor + factor_*_prob_2: + source: projections + key: "factor_*/projected" + component: 2 + group_as: factor + - label: ground_truth + mappings: + # Belief state factor patterns expand across factors (3D beliefs) + factor_*_prob_0: + source: belief_states + factor: "*" + component: 0 + group_as: factor + factor_*_prob_1: + source: belief_states + factor: "*" + component: 1 + group_as: factor + factor_*_prob_2: + source: belief_states + factor: "*" + component: 2 + group_as: factor + combine_as: data_type + preprocessing: + - type: project_to_simplex + input_fields: [prob_0, prob_1, prob_2] + output_fields: [simplex_x, simplex_y] + - type: combine_rgb + input_fields: [prob_0, prob_1, prob_2] + output_fields: [point_color] + backend: plotly + plot: + facet: + column: factor # 5 columns (one per factor) + row: data_type # 2 rows (prediction vs ground_truth) + layers: + - geometry: {type: point } + aesthetics: + x: {field: simplex_x, type: quantitative, title: "Simplex X"} + y: {field: simplex_y, type: quantitative, title: "Simplex Y"} + color: {field: point_color, type: nominal} + size: {value: 3} + size: {width: 200, height: 200} + guides: + title: "Prediction vs Ground Truth (Per Factor)" + subtitle: "Top: Ground truth | Bottom: Model belief states" + # 3D scatter plot of belief states (no simplex projection) + - name: belief_states_3d + controls: + slider: step + dropdown: layer + data_mapping: + sampling: + max_points: 2000 + seed: 42 + combined: + - label: prediction + mappings: + factor_*_prob_0: + source: projections + key: "factor_*/projected" + component: 0 + group_as: factor + 
factor_*_prob_1: + source: projections + key: "factor_*/projected" + component: 1 + group_as: factor + factor_*_prob_2: + source: projections + key: "factor_*/projected" + component: 2 + group_as: factor + - label: ground_truth + mappings: + factor_*_prob_0: + source: belief_states + factor: "*" + component: 0 + group_as: factor + factor_*_prob_1: + source: belief_states + factor: "*" + component: 1 + group_as: factor + factor_*_prob_2: + source: belief_states + factor: "*" + component: 2 + group_as: factor + combine_as: data_type + preprocessing: + - type: combine_rgb + input_fields: [prob_0, prob_1, prob_2] + output_fields: [point_color] + backend: plotly + plot: + facet: + column: factor # 5 columns (one per factor) + row: data_type # 2 rows (prediction vs ground_truth) + layers: + - geometry: {type: point} + aesthetics: + # scale.domain sets axis ranges to [0, 1] for probability space + # This prevents auto-scaling artifacts when data is planar + x: {field: prob_0, type: quantitative, title: "State 0", scale: {domain: [0, 1]}} + y: {field: prob_1, type: quantitative, title: "State 1"} + z: {field: prob_2, type: quantitative, title: "State 2"} + color: {field: point_color, type: nominal} + size: {value: 3} + size: {width: 200, height: 200} + guides: + title: "3D Belief States (Prediction vs Ground Truth)" + subtitle: "Top: Prediction | Bottom: Ground Truth" + # PCA analysis for visualizing first 3 principal components in 3D + pca_analysis: + instance: + _target_: simplexity.activations.activation_analyses.PcaAnalysis + n_components: 10 + last_token_only: false + concat_layers: false + use_probs_as_weights: false diff --git a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml new file mode 100644 index 00000000..be729909 --- /dev/null +++ b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml @@ -0,0 +1,230 @@ +name: tracker_with_projections +instance: + _target_: 
simplexity.activations.activation_tracker.ActivationTracker + analyses: + pca_all_tokens: # <- THIS RETURNS SCALARS AND PROJECTIONS + instance: + _target_: simplexity.activations.activation_analyses.PcaAnalysis + n_components: null + last_token_only: false + concat_layers: false + use_probs_as_weights: true + variance_thresholds: [0.80, 0.90, 0.95, 0.99] + visualizations: # <- THIS IS OPTIONAL + - name: pca_3d_scatter # <- NEW VISUALIZATION + controls: + slider: step + dropdown: layer + cumulative: false + data_mapping: + mappings: + pc_*: {source: projections, key: pca, component: "*"} + belief_*: {source: belief_states, component: "*"} + preprocessing: + - type: combine_rgb + input_fields: [belief_*] + output_fields: [belief_color] + backend: plotly + layer: + geometry: + type: point + props: {size: 3} + aesthetics: + x: {field: pc_0, type: quantitative, title: "PC 1"} + y: {field: pc_1, type: quantitative, title: "PC 2"} + z: {field: pc_2, type: quantitative, title: "PC 3"} + color: + field: belief_color + type: nominal + opacity: {value: 0.85} + size: {width: 800, height: 600} + guides: + title: "PCA Projection (3D)" + subtitle: "All tokens, weighted by prefix probability" + - name: cumulative_explained_variance # <- NEW VISUALIZATION + controls: + dropdown: layer + accumulate_steps: true + cumulative: false + data_mapping: + scalar_series: + key_template: "{layer}_cumvar_{index}" + index_field: n_components + value_field: cumulative_explained_variance + backend: altair + layer: + geometry: + type: line + props: {} + aesthetics: + x: {field: n_components, type: quantitative, title: "Number of PCA Components"} + y: {field: cumulative_explained_variance, type: quantitative, title: "Cumulative Explained Variance"} + color: {field: step, type: nominal, title: Step} + size: {width: 600, height: 400} + guides: + title: "Cumulative Explained Variance by PCA Components" + subtitle: "All tokens, weighted by probability" + + regression: # <- THIS RETURNS SCALARS AND 
PROJECTIONS + instance: + _target_: simplexity.activations.activation_analyses.LinearRegressionAnalysis + last_token_only: false + concat_layers: false + use_probs_as_weights: true + # rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] + visualizations: # <- THIS IS OPTIONAL + - name: regression_3d # <- NEW VISUALIZATION + controls: + slider: step + dropdown: layer + cumulative: false + data_mapping: + mappings: + prob_0: {source: projections, key: projected, component: 0} + prob_1: {source: projections, key: projected, component: 1} + prob_2: {source: projections, key: projected, component: 2} + belief_r: {source: belief_states, component: 0} + belief_g: {source: belief_states, component: 1} + belief_b: {source: belief_states, component: 2} + preprocessing: + - type: combine_rgb + input_fields: [belief_r, belief_g, belief_b] + output_fields: [belief_color] + backend: plotly + layer: + geometry: + type: point + props: {size: 4} + aesthetics: + x: {field: prob_0, type: quantitative, title: "P(State 0)"} + y: {field: prob_1, type: quantitative, title: "P(State 1)"} + z: {field: prob_2, type: quantitative, title: "P(State 2)"} + color: + field: belief_color + type: nominal + opacity: {value: 0.7} + size: {width: 800, height: 600} + guides: + title: "Regression Projection (3D)" + - name: regression_to_simplex # <- NEW VISUALIZATION + controls: + slider: step + dropdown: layer + cumulative: false + data_mapping: + mappings: + prob_0: {source: projections, key: projected, component: 0} + prob_1: {source: projections, key: projected, component: 1} + prob_2: {source: projections, key: projected, component: 2} + preprocessing: + - type: combine_rgb + input_fields: [prob_0, prob_1, prob_2] + output_fields: [prediction_color] + - type: project_to_simplex + input_fields: [prob_0, prob_1, prob_2] + output_fields: [simplex_x, simplex_y] + backend: plotly + layer: + geometry: + type: point + props: {size: 4} + aesthetics: + x: {field: simplex_x, type: quantitative, title: 
"Simplex X"} + y: {field: simplex_y, type: quantitative, title: "Simplex Y"} + color: + field: prediction_color + type: nominal + opacity: {value: 0.7} + size: {width: 800, height: 600} + guides: + title: "Regression Projection (Simplex)" + - name: ground_truth_simplex + data_mapping: + mappings: + belief_r: {source: belief_states, component: 0} + belief_g: {source: belief_states, component: 1} + belief_b: {source: belief_states, component: 2} + preprocessing: + - type: combine_rgb + input_fields: [belief_r, belief_g, belief_b] + output_fields: [belief_color] + - type: project_to_simplex + input_fields: [belief_r, belief_g, belief_b] + output_fields: [simplex_x, simplex_y] + backend: plotly + layer: + geometry: + type: point + props: {size: 4} + aesthetics: + x: {field: simplex_x, type: quantitative, title: "Simplex X"} + y: {field: simplex_y, type: quantitative, title: "Simplex Y"} + color: + field: belief_color + type: nominal + opacity: {value: 0.7} + size: {width: 800, height: 600} + guides: + title: "Regression Projection (Simplex)" + # Temporal visualization: RMSE over training steps + - name: rmse_over_time + controls: + accumulate_steps: true + dropdown: layer # User can filter by layer in UI + data_mapping: + mappings: + rmse: {source: scalar_pattern, key: "blocks.*.hook_resid_post_rmse"} # Wildcard expands to all layers + backend: altair + layer: + geometry: + type: line + props: {} + aesthetics: + x: {field: step, type: quantitative, title: "Training Step"} + y: {field: rmse, type: quantitative, title: "RMSE"} + color: {field: layer, type: nominal, title: "Layer"} + strokeDash: {field: layer, type: nominal} + size: {width: 800, height: 400} + guides: + title: "RMSE Evolution Across Training" + subtitle: "Tracking model convergence by layer" + regression_concat: # <- THIS RETURNS SCALARS AND PROJECTIONS + instance: + _target_: simplexity.activations.activation_analyses.LinearRegressionAnalysis + last_token_only: false + concat_layers: true + 
use_probs_as_weights: true + # rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] + visualizations: # <- THIS IS OPTIONAL + - name: regression_to_simplex_concat # <- NEW VISUALIZATION + controls: + slider: step + dropdown: layer + cumulative: false + data_mapping: + mappings: + prob_0: {source: projections, key: projected, component: 0} + prob_1: {source: projections, key: projected, component: 1} + prob_2: {source: projections, key: projected, component: 2} + preprocessing: + - type: combine_rgb + input_fields: [prob_0, prob_1, prob_2] + output_fields: [prediction_color] + - type: project_to_simplex + input_fields: [prob_0, prob_1, prob_2] + output_fields: [simplex_x, simplex_y] + backend: plotly + layer: + geometry: + type: point + props: {size: 4} + aesthetics: + x: {field: simplex_x, type: quantitative, title: "Simplex X"} + y: {field: simplex_y, type: quantitative, title: "Simplex Y"} + color: + field: prediction_color + type: nominal + opacity: {value: 0.7} + size: {width: 800, height: 600} + guides: + title: "Regression Projection (Simplex)" \ No newline at end of file diff --git a/tests/end_to_end/configs/generative_process/rrxor.yaml b/tests/end_to_end/configs/generative_process/rrxor.yaml new file mode 100644 index 00000000..621fc547 --- /dev/null +++ b/tests/end_to_end/configs/generative_process/rrxor.yaml @@ -0,0 +1,13 @@ +name: rrxor +instance: + _target_: simplexity.generative_processes.builder.build_hidden_markov_model + process_name: rrxor + p1: 0.5 + p2: 0.5 + +base_vocab_size: ??? +bos_token: ??? +eos_token: null +vocab_size: ??? 
+sequence_len: 10 +batch_size: 1024 diff --git a/tests/end_to_end/configs/generative_process/unified_chain_example.yaml b/tests/end_to_end/configs/generative_process/unified_chain_example.yaml index 7f034cd6..7e8db6f1 100644 --- a/tests/end_to_end/configs/generative_process/unified_chain_example.yaml +++ b/tests/end_to_end/configs/generative_process/unified_chain_example.yaml @@ -54,4 +54,4 @@ batch_size: 1024 # Example interpretation: # - Factor 0 always uses variant 0 # - Factor 1: parent_token 0->variant 0, token 1->variant 1, token 2->variant 0 -# - Factor 2: parent_token 0->variant 1, token 1->variant 0, token 2->variant 1 \ No newline at end of file +# - Factor 2: parent_token 0->variant 1, token 1->variant 0, token 2->variant 1 diff --git a/tests/end_to_end/configs/generative_process/unified_independent_example.yaml b/tests/end_to_end/configs/generative_process/unified_independent_example.yaml index 8abf8aa2..27fe2fea 100644 --- a/tests/end_to_end/configs/generative_process/unified_independent_example.yaml +++ b/tests/end_to_end/configs/generative_process/unified_independent_example.yaml @@ -29,7 +29,7 @@ instance: variants: - process_name: mess3 process_params: - x: 0.5 + x: 0.15 a: 0.6 # Factor 2: mess3 process @@ -37,12 +37,25 @@ instance: variants: - process_name: mess3 process_params: - x: 0.2 - a: 0.7 + x: 0.15 + a: 0.6 + + - component_type: ghmm + variants: + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 4.0 + + - component_type: ghmm + variants: + - process_name: tom_quantum + process_params: + alpha: 1.0 + beta: 4.0 bos_token: ??? 
eos_token: null -batch_size: 1024 # Example interpretation: # - Each factor operates completely independently diff --git a/tests/end_to_end/configs/generative_process/unified_independent_example_crazy.yaml b/tests/end_to_end/configs/generative_process/unified_independent_example_crazy.yaml new file mode 100644 index 00000000..ea4b5d48 --- /dev/null +++ b/tests/end_to_end/configs/generative_process/unified_independent_example_crazy.yaml @@ -0,0 +1,68 @@ +# Unified API: Independent Structure +# Independent structure has no conditional dependencies: +# Each factor operates independently, always using variant 0. +# Joint distribution is simply the product of independent factor distributions. + +name: unified_independent_example +base_vocab_size: ??? # Will auto-infer as 3 * 3 * 3 = 27 +vocab_size: ??? + +instance: + _target_: simplexity.generative_processes.builder.build_factored_process_from_spec + + # Structure type - determines how factors interact + structure_type: independent + + # Component specifications + # Each factor has exactly one variant (independent factors don't need multiple variants) + spec: + # Factor 0: mess3 process + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + + # Factor 1: mess3 process + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + + # Factor 2: mess3 process + - component_type: hmm + variants: + - process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + + - component_type: ghmm + variants: + - process_name: tom_quantum + initial_state: [1, 0.41, -0.58] + process_params: + alpha: 0.6 + beta: 5.5 + + - component_type: ghmm + variants: + - process_name: tom_quantum + initial_state: [1, 0.41, -0.58] + process_params: + alpha: 0.6 + beta: 5.5 + +bos_token: ??? 
+eos_token: null + +# Example interpretation: +# - Each factor operates completely independently +# - Factor 0 always emits from mess3(x=0.15, a=0.6) +# - Factor 1 always emits from mess3(x=0.5, a=0.6) +# - Factor 2 always emits from mess3(x=0.2, a=0.7) +# - Joint distribution: P(t0,t1,t2) = P(t0) * P(t1) * P(t2) +# - No control maps needed - factors don't depend on each other diff --git a/tests/end_to_end/configs/lr_scheduler/reduce_lr_on_plateau.yaml b/tests/end_to_end/configs/lr_scheduler/reduce_lr_on_plateau.yaml new file mode 100644 index 00000000..b5546e6e --- /dev/null +++ b/tests/end_to_end/configs/lr_scheduler/reduce_lr_on_plateau.yaml @@ -0,0 +1,10 @@ +name: lr_scheduler_reduce_on_plateau +instance: + _target_: torch.optim.lr_scheduler.ReduceLROnPlateau + mode: min + factor: 0.5 + patience: 100000 + threshold: 1e-6 + threshold_mode: rel + cooldown: 200 + min_lr: 1e-6 \ No newline at end of file diff --git a/tests/end_to_end/configs/lr_scheduler/windowed_reduce_lr_on_plateau.yaml b/tests/end_to_end/configs/lr_scheduler/windowed_reduce_lr_on_plateau.yaml new file mode 100644 index 00000000..61cb2e9b --- /dev/null +++ b/tests/end_to_end/configs/lr_scheduler/windowed_reduce_lr_on_plateau.yaml @@ -0,0 +1,12 @@ +name: lr_scheduler_windowed_reduce_on_plateau +instance: + _target_: simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau + window_size: 10 + update_every: 100 + mode: min + factor: 0.5 + patience: 1000 + threshold: 1e-6 + threshold_mode: rel + cooldown: 200 + min_lr: 1e-6 diff --git a/tests/end_to_end/configs/metric_tracker/basic.yaml b/tests/end_to_end/configs/metric_tracker/basic.yaml new file mode 100644 index 00000000..6b701aad --- /dev/null +++ b/tests/end_to_end/configs/metric_tracker/basic.yaml @@ -0,0 +1,14 @@ +# @package _global_ + +metric_tracker: + name: basic_tracker + instance: + _target_: simplexity.metrics.metric_tracker.MetricTracker + metric_names: + - tokens + - learning_rate + - loss + - parameter_norm + metric_kwargs: + 
ma_window_size: 100 + ema_gamma: 0.9 diff --git a/tests/end_to_end/configs/optimizer/pytorch_adam_factored.yaml b/tests/end_to_end/configs/optimizer/pytorch_adam_factored.yaml new file mode 100644 index 00000000..e304207b --- /dev/null +++ b/tests/end_to_end/configs/optimizer/pytorch_adam_factored.yaml @@ -0,0 +1,4 @@ +name: pytorch_adam +instance: + _target_: torch.optim.Adam + lr: 1e-4 diff --git a/tests/end_to_end/configs/predictive_model/tiny_transformer.yaml b/tests/end_to_end/configs/predictive_model/tiny_transformer.yaml index f0d3b9b0..758aae6c 100644 --- a/tests/end_to_end/configs/predictive_model/tiny_transformer.yaml +++ b/tests/end_to_end/configs/predictive_model/tiny_transformer.yaml @@ -3,12 +3,12 @@ instance: _target_: transformer_lens.HookedTransformer cfg: _target_: transformer_lens.HookedTransformerConfig - d_model: 1 + d_model: 3 d_head: 1 n_heads: 1 n_layers: 1 n_ctx: 128 - d_mlp: 1 + d_mlp: 3 d_vocab: ??? act_fn: "relu" normalization_type: "LN" diff --git a/tests/end_to_end/configs/predictive_model/transformer.yaml b/tests/end_to_end/configs/predictive_model/transformer.yaml index a2b69fd1..924c4987 100644 --- a/tests/end_to_end/configs/predictive_model/transformer.yaml +++ b/tests/end_to_end/configs/predictive_model/transformer.yaml @@ -3,12 +3,12 @@ instance: _target_: transformer_lens.HookedTransformer cfg: _target_: transformer_lens.HookedTransformerConfig - d_model: 64 - d_head: 16 - n_heads: 4 - n_layers: 2 - n_ctx: 64 - d_mlp: 256 + d_model: 384 + d_head: 64 + n_heads: 6 + n_layers: 1 + n_ctx: 9 + d_mlp: 1536 d_vocab: ??? 
act_fn: "relu" normalization_type: "LN" diff --git a/tests/end_to_end/configs/test_metric_tracker.yaml b/tests/end_to_end/configs/test_metric_tracker.yaml new file mode 100644 index 00000000..e736365d --- /dev/null +++ b/tests/end_to_end/configs/test_metric_tracker.yaml @@ -0,0 +1,15 @@ +# @package _global_ + +defaults: + - _self_ + - mlflow: databricks + - logging: mlflow_logger + - metric_tracker: basic + +experiment_name: metric_tracker_test +run_name: metric_tracker_test_${now:%Y%m%d_%H%M%S} +device: auto +seed: 42 +tags: + research_step: test + retention: temp diff --git a/tests/end_to_end/configs/training.yaml b/tests/end_to_end/configs/training.yaml index de164936..56cd1f81 100644 --- a/tests/end_to_end/configs/training.yaml +++ b/tests/end_to_end/configs/training.yaml @@ -6,8 +6,10 @@ defaults: - persistence: mlflow_persister - predictive_model: transformer - optimizer: pytorch_adam + - lr_scheduler: reduce_lr_on_plateau - metric_tracker@training_metric_tracker: default - metric_tracker@eval_metric_tracker: loss_only + - activation_tracker: with_factor_visuals - training: full experiment_name: training_test @@ -19,3 +21,4 @@ tags: retention: temp example_tag_1: value1 example_tag_2: value2 + diff --git a/tests/end_to_end/configs/training/full.yaml b/tests/end_to_end/configs/training/full.yaml index 8626ce04..64a4c72c 100644 --- a/tests/end_to_end/configs/training/full.yaml +++ b/tests/end_to_end/configs/training/full.yaml @@ -1,7 +1,9 @@ -num_steps: 100000 -batch_size: 64 -sequence_len: 65 +num_steps: 500000 +batch_size: 128 +sequence_len: 8 log_cheap_every: 10 -log_expensive_every: 1000 +log_expensive_every: 10000 evaluate_every: 100 checkpoint_every: 20000 +validation_multiplier: 10 +scheduler_every: 100 \ No newline at end of file diff --git a/tests/end_to_end/configs/training/minimal.yaml b/tests/end_to_end/configs/training/minimal.yaml index 8b19bdbb..da956e4d 100644 --- a/tests/end_to_end/configs/training/minimal.yaml +++ 
b/tests/end_to_end/configs/training/minimal.yaml @@ -1,7 +1,9 @@ num_steps: 10 -batch_size: 1 -sequence_len: 2 +batch_size: 3 +sequence_len: 3 log_cheap_every: 1 log_expensive_every: 2 evaluate_every: 2 -checkpoint_every: 5 \ No newline at end of file +checkpoint_every: 5 +validation_multiplier: 1 +scheduler_every: 1 \ No newline at end of file diff --git a/tests/end_to_end/configs/training_factored.yaml b/tests/end_to_end/configs/training_factored.yaml new file mode 100644 index 00000000..e6c29906 --- /dev/null +++ b/tests/end_to_end/configs/training_factored.yaml @@ -0,0 +1,24 @@ +defaults: + - _self_ + - mlflow: databricks + - logging: mlflow_logger + - generative_process: unified_independent_example + - persistence: mlflow_persister + - predictive_model: transformer + - optimizer: pytorch_adam_factored + - lr_scheduler: windowed_reduce_lr_on_plateau + - metric_tracker@training_metric_tracker: default + - metric_tracker@eval_metric_tracker: loss_only + - activation_tracker: with_factor_visuals + - training: full + +experiment_name: training_test_factored +run_name: training_test_factored_${now:%Y%m%d_%H%M%S} +device: auto +seed: 0 +tags: + research_step: demo + retention: temp + example_tag_1: value1 + example_tag_2: value2 + diff --git a/tests/end_to_end/configs/training_test.yaml b/tests/end_to_end/configs/training_test.yaml index 6a47cba9..a5ef2f37 100644 --- a/tests/end_to_end/configs/training_test.yaml +++ b/tests/end_to_end/configs/training_test.yaml @@ -6,8 +6,10 @@ defaults: - persistence: mlflow_persister - predictive_model: tiny_transformer - optimizer: pytorch_adam + - lr_scheduler: reduce_lr_on_plateau - metric_tracker@training_metric_tracker: default - metric_tracker@eval_metric_tracker: loss_only + - activation_tracker: with_visuals - training: minimal experiment_name: training_test diff --git a/tests/end_to_end/configs/visualization/3d_scatter.yaml b/tests/end_to_end/configs/visualization/3d_scatter.yaml new file mode 100644 index 
00000000..c28699a3 --- /dev/null +++ b/tests/end_to_end/configs/visualization/3d_scatter.yaml @@ -0,0 +1,5 @@ +defaults: + - data: synthetic_cloud + - plot: scatter3d + +output_html: scatter3d_demo.html diff --git a/tests/end_to_end/configs/visualization/plot/scatter3d.yaml b/tests/end_to_end/configs/visualization/plot/scatter3d.yaml new file mode 100644 index 00000000..d5d2d7ae --- /dev/null +++ b/tests/end_to_end/configs/visualization/plot/scatter3d.yaml @@ -0,0 +1,26 @@ +backend: plotly +data: + source: cloud +layers: + - name: cluster_cloud + geometry: + type: point + props: + size: 8 + aesthetics: + x: { field: x, type: quantitative, title: "X position" } + y: { field: y, type: quantitative, title: "Y position" } + z: { field: z, type: quantitative, title: "Z position" } + color: { field: cluster, type: nominal, title: Cluster } + size: { field: magnitude, type: quantitative } + opacity: { value: 0.85 } + tooltip: + - { field: cluster, type: nominal, title: Cluster } + - { field: magnitude, type: quantitative, title: Magnitude } +size: + width: 800 + height: 600 +guides: + title: "Synthetic 3D Scatter" + subtitle: "Points sampled from multivariate Gaussians" + caption: "Configured entirely via Hydra YAML" diff --git a/tests/end_to_end/test_metric_tracker_integration.py b/tests/end_to_end/test_metric_tracker_integration.py new file mode 100644 index 00000000..ea9a5a2b --- /dev/null +++ b/tests/end_to_end/test_metric_tracker_integration.py @@ -0,0 +1,89 @@ +"""Test metric tracker integration without full demo dependencies.""" + +import logging +from pathlib import Path + +import torch +from hydra import compose, initialize_config_dir +from torch import nn + +import simplexity + +SIMPLEXITY_LOGGER = logging.getLogger("simplexity") +CONFIG_DIR = str(Path(__file__).parent / "configs") + + +class SimpleModel(nn.Module): + """Simple model for testing metric tracker.""" + + def __init__(self, vocab_size: int = 100, hidden_size: int = 64): + super().__init__() + 
self.embedding = nn.Embedding(vocab_size, hidden_size) + self.linear = nn.Linear(hidden_size, vocab_size) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward pass.""" + x = self.embedding(x) + return self.linear(x) + + +@simplexity.managed_run(strict=False, verbose=False) +def _run_metric_tracker_test(_cfg, components: simplexity.Components) -> None: # pylint: disable=unused-argument + """Run the metric tracker integration test.""" + SIMPLEXITY_LOGGER.info("Testing metric tracker integration") + + # Check that metric tracker was instantiated + assert components.metric_trackers is not None, "Metric trackers should be instantiated" + metric_tracker = components.get_metric_tracker() + assert metric_tracker is not None, "Metric tracker should be available" + + SIMPLEXITY_LOGGER.info("Metric tracker type: %s", type(metric_tracker)) + + # Create simple model and optimizer for testing + model = SimpleModel() + optimizer = torch.optim.Adam(model.parameters(), lr=0.001) + + # Override the model and optimizer in the metric tracker + metric_tracker.model = model + metric_tracker.optimizer = optimizer + + # Run a simple training loop + SIMPLEXITY_LOGGER.info("Running 5 test training steps") + for step in range(5): + # Generate random data + inputs = torch.randint(0, 100, (4, 10)) + targets = torch.randint(0, 100, (4, 10)) + + # Forward pass + outputs = model(inputs) + loss_fn = torch.nn.CrossEntropyLoss() + loss = loss_fn(outputs.view(-1, outputs.size(-1)), targets.view(-1)) + + # Backward pass + optimizer.zero_grad() + loss.backward() + optimizer.step() + + # Update metric tracker + metric_tracker.step(tokens=inputs.numel(), loss=loss.item()) + + # Get metrics + metrics = metric_tracker.get_metrics(group="all") + loss_val = metrics.get("loss", 0.0) + tokens_val = metrics.get("tokens/raw", "N/A") + SIMPLEXITY_LOGGER.info("Step %d metrics: loss=%.4f, tokens=%s", step, loss_val, tokens_val) + + SIMPLEXITY_LOGGER.info("Metric tracker integration test PASSED") + + 
+def test_metric_tracker(tmp_path: Path) -> None: + """Test the metric tracker integration.""" + mlflow_db = tmp_path / "mlflow.db" + mlflow_uri = f"sqlite:///{mlflow_db.absolute()}" + overrides = [ + f"mlflow.tracking_uri={mlflow_uri}", + f"mlflow.registry_uri={mlflow_uri}", + ] + with initialize_config_dir(CONFIG_DIR, version_base="1.2"): + cfg = compose(config_name="test_metric_tracker.yaml", overrides=overrides) + _run_metric_tracker_test(cfg) # pylint: disable=no-value-for-parameter diff --git a/tests/end_to_end/training.py b/tests/end_to_end/training.py index ea0b52ed..35dfaf56 100644 --- a/tests/end_to_end/training.py +++ b/tests/end_to_end/training.py @@ -12,6 +12,7 @@ import logging from dataclasses import dataclass from pathlib import Path +from tempfile import TemporaryDirectory from typing import Any import hydra @@ -19,16 +20,20 @@ import jax.numpy as jnp import mlflow import torch +import tqdm from torch.optim import Adam from transformer_lens import HookedTransformer import simplexity -from simplexity.generative_processes.hidden_markov_model import HiddenMarkovModel -from simplexity.generative_processes.torch_generator import generate_data_batch +from simplexity.generative_processes.factored_generative_process import FactoredGenerativeProcess +from simplexity.generative_processes.hidden_markov_model import GeneralizedHiddenMarkovModel, HiddenMarkovModel +from simplexity.generative_processes.torch_generator import generate_data_batch, generate_data_batch_with_full_history from simplexity.logging.mlflow_logger import MLFlowLogger from simplexity.metrics.metric_tracker import MetricTracker from simplexity.persistence.mlflow_persister import MLFlowPersister +from simplexity.structured_configs.activation_tracker import ActivationTrackerConfig from simplexity.structured_configs.generative_process import GenerativeProcessConfig +from simplexity.structured_configs.learning_rate_scheduler import LearningRateSchedulerConfig from 
simplexity.structured_configs.logging import LoggingConfig from simplexity.structured_configs.metric_tracker import MetricTrackerConfig from simplexity.structured_configs.mlflow import MLFlowConfig @@ -53,6 +58,7 @@ class TrainingConfig: log_expensive_every: int checkpoint_every: int evaluate_every: int + validation_multiplier: int @dataclass @@ -65,9 +71,11 @@ class TrainingRunConfig: persistence: PersistenceConfig predictive_model: PredictiveModelConfig optimizer: OptimizerConfig + learning_rate_scheduler: LearningRateSchedulerConfig training_metric_tracker: MetricTrackerConfig eval_metric_tracker: MetricTrackerConfig training: TrainingConfig + activation_tracker: ActivationTrackerConfig device: str experiment_name: str @@ -76,6 +84,16 @@ class TrainingRunConfig: tags: dict[str, str] +def _expand_init_state( + initial_state: jax.Array | tuple[jax.Array, ...], + batch_size: int, +) -> jax.Array | tuple[jax.Array, ...]: + """Expand the initial state to the batch size.""" + if isinstance(initial_state, tuple): + return tuple(jnp.repeat(s[None, :], batch_size, axis=0) for s in initial_state) + return jnp.repeat(initial_state[None, :], batch_size, axis=0) + + @simplexity.managed_run(strict=False, verbose=True) def train(cfg: TrainingRunConfig, components: simplexity.Components) -> None: """Test the managed run decorator.""" @@ -84,19 +102,28 @@ def train(cfg: TrainingRunConfig, components: simplexity.Components) -> None: logger = components.get_logger() assert isinstance(logger, MLFlowLogger) generative_process = components.get_generative_process() - assert isinstance(generative_process, HiddenMarkovModel) + assert isinstance(generative_process, (HiddenMarkovModel, GeneralizedHiddenMarkovModel, FactoredGenerativeProcess)) persister = components.get_persister() assert isinstance(persister, MLFlowPersister) predictive_model = components.get_predictive_model() assert isinstance(predictive_model, HookedTransformer) optimizer = components.get_optimizer() assert 
isinstance(optimizer, Adam) + learning_rate_scheduler = components.get_learning_rate_scheduler() + assert isinstance(learning_rate_scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau) training_metric_tracker = components.get_metric_tracker("training_metric_tracker") assert isinstance(training_metric_tracker, MetricTracker) eval_metric_tracker = components.get_metric_tracker("eval_metric_tracker") assert isinstance(eval_metric_tracker, MetricTracker) + activation_tracker = components.get_activation_tracker() + assert activation_tracker is not None + + visualization_path = TemporaryDirectory() - gen_states = jnp.repeat(generative_process.initial_state[None, :], cfg.training.batch_size, axis=0) + gen_states = _expand_init_state( + generative_process.initial_state, + cfg.training.batch_size, + ) # Only need to specify device for MPS since JAX doesn't support it # (JAX will use CPU while PyTorch model is on MPS) @@ -106,14 +133,20 @@ def train(cfg: TrainingRunConfig, components: simplexity.Components) -> None: def generate(step: int) -> tuple[torch.Tensor, torch.Tensor]: key = jax.random.key(step) _, inputs, labels = generate_data_batch( - gen_states, generative_process, cfg.training.batch_size, cfg.training.sequence_len, key, device=device_arg + gen_states, + generative_process, + cfg.training.batch_size, + cfg.training.sequence_len, + key, + device=device_arg, + bos_token=cfg.generative_process.bos_token, ) return inputs, labels loss_fn = torch.nn.CrossEntropyLoss() def get_loss(outputs: torch.Tensor, labels: torch.Tensor) -> torch.Tensor: - return loss_fn(outputs.reshape(-1, outputs.shape[-1]), labels.reshape(-1).long()) + return loss_fn(outputs.reshape(-1, outputs.shape[-1]), labels.reshape(-1).long().to(outputs.device)) def train_step(step: int): predictive_model.train() @@ -124,6 +157,12 @@ def train_step(step: int): loss.backward() optimizer.step() training_metric_tracker.step(tokens=inputs, loss=loss) + old_lr = optimizer.param_groups[0]["lr"] + 
learning_rate_scheduler.step(loss.detach().item(), epoch=step) + new_lr = optimizer.param_groups[0]["lr"] + if new_lr != old_lr: + logging.info(f"Learning rate changed from {old_lr} to {new_lr} at step {step}") + logger.log_metrics(step, {"learning_rate": new_lr}) def log_step(step: int, group: str) -> None: metrics = training_metric_tracker.get_metrics(group) @@ -147,20 +186,57 @@ def eval_step(step: int) -> None: metrics = add_key_prefix(metrics, "eval") logger.log_metrics(step, metrics) + def activation_tracker_step(step: int) -> None: + predictive_model.eval() + outs = generate_data_batch_with_full_history( + _expand_init_state( + generative_process.initial_state, + int(cfg.training.batch_size * cfg.training.validation_multiplier), + ), + generative_process, + int(cfg.training.batch_size * cfg.training.validation_multiplier), + cfg.training.sequence_len, + jax.random.key(step), + device=device_arg, + bos_token=cfg.generative_process.bos_token, + ) + inputs = outs["inputs"] + assert isinstance(inputs, (jax.Array, torch.Tensor)) + prefix_probs = outs["prefix_probabilities"] + assert isinstance(prefix_probs, (jax.Array, torch.Tensor)) + _, act_cache = predictive_model.run_with_cache(inputs) + act_cache = {k: v.detach().cpu() for k, v in act_cache.items() if "resid" in k} + scalars, _, visualizations = activation_tracker.analyze( + inputs=inputs, + beliefs=outs["belief_states"], + probs=prefix_probs, + activations=act_cache, + step=step, + ) + visualization_paths = activation_tracker.save_visualizations( + visualizations, Path(visualization_path.name), step + ) + for key, path in visualization_paths.items(): + logger.log_artifact(str(path), artifact_path=f"activation_plots/{key.split('/')[0]}") + scalars = add_key_prefix(dict(scalars), "activations") + logger.log_metrics(step, scalars) + def checkpoint_step(step: int) -> None: persister.save_weights(predictive_model, step) - for step in range(cfg.training.num_steps + 1): + for step in 
tqdm.tqdm(range(cfg.training.num_steps + 1)): if step == 0: initial_loss = evaluate() training_metric_tracker.context.loss = initial_loss eval_metric_tracker.context.loss = initial_loss + activation_tracker_step(step) else: train_step(step) if step % cfg.training.log_cheap_every == 0: log_step(step, "cheap") if step % cfg.training.log_expensive_every == 0: log_step(step, "expensive") + activation_tracker_step(step) if step % cfg.training.evaluate_every == 0: eval_step(step) if step % cfg.training.checkpoint_every == 0: @@ -172,7 +248,12 @@ def checkpoint_step(step: int) -> None: step += 1 # pyright: ignore[reportPossiblyUnboundVariable] persister.save_model_to_registry(predictive_model, registered_model_name, model_inputs=sample_inputs, step=step) + visualization_path.cleanup() + if __name__ == "__main__": - main = hydra.main(config_path=CONFIG_DIR, config_name="training.yaml", version_base="1.2")(train) + main = hydra.main(config_path=CONFIG_DIR, config_name=CONFIG_NAME, version_base="1.2")(train) main() + import sys + + sys.exit(0) diff --git a/tests/end_to_end/visualization_3d_demo.py b/tests/end_to_end/visualization_3d_demo.py new file mode 100644 index 00000000..06766886 --- /dev/null +++ b/tests/end_to_end/visualization_3d_demo.py @@ -0,0 +1,201 @@ +"""Hydra-powered demo that renders a 3D scatter plot via PlotConfig YAML.""" + +from __future__ import annotations + +import types +from dataclasses import dataclass, field, fields, is_dataclass +from pathlib import Path +from typing import Any, Union, cast, get_args, get_origin, get_type_hints + +import hydra +import numpy as np +import pandas as pd +from hydra.utils import get_original_cwd +from omegaconf import DictConfig, OmegaConf + +from simplexity.visualization.altair_renderer import build_altair_chart +from simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.plotly_renderer import build_plotly_figure +from simplexity.visualization.structured_configs import 
PlotConfig + + +@dataclass +class SyntheticDataConfig: + """Configuration for generating synthetic 3D clusters.""" + + source_name: str = "cloud" + num_points: int = 600 + clusters: int = 4 + cluster_spread: float = 0.8 + seed: int = 11 + + +@dataclass +class Scatter3DDemoConfig: + """Root Hydra config for the demo.""" + + data: SyntheticDataConfig = field(default_factory=SyntheticDataConfig) + plot: PlotConfig = field(default_factory=PlotConfig) + output_html: str = "scatter3d_demo.html" + + +@hydra.main(version_base=None, config_path="configs/visualization", config_name="3d_scatter") +def main(cfg: DictConfig) -> None: + """Main entry point for the demo.""" + data_cfg = _convert_cfg(cfg.data, SyntheticDataConfig) + plot_cfg = _convert_cfg(cfg.plot, PlotConfig) + output_html = cast(str, cfg.get("output_html", "scatter3d_demo.html")) + dataframe = _generate_dataset(data_cfg) + registry = DictDataRegistry({data_cfg.source_name: dataframe}) + + if plot_cfg.backend == "plotly": + figure = build_plotly_figure(plot_cfg, registry) + _save_plotly_figure(figure, output_html) + else: + chart = build_altair_chart(plot_cfg, registry) + _save_altair_chart(chart, output_html) + + print(f"Saved interactive plot to {output_html}") # noqa: T201 - demo script output + + +def _generate_dataset(cfg: SyntheticDataConfig) -> pd.DataFrame: + rng = np.random.default_rng(cfg.seed) + points_per_cluster = max(1, cfg.num_points // cfg.clusters) + remainder = cfg.num_points % cfg.clusters + records: list[dict[str, float | int | str]] = [] + for cluster_idx in range(cfg.clusters): + center = rng.normal(0.0, cfg.cluster_spread * 3.0, size=3) + count = points_per_cluster + (1 if cluster_idx < remainder else 0) + for _ in range(count): + noise = rng.normal(0.0, cfg.cluster_spread, size=3) + x, y, z = center + noise + magnitude = float(np.sqrt(x**2 + y**2 + z**2)) + records.append( + { + "cluster": f"C{cluster_idx + 1}", + "x": float(x), + "y": float(y), + "z": float(z), + "magnitude": magnitude, 
+ } + ) + return pd.DataFrame.from_records(records) + + +def _convert_cfg[T](cfg_section: DictConfig, schema: type[T]) -> T: + """Convert DictConfig to dataclass instance, handling nested dataclasses recursively.""" + # Convert DictConfig to plain dict to avoid OmegaConf's Union/Literal type validation issues + cfg_dict = OmegaConf.to_container(cfg_section, resolve=True) or {} + return _dict_to_dataclass(cfg_dict, schema) + + +def _convert_value_by_type(value: Any, field_type: Any) -> Any: + """Convert a value based on its expected type (handles lists, dataclasses, etc.).""" + origin = get_origin(field_type) + + # Handle list types + if origin is list: + args = get_args(field_type) + if isinstance(value, list) and args: + item_type = args[0] + if is_dataclass(item_type): + return [ + _dict_to_dataclass(item, item_type) if isinstance(item, dict) else item # type: ignore[arg-type] + for item in value + ] + return value + # Handle dataclass types + if isinstance(value, dict) and is_dataclass(field_type): + return _dict_to_dataclass(value, field_type) # type: ignore[arg-type] + + return value + + +def _dict_to_dataclass(data: dict[str, Any] | Any, schema: type[Any]) -> Any: # pylint: disable=too-many-branches + """Recursively convert dict to dataclass instance, handling nested structures.""" + if not isinstance(data, dict): + return data + + if not is_dataclass(schema): + return data + + # Get field types from the dataclass schema, resolving string annotations + try: + field_types = get_type_hints(schema) + except (TypeError, NameError): + # Fallback to field.type if get_type_hints fails (e.g., forward references) + field_types = {f.name: f.type for f in fields(schema)} + + # Convert nested dicts to their corresponding dataclass types + converted: dict[str, Any] = {} + for key, value in data.items(): + if key not in field_types: + converted[key] = value + continue + + field_type = field_types[key] + origin = get_origin(field_type) + + # Handle Optional types (Union[X, 
None] or X | None) + if origin is Union or origin is types.UnionType: + args = get_args(field_type) + # Handle Optional[X] -> Union[X, None] + if args and len(args) == 2 and types.NoneType in args: + if value is None: + converted[key] = None + else: + non_none_type = next((t for t in args if t is not types.NoneType), None) + if non_none_type: + # Recursively handle the non-None type (could be a list, dict, etc.) + converted[key] = _convert_value_by_type(value, non_none_type) + else: + converted[key] = value + elif args and isinstance(value, dict): + # For other Union types, try to find a dataclass type that matches + dataclass_type = next((t for t in args if is_dataclass(t)), None) + if dataclass_type: + converted[key] = _dict_to_dataclass(value, dataclass_type) # type: ignore[arg-type] + else: + converted[key] = value + else: + # For other Union types, try to convert based on the first non-None type + non_none_types = [t for t in args if t is not types.NoneType] if args else [] + if non_none_types and value is not None: + converted[key] = _convert_value_by_type(value, non_none_types[0]) + else: + converted[key] = value + # Handle list types + elif origin is list: + args = get_args(field_type) + if isinstance(value, list) and args: + item_type = args[0] + if is_dataclass(item_type): + converted[key] = [ + _dict_to_dataclass(item, item_type) if isinstance(item, dict) else item # type: ignore[arg-type] + for item in value + ] + else: + converted[key] = value + else: + converted[key] = value + # Handle direct dataclass types + elif isinstance(value, dict) and is_dataclass(field_type): + converted[key] = _dict_to_dataclass(value, field_type) # type: ignore[arg-type] + else: + converted[key] = value + + return schema(**converted) + + +def _save_plotly_figure(figure, filename: str) -> None: + output_path = Path(get_original_cwd()) / filename + figure.write_html(str(output_path), include_plotlyjs="cdn") + + +def _save_altair_chart(chart, filename: str) -> None: + 
output_path = Path(get_original_cwd()) / filename + chart.save(str(output_path)) + + +if __name__ == "__main__": + main() # pylint: disable=no-value-for-parameter diff --git a/tests/end_to_end/visualization_demo.py b/tests/end_to_end/visualization_demo.py new file mode 100644 index 00000000..2da72bb8 --- /dev/null +++ b/tests/end_to_end/visualization_demo.py @@ -0,0 +1,105 @@ +"""Standalone demo that renders a layered Altair chart via visualization configs.""" + +from __future__ import annotations + +from pathlib import Path + +import numpy as np +import pandas as pd + +from simplexity.visualization.altair_renderer import build_altair_chart +from simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + ChannelAestheticsConfig, + DataConfig, + GeometryConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, + TransformConfig, +) + + +def main() -> None: + """Generate a toy dataset, build a PlotConfig, and save the rendered chart.""" + df = _create_demo_dataframe() + registry = DictDataRegistry({"metrics": df}) + plot_cfg = _build_plot_config() + chart = build_altair_chart(plot_cfg, registry) + + output_path = Path(__file__).with_name("visualization_demo.html") + chart.save(str(output_path)) + print(f"Wrote visualization demo to {output_path}") # noqa: T201 - simple example harness + + +def _create_demo_dataframe() -> pd.DataFrame: + rng = np.random.default_rng(7) + records: list[dict[str, float | str | int]] = [] + for run_idx in range(3): + run_id = f"run_{run_idx + 1}" + for epoch in range(1, 51): + base_loss = np.exp(-epoch / 25.0) + 0.1 * run_idx + jitter = rng.normal(0.0, 0.02) + loss = max(base_loss + jitter, 1e-4) + accuracy = 0.55 + 0.008 * epoch + rng.normal(0.0, 0.01) + records.append( + { + "run_id": run_id, + "epoch": epoch, + "loss": loss, + "accuracy": accuracy, + } + ) + return pd.DataFrame(records) + + +def _build_plot_config() -> PlotConfig: 
+ log_transform = TransformConfig(op="calculate", as_field="log_loss", expr="log(loss)") + base_aesthetics = AestheticsConfig( + x=ChannelAestheticsConfig(field="epoch", type="quantitative", title="Epoch"), + y=ChannelAestheticsConfig(field="log_loss", type="quantitative", title="log(loss)"), + tooltip=[ + ChannelAestheticsConfig(field="run_id", type="nominal", title="Run"), + ChannelAestheticsConfig(field="epoch", type="quantitative", title="Epoch"), + ChannelAestheticsConfig(field="log_loss", type="quantitative", title="log(loss)"), + ], + ) + raw_layer = LayerConfig( + name="raw_runs", + geometry=GeometryConfig(type="line", props={"opacity": 0.4}), + aesthetics=AestheticsConfig( + x=base_aesthetics.x, + y=base_aesthetics.y, + color=ChannelAestheticsConfig(field="run_id", type="nominal", title="Run"), + tooltip=base_aesthetics.tooltip, + ), + ) + mean_layer = LayerConfig( + name="mean_line", + geometry=GeometryConfig(type="line", props={"strokeWidth": 3, "color": "#111111"}), + aesthetics=AestheticsConfig( + x=base_aesthetics.x, + y=ChannelAestheticsConfig( + field="log_loss", + type="quantitative", + aggregate="mean", + title="Mean log(loss)", + ), + ), + ) + return PlotConfig( + data=DataConfig(source="metrics"), + transforms=[log_transform], + layers=[raw_layer, mean_layer], + size=PlotSizeConfig(width=600, height=400), + guides=PlotLevelGuideConfig( + title="Training loss over epochs", + subtitle="Each line is a synthetic training run built from random noise.", + ), + ) + + +if __name__ == "__main__": + main() diff --git a/tests/generative_processes/test_generator.py b/tests/generative_processes/test_generator.py index c83c17c6..2da516bf 100644 --- a/tests/generative_processes/test_generator.py +++ b/tests/generative_processes/test_generator.py @@ -36,6 +36,7 @@ def test_generate_data_batch(): assert jnp.all(labels >= 0) assert jnp.all(labels < hmm.vocab_size) chex.assert_trees_all_equal(inputs[:, 1:], labels[:, :-1]) + assert isinstance(gen_states, 
jax.Array) assert gen_states.shape == (batch_size, *gen_state.shape) @@ -64,6 +65,7 @@ def test_generate_data_batch_with_bos_token(): assert jnp.all(labels >= 0) assert jnp.all(labels < bos_token) chex.assert_trees_all_equal(inputs[:, 1:], labels[:, :-1]) + assert isinstance(gen_states, jax.Array) assert gen_states.shape == (batch_size, *gen_state.shape) @@ -92,6 +94,7 @@ def test_generate_data_batch_with_eos_token(): assert jnp.all(labels[:, :-1] < eos_token) assert jnp.all(labels[:, -1] == eos_token) chex.assert_trees_all_equal(inputs[:, 1:], labels[:, :-1]) + assert isinstance(gen_states, jax.Array) assert gen_states.shape == (batch_size, *gen_state.shape) @@ -121,6 +124,44 @@ def test_generate_data_batch_with_full_history(): assert isinstance(inputs, jax.Array) assert isinstance(labels, jax.Array) + # Without BOS, belief_states is aligned with inputs (one less than sequence_len) + assert belief_states.shape == (batch_size, sequence_len - 1, gen_state.shape[0]) + assert prefix_probs.shape == (batch_size, inputs.shape[1]) + assert labels.shape == inputs.shape + + +def test_generate_data_batch_with_full_history_bos(): + """Ensure belief states align with inputs when BOS token is used.""" + hmm = build_hidden_markov_model("zero_one_random", process_params={"p": 0.5}) + batch_size = 4 + sequence_len = 6 + bos_token = 2 + gen_state: jax.Array = hmm.initial_state + states = jnp.repeat(gen_state[None, :], batch_size, axis=0) + key = jax.random.PRNGKey(0) + result = generate_data_batch_with_full_history( + states, + hmm, + batch_size, + sequence_len, + key, + bos_token=bos_token, + ) + belief_states = result["belief_states"] + prefix_probs = result["prefix_probabilities"] + inputs = result["inputs"] + labels = result["labels"] + + assert isinstance(belief_states, jax.Array) + assert isinstance(prefix_probs, jax.Array) + assert isinstance(inputs, jax.Array) + assert isinstance(labels, jax.Array) + + # With BOS, inputs has sequence_len positions (BOS + sequence_len-1 
tokens) + # belief_states is aligned with inputs + assert inputs.shape == (batch_size, sequence_len) assert belief_states.shape == (batch_size, sequence_len, gen_state.shape[0]) assert prefix_probs.shape == (batch_size, inputs.shape[1]) assert labels.shape == inputs.shape + # First input should be BOS token + assert jnp.all(inputs[:, 0] == bos_token) diff --git a/tests/generative_processes/test_torch_generator.py b/tests/generative_processes/test_torch_generator.py index b532c5cc..ba6d4e0c 100644 --- a/tests/generative_processes/test_torch_generator.py +++ b/tests/generative_processes/test_torch_generator.py @@ -128,5 +128,40 @@ def test_generate_data_batch_with_full_history(): assert isinstance(prefix_probs, jax.Array) assert isinstance(inputs, torch.Tensor) + # Without BOS, belief_states is aligned with inputs (one less than sequence_len) + assert belief_states.shape == (batch_size, sequence_len - 1, gen_state.shape[0]) + assert prefix_probs.shape == (batch_size, inputs.shape[1]) + + +def test_generate_data_batch_with_full_history_bos(): + """Torch generator should align belief states with inputs when BOS is used.""" + hmm = build_hidden_markov_model("zero_one_random", process_params={"p": 0.5}) + batch_size = 3 + sequence_len = 5 + bos_token = 2 + gen_state: jax.Array = hmm.initial_state + states = jnp.repeat(gen_state[None, :], batch_size, axis=0) + key = jax.random.PRNGKey(123) + result = generate_data_batch_with_full_history( + states, + hmm, + batch_size, + sequence_len, + key, + bos_token=bos_token, + ) + belief_states = result["belief_states"] + prefix_probs = result["prefix_probabilities"] + inputs = result["inputs"] + + assert isinstance(belief_states, jax.Array) + assert isinstance(prefix_probs, jax.Array) + assert isinstance(inputs, torch.Tensor) + + # With BOS, inputs has sequence_len positions (BOS + sequence_len-1 tokens) + # belief_states is aligned with inputs + assert inputs.shape == (batch_size, sequence_len) assert belief_states.shape == 
(batch_size, sequence_len, gen_state.shape[0]) assert prefix_probs.shape == (batch_size, inputs.shape[1]) + # First input should be BOS token + assert torch.all(inputs[:, 0] == bos_token) diff --git a/tests/optimization/test_lr_schedulers.py b/tests/optimization/test_lr_schedulers.py new file mode 100644 index 00000000..dd7f6080 --- /dev/null +++ b/tests/optimization/test_lr_schedulers.py @@ -0,0 +1,224 @@ +"""Tests for custom learning rate schedulers.""" +# pylint: disable=protected-access + +import pytest +import torch +from torch.optim import SGD + +from simplexity.optimization.lr_schedulers import WindowedReduceLROnPlateau + + +@pytest.fixture +def optimizer() -> SGD: + """Create a simple optimizer for testing.""" + model = torch.nn.Linear(10, 1) + return SGD(model.parameters(), lr=0.1) + + +class TestWindowedReduceLROnPlateau: + """Tests for WindowedReduceLROnPlateau scheduler.""" + + def test_window_accumulation(self, optimizer: SGD): + """Test that losses accumulate in the window.""" + scheduler = WindowedReduceLROnPlateau(optimizer, window_size=5, update_every=1) + + for _i in range(3): + scheduler.step(1.0) + + assert len(scheduler._loss_window) == 3 + assert scheduler.get_window_average() is None # Window not full yet + + def test_window_full(self, optimizer: SGD): + """Test window average when window is full.""" + scheduler = WindowedReduceLROnPlateau(optimizer, window_size=5, update_every=1) + + for i in range(5): + scheduler.step(float(i)) # 0, 1, 2, 3, 4 + + assert len(scheduler._loss_window) == 5 + assert scheduler.get_window_average() == 2.0 # (0+1+2+3+4)/5 + + def test_window_sliding(self, optimizer: SGD): + """Test that window slides (old values pushed out).""" + scheduler = WindowedReduceLROnPlateau(optimizer, window_size=3, update_every=1) + + for i in range(5): + scheduler.step(float(i)) # Window should contain [2, 3, 4] + + assert list(scheduler._loss_window) == [2.0, 3.0, 4.0] + assert scheduler.get_window_average() == 3.0 + + def 
test_update_every_skips_updates(self, optimizer: SGD): + """Test that scheduler only updates every N steps.""" + scheduler = WindowedReduceLROnPlateau(optimizer, window_size=2, update_every=3, patience=0, factor=0.5) + + # Fill window with high loss + scheduler.step(10.0) + scheduler.step(10.0) + initial_lr = optimizer.param_groups[0]["lr"] + + # Step 3 should trigger update (window full, step_count=3) + scheduler.step(10.0) + # But patience=0 means it needs one more "bad" update to reduce + + # Steps 4, 5 - no update + scheduler.step(10.0) + scheduler.step(10.0) + assert optimizer.param_groups[0]["lr"] == initial_lr # No change yet + + # Step 6 should trigger update + scheduler.step(10.0) + # Now we should see LR reduction after patience is exhausted + + def test_lr_reduction_on_plateau(self, optimizer: SGD): + """Test that LR is reduced when loss plateaus.""" + scheduler = WindowedReduceLROnPlateau( + optimizer, + window_size=2, + update_every=1, + patience=2, + factor=0.5, + threshold=0.0, + ) + initial_lr = optimizer.param_groups[0]["lr"] + + # Fill window and trigger updates with constant loss + for _ in range(10): + scheduler.step(1.0) + + # After patience exhausted, LR should be reduced + assert optimizer.param_groups[0]["lr"] < initial_lr + + def test_lr_no_reduction_when_improving(self, optimizer: SGD): + """Test that LR is not reduced when loss is improving.""" + scheduler = WindowedReduceLROnPlateau( + optimizer, + window_size=2, + update_every=1, + patience=2, + factor=0.5, + ) + initial_lr = optimizer.param_groups[0]["lr"] + + # Continuously improving loss + for i in range(10, 0, -1): + scheduler.step(float(i)) + + # LR should not be reduced + assert optimizer.param_groups[0]["lr"] == initial_lr + + def test_state_dict_save_load(self, optimizer: SGD): + """Test that state can be saved and loaded.""" + scheduler = WindowedReduceLROnPlateau(optimizer, window_size=5, update_every=10) + + # Add some state + for i in range(3): + scheduler.step(float(i)) + + 
state = scheduler.state_dict() + assert state["window_size"] == 5 + assert state["update_every"] == 10 + assert state["loss_window"] == [0.0, 1.0, 2.0] + assert state["step_count"] == 3 + + # Create new scheduler and load state + new_scheduler = WindowedReduceLROnPlateau(optimizer, window_size=1, update_every=1) + new_scheduler.load_state_dict(state) + + assert new_scheduler.window_size == 5 + assert new_scheduler.update_every == 10 + assert list(new_scheduler._loss_window) == [0.0, 1.0, 2.0] + assert new_scheduler._step_count == 3 + + def test_mode_max(self, optimizer: SGD): + """Test scheduler works with mode='max'.""" + scheduler = WindowedReduceLROnPlateau( + optimizer, + window_size=2, + update_every=1, + patience=2, + factor=0.5, + mode="max", + threshold=0.0, + ) + initial_lr = optimizer.param_groups[0]["lr"] + + # Constant low metric (bad for max mode) + for _ in range(10): + scheduler.step(0.1) + + # LR should be reduced + assert optimizer.param_groups[0]["lr"] < initial_lr + + def test_cooldown(self, optimizer: SGD): + """Test that cooldown delays subsequent reductions.""" + scheduler = WindowedReduceLROnPlateau( + optimizer, + window_size=2, + update_every=1, + patience=1, + factor=0.5, + cooldown=10, + threshold=0.0, + ) + initial_lr = optimizer.param_groups[0]["lr"] + + # Trigger first reduction (need patience+1 bad updates after window fills) + for _ in range(5): + scheduler.step(1.0) + + # Should have had one reduction by now + lr_after_first_reduction = optimizer.param_groups[0]["lr"] + assert lr_after_first_reduction < initial_lr + + # Record how many reductions happened with cooldown + reduction_count_with_cooldown = 0 + for _ in range(15): + old_lr = optimizer.param_groups[0]["lr"] + scheduler.step(1.0) + if optimizer.param_groups[0]["lr"] < old_lr: + reduction_count_with_cooldown += 1 + + # Now test without cooldown - should reduce more frequently + optimizer2 = SGD(torch.nn.Linear(10, 1).parameters(), lr=0.1) + scheduler2 = 
WindowedReduceLROnPlateau( + optimizer2, + window_size=2, + update_every=1, + patience=1, + factor=0.5, + cooldown=0, + threshold=0.0, + ) + + # Same warmup + for _ in range(5): + scheduler2.step(1.0) + + reduction_count_without_cooldown = 0 + for _ in range(15): + old_lr = optimizer2.param_groups[0]["lr"] + scheduler2.step(1.0) + if optimizer2.param_groups[0]["lr"] < old_lr: + reduction_count_without_cooldown += 1 + + # With cooldown, should have fewer reductions + assert reduction_count_with_cooldown < reduction_count_without_cooldown + + def test_min_lr(self, optimizer: SGD): + """Test that LR does not go below min_lr.""" + scheduler = WindowedReduceLROnPlateau( + optimizer, + window_size=2, + update_every=1, + patience=1, + factor=0.1, + min_lr=0.01, + threshold=0.0, + ) + + # Many updates with constant loss to trigger multiple reductions + for _ in range(100): + scheduler.step(1.0) + + assert optimizer.param_groups[0]["lr"] >= 0.01 diff --git a/tests/structured_configs/test_activation_tracker_config.py b/tests/structured_configs/test_activation_tracker_config.py index 05312522..3db4dea0 100644 --- a/tests/structured_configs/test_activation_tracker_config.py +++ b/tests/structured_configs/test_activation_tracker_config.py @@ -4,6 +4,7 @@ import pytest from omegaconf import DictConfig, OmegaConf +from simplexity.exceptions import ConfigValidationError from simplexity.run_management.run_management import ( _instantiate_activation_tracker, _setup_activation_trackers, @@ -14,9 +15,6 @@ validate_activation_analysis_config, validate_activation_tracker_config, ) -from simplexity.structured_configs.base import ( - ConfigValidationError, -) @pytest.fixture @@ -252,7 +250,7 @@ def test_instantiate_activation_tracker_builds_analysis_objects(tracker_cfg: Dic probs = jnp.ones((1, 2), dtype=jnp.float32) * 0.5 activations = {"layer": jnp.ones((1, 2, 4), dtype=jnp.float32)} - scalars, projections = tracker.analyze( + scalars, projections, visualizations = tracker.analyze( 
inputs=inputs, beliefs=beliefs, probs=probs, @@ -260,3 +258,48 @@ def test_instantiate_activation_tracker_builds_analysis_objects(tracker_cfg: Dic ) assert "pca_custom/layer_cumvar_1" in scalars assert any(key.startswith("linear/") for key in projections) + assert visualizations == {} + + +def test_instantiate_activation_tracker_with_visuals(tracker_cfg: DictConfig, monkeypatch: pytest.MonkeyPatch) -> None: + """Tracker instantiation should preserve visualization configs.""" + + monkeypatch.setattr( + "simplexity.activations.activation_visualizations.build_altair_chart", + lambda plot_cfg, registry, controls=None: {"backend": "altair"}, + ) + monkeypatch.setattr( + "simplexity.activations.activation_visualizations.build_plotly_figure", + lambda plot_cfg, registry, controls=None: {"backend": "plotly"}, + ) + + tracker_cfg.activation_tracker.instance.analyses.pca.visualizations = [ + { + "name": "weights_only", + "data_mapping": { + "mappings": { + "weight": {"source": "weights"}, + } + }, + "layer": { + "geometry": {"type": "point"}, + "aesthetics": {"x": {"field": "weight", "type": "quantitative"}}, + }, + } + ] + + tracker = _instantiate_activation_tracker(tracker_cfg, "activation_tracker.instance") + + inputs = jnp.array([[0, 1]], dtype=jnp.int32) + beliefs = jnp.ones((1, 2, 2), dtype=jnp.float32) * 0.5 + probs = jnp.ones((1, 2), dtype=jnp.float32) * 0.5 + activations = {"layer": jnp.ones((1, 2, 4), dtype=jnp.float32)} + + _, _, visualizations = tracker.analyze( + inputs=inputs, + beliefs=beliefs, + probs=probs, + activations=activations, + ) + + assert "pca_custom/weights_only" in visualizations diff --git a/tests/structured_configs/test_learning_rate_scheduler.py b/tests/structured_configs/test_learning_rate_scheduler.py new file mode 100644 index 00000000..eb99324a --- /dev/null +++ b/tests/structured_configs/test_learning_rate_scheduler.py @@ -0,0 +1,329 @@ +"""Tests for learning rate scheduler configuration validation.""" + +import pytest +from omegaconf 
import OmegaConf + +from simplexity.exceptions import ConfigValidationError +from simplexity.structured_configs.learning_rate_scheduler import ( + is_lr_scheduler_config, + is_reduce_lr_on_plateau_config, + is_windowed_reduce_lr_on_plateau_config, + validate_lr_scheduler_config, + validate_reduce_lr_on_plateau_instance_config, + validate_windowed_reduce_lr_on_plateau_instance_config, +) + + +class TestIsReduceLROnPlateauConfig: + """Tests for is_reduce_lr_on_plateau_config.""" + + def test_is_reduce_lr_on_plateau_config(self): + """Test that ReduceLROnPlateau target is correctly identified.""" + cfg = OmegaConf.create({"_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau"}) + assert is_reduce_lr_on_plateau_config(cfg) is True + + def test_is_reduce_lr_on_plateau_config_wrong_target(self): + """Test that non-ReduceLROnPlateau target returns False.""" + cfg = OmegaConf.create({"_target_": "torch.optim.lr_scheduler.StepLR"}) + assert is_reduce_lr_on_plateau_config(cfg) is False + + def test_is_reduce_lr_on_plateau_config_no_target(self): + """Test that missing _target_ returns False.""" + cfg = OmegaConf.create({}) + assert is_reduce_lr_on_plateau_config(cfg) is False + + +class TestIsWindowedReduceLROnPlateauConfig: + """Tests for is_windowed_reduce_lr_on_plateau_config.""" + + def test_is_windowed_reduce_lr_on_plateau_config(self): + """Test that WindowedReduceLROnPlateau target is correctly identified.""" + cfg = OmegaConf.create({"_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau"}) + assert is_windowed_reduce_lr_on_plateau_config(cfg) is True + + def test_is_windowed_reduce_lr_on_plateau_config_wrong_target(self): + """Test that non-WindowedReduceLROnPlateau target returns False.""" + cfg = OmegaConf.create({"_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau"}) + assert is_windowed_reduce_lr_on_plateau_config(cfg) is False + + def test_is_windowed_reduce_lr_on_plateau_config_no_target(self): + """Test that missing _target_ returns False.""" + 
cfg = OmegaConf.create({}) + assert is_windowed_reduce_lr_on_plateau_config(cfg) is False + + +class TestIsLrSchedulerConfig: + """Tests for is_lr_scheduler_config.""" + + def test_is_lr_scheduler_config_reduce_on_plateau(self): + """Test is_lr_scheduler_config with ReduceLROnPlateau target.""" + cfg = OmegaConf.create({"_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau"}) + assert is_lr_scheduler_config(cfg) is True + + def test_is_lr_scheduler_config_windowed(self): + """Test is_lr_scheduler_config with WindowedReduceLROnPlateau target.""" + cfg = OmegaConf.create({"_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau"}) + assert is_lr_scheduler_config(cfg) is True + + def test_is_lr_scheduler_config_other_scheduler(self): + """Test is_lr_scheduler_config with other scheduler target returns False.""" + cfg = OmegaConf.create({"_target_": "torch.optim.lr_scheduler.StepLR"}) + assert is_lr_scheduler_config(cfg) is False + + def test_is_lr_scheduler_config_optimizer(self): + """Test is_lr_scheduler_config with optimizer target returns False.""" + cfg = OmegaConf.create({"_target_": "torch.optim.Adam"}) + assert is_lr_scheduler_config(cfg) is False + + def test_is_lr_scheduler_config_no_target(self): + """Test is_lr_scheduler_config with missing _target_.""" + cfg = OmegaConf.create({}) + assert is_lr_scheduler_config(cfg) is False + + +class TestValidateReduceLROnPlateau: + """Tests for validate_reduce_lr_on_plateau_instance_config.""" + + def test_valid_config(self): + """Test validation passes with valid ReduceLROnPlateau config.""" + cfg = OmegaConf.create( + { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + "mode": "min", + "factor": 0.1, + "patience": 10, + "threshold": 1e-4, + "cooldown": 0, + "min_lr": 0.0, + "eps": 1e-8, + } + ) + validate_reduce_lr_on_plateau_instance_config(cfg) + + def test_valid_max_mode(self): + """Test validation passes with mode='max'.""" + cfg = OmegaConf.create( + { + "_target_": 
"torch.optim.lr_scheduler.ReduceLROnPlateau", + "mode": "max", + } + ) + validate_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_mode(self): + """Test validation fails with invalid mode.""" + cfg = OmegaConf.create( + { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + "mode": "invalid", + } + ) + with pytest.raises(ConfigValidationError, match="mode must be 'min' or 'max'"): + validate_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_factor(self): + """Test validation fails with zero factor.""" + cfg = OmegaConf.create( + { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + "factor": 0.0, + } + ) + with pytest.raises(ConfigValidationError): + validate_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_patience(self): + """Test validation fails with negative patience.""" + cfg = OmegaConf.create( + { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + "patience": -1, + } + ) + with pytest.raises(ConfigValidationError): + validate_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_cooldown(self): + """Test validation fails with negative cooldown.""" + cfg = OmegaConf.create( + { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + "cooldown": -5, + } + ) + with pytest.raises(ConfigValidationError): + validate_reduce_lr_on_plateau_instance_config(cfg) + + +class TestValidateWindowedReduceLROnPlateau: + """Tests for validate_windowed_reduce_lr_on_plateau_instance_config.""" + + def test_valid_config(self): + """Test validation passes with valid WindowedReduceLROnPlateau config.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "window_size": 10, + "update_every": 100, + "mode": "min", + "factor": 0.1, + "patience": 10, + "threshold": 1e-4, + "cooldown": 0, + "min_lr": 0.0, + "eps": 1e-8, + } + ) + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_valid_max_mode(self): + """Test validation passes 
with mode='max'.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "mode": "max", + } + ) + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_mode(self): + """Test validation fails with invalid mode.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "mode": "invalid", + } + ) + with pytest.raises(ConfigValidationError, match="mode must be 'min' or 'max'"): + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_window_size(self): + """Test validation fails with zero window_size.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "window_size": 0, + } + ) + with pytest.raises(ConfigValidationError): + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_update_every(self): + """Test validation fails with zero update_every.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "update_every": 0, + } + ) + with pytest.raises(ConfigValidationError): + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_factor(self): + """Test validation fails with zero factor.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "factor": 0.0, + } + ) + with pytest.raises(ConfigValidationError): + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_patience(self): + """Test validation fails with negative patience.""" + cfg = OmegaConf.create( + { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "patience": -1, + } + ) + with pytest.raises(ConfigValidationError): + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + def test_invalid_cooldown(self): + """Test validation fails with negative cooldown.""" + cfg = OmegaConf.create( + { + "_target_": 
"simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "cooldown": -5, + } + ) + with pytest.raises(ConfigValidationError): + validate_windowed_reduce_lr_on_plateau_instance_config(cfg) + + +class TestValidateLrSchedulerConfig: + """Tests for validate_lr_scheduler_config.""" + + def test_valid_reduce_lr_on_plateau(self): + """Test validation passes with valid ReduceLROnPlateau config.""" + cfg = OmegaConf.create( + { + "instance": { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + "patience": 5, + }, + } + ) + validate_lr_scheduler_config(cfg) + + def test_valid_windowed_reduce_lr_on_plateau(self): + """Test validation passes with valid WindowedReduceLROnPlateau config.""" + cfg = OmegaConf.create( + { + "instance": { + "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "window_size": 10, + "update_every": 100, + "patience": 5, + }, + } + ) + validate_lr_scheduler_config(cfg) + + def test_valid_with_name(self): + """Test validation passes with optional name field.""" + cfg = OmegaConf.create( + { + "instance": { + "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau", + }, + "name": "my_scheduler", + } + ) + validate_lr_scheduler_config(cfg) + + def test_invalid_instance_not_dict(self): + """Test validation fails when instance is not a DictConfig.""" + cfg = OmegaConf.create( + { + "instance": "not_a_dict", + } + ) + with pytest.raises(ConfigValidationError, match="instance must be a DictConfig"): + validate_lr_scheduler_config(cfg) + + def test_invalid_not_plateau_scheduler(self): + """Test validation fails when target is not a plateau scheduler.""" + cfg = OmegaConf.create( + { + "instance": { + "_target_": "torch.optim.lr_scheduler.StepLR", + }, + } + ) + with pytest.raises(ConfigValidationError, match="must be ReduceLROnPlateau or WindowedReduceLROnPlateau"): + validate_lr_scheduler_config(cfg) + + def test_invalid_optimizer_target(self): + """Test validation fails when target is an optimizer.""" + cfg = OmegaConf.create( + { + 
"instance": { + "_target_": "torch.optim.Adam", + }, + } + ) + with pytest.raises(ConfigValidationError, match="must be ReduceLROnPlateau or WindowedReduceLROnPlateau"): + validate_lr_scheduler_config(cfg) diff --git a/tests/utils/test_analysis_utils.py b/tests/utils/test_analysis_utils.py index 0320c307..ebb9d2bb 100644 --- a/tests/utils/test_analysis_utils.py +++ b/tests/utils/test_analysis_utils.py @@ -328,7 +328,6 @@ def test_basic_functionality(self, simple_inputs, simple_beliefs, simple_probs, assert jnp.allclose(jnp.sum(dataset.probs), 1.0) # Check shapes are consistent - assert isinstance(dataset.beliefs, jax.Array) n_prefixes = dataset.beliefs.shape[0] assert dataset.probs.shape[0] == n_prefixes diff --git a/tests/visualization/test_altair_renderer.py b/tests/visualization/test_altair_renderer.py new file mode 100644 index 00000000..6c9bae29 --- /dev/null +++ b/tests/visualization/test_altair_renderer.py @@ -0,0 +1,330 @@ +"""Tests for altair renderer.""" + +import pandas as pd +import pytest + +from simplexity.exceptions import ConfigValidationError +from simplexity.visualization.altair_renderer import ( + _apply_geometry, + _build_layer_chart, + _encode_aesthetics, + build_altair_chart, +) +from simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + ChannelAestheticsConfig, + DataConfig, + FacetConfig, + GeometryConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, +) + +try: + import altair as alt +except ImportError: + pytest.skip("Altair not installed", allow_module_level=True) + + +class TestBuildAltairChart: + """Tests for build_altair_chart function.""" + + def test_raises_when_no_layers(self): + """Test that empty layers raises error.""" + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[]) + registry = DictDataRegistry({"main": pd.DataFrame()}) + with pytest.raises(ConfigValidationError, match="at least one layer"): + 
build_altair_chart(plot_cfg, registry) + + def test_builds_simple_point_chart(self): + """Test building a simple point chart.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_builds_line_chart(self): + """Test building a line chart.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="line"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_builds_bar_chart(self): + """Test building a bar chart.""" + df = pd.DataFrame({"category": ["a", "b", "c"], "value": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="bar"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="category", type="nominal"), + y=ChannelAestheticsConfig(field="value", type="quantitative"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_applies_color_encoding(self): + """Test that color encoding is applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "cat": ["a", "b", "a"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + 
x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + color=ChannelAestheticsConfig(field="cat", type="nominal"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_applies_size(self): + """Test that chart size is applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + size = PlotSizeConfig(width=800, height=600) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], size=size) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_applies_guides(self): + """Test that plot guides are applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + guides = PlotLevelGuideConfig(title="My Chart") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], guides=guides) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_multiple_layers(self): + """Test building chart with multiple layers.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer1 = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + layer2 = LayerConfig( + 
geometry=GeometryConfig(type="line"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer1, layer2]) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + +class TestApplyGeometry: + """Tests for _apply_geometry function.""" + + def test_point_geometry(self): + """Test point geometry application.""" + chart = alt.Chart(pd.DataFrame({"x": [1]})) + geometry = GeometryConfig(type="point") + result = _apply_geometry(chart, geometry) + assert result is not None + + def test_line_geometry(self): + """Test line geometry application.""" + chart = alt.Chart(pd.DataFrame({"x": [1]})) + geometry = GeometryConfig(type="line") + result = _apply_geometry(chart, geometry) + assert result is not None + + def test_bar_geometry(self): + """Test bar geometry application.""" + chart = alt.Chart(pd.DataFrame({"x": [1]})) + geometry = GeometryConfig(type="bar") + result = _apply_geometry(chart, geometry) + assert result is not None + + def test_area_geometry(self): + """Test area geometry application.""" + chart = alt.Chart(pd.DataFrame({"x": [1]})) + geometry = GeometryConfig(type="area") + result = _apply_geometry(chart, geometry) + assert result is not None + + def test_invalid_geometry_raises(self): + """Test that invalid geometry type raises error.""" + chart = alt.Chart(pd.DataFrame({"x": [1]})) + geometry = GeometryConfig(type="invalid_type") + with pytest.raises(ConfigValidationError, match="does not support geometry"): + _apply_geometry(chart, geometry) + + +class TestEncodeAesthetics: + """Tests for _encode_aesthetics function.""" + + def test_basic_x_y_encoding(self): + """Test basic x and y encoding.""" + aes = AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", 
type="quantitative"), + ) + encoding = _encode_aesthetics(aes) + assert "x" in encoding + assert "y" in encoding + + def test_color_encoding(self): + """Test color encoding.""" + aes = AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + color=ChannelAestheticsConfig(field="cat", type="nominal"), + ) + encoding = _encode_aesthetics(aes) + assert "color" in encoding + + def test_size_encoding(self): + """Test size encoding.""" + aes = AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + size=ChannelAestheticsConfig(field="size", type="quantitative"), + ) + encoding = _encode_aesthetics(aes) + assert "size" in encoding + + def test_opacity_encoding(self): + """Test opacity encoding.""" + aes = AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + opacity=ChannelAestheticsConfig(field="opacity", type="quantitative"), + ) + encoding = _encode_aesthetics(aes) + assert "opacity" in encoding + + def test_empty_aesthetics_returns_empty(self): + """Test that empty aesthetics returns empty dict.""" + aes = AestheticsConfig() + encoding = _encode_aesthetics(aes) + assert not encoding + + +class TestFaceting: + """Tests for faceted charts.""" + + def test_column_facet(self): + """Test column faceting.""" + df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", "b"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + facet = FacetConfig(column="group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_row_facet(self): + """Test row faceting.""" + df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", 
"b"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + facet = FacetConfig(row="group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + def test_row_and_column_facet(self): + """Test both row and column faceting.""" + df = pd.DataFrame( + { + "x": [1, 2, 3, 4, 5, 6, 7, 8], + "y": [4, 5, 6, 7, 8, 9, 10, 11], + "row_group": ["a", "a", "a", "a", "b", "b", "b", "b"], + "col_group": ["x", "x", "y", "y", "x", "x", "y", "y"], + } + ) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + facet = FacetConfig(row="row_group", column="col_group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + chart = build_altair_chart(plot_cfg, registry) + assert chart is not None + + +class TestBuildLayerChart: + """Tests for _build_layer_chart function.""" + + def test_builds_chart_from_layer(self): + """Test building a chart from layer config.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + chart = _build_layer_chart(layer, df) + assert chart is not None + + def test_applies_geometry_props(self): + """Test that geometry props are applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point", 
props={"size": 100}), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + chart = _build_layer_chart(layer, df) + assert chart is not None diff --git a/tests/visualization/test_data_pipeline.py b/tests/visualization/test_data_pipeline.py new file mode 100644 index 00000000..08c50730 --- /dev/null +++ b/tests/visualization/test_data_pipeline.py @@ -0,0 +1,317 @@ +"""Tests for data pipeline transforms and materialization.""" + +import pandas as pd +import pytest + +from simplexity.exceptions import ConfigValidationError +from simplexity.visualization.data_pipeline import ( + _apply_transform, + _derive_fold_names, + _parse_function_expr, + apply_filters, + apply_transforms, + build_plot_level_dataframe, + materialize_data, + normalize_expression, + resolve_layer_dataframe, +) +from simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.structured_configs import ( + DataConfig, + LayerConfig, + TransformConfig, +) + + +class TestNormalizeExpression: + """Tests for normalize_expression.""" + + def test_removes_datum_prefix(self): + """Test that datum. 
prefix is removed.""" + assert normalize_expression("datum.x > 5") == "x > 5" + + def test_strips_whitespace(self): + """Test that whitespace is stripped.""" + assert normalize_expression(" x > 5 ") == "x > 5" + + +class TestApplyFilters: + """Tests for apply_filters.""" + + def test_single_filter(self): + """Test applying a single filter.""" + df = pd.DataFrame({"x": [1, 2, 3, 4, 5], "y": [10, 20, 30, 40, 50]}) + result = apply_filters(df, ["x > 2"]) + assert list(result["x"]) == [3, 4, 5] + + def test_multiple_filters(self): + """Test applying multiple filters.""" + df = pd.DataFrame({"x": [1, 2, 3, 4, 5], "y": [10, 20, 30, 40, 50]}) + result = apply_filters(df, ["x > 2", "y < 50"]) + assert list(result["x"]) == [3, 4] + + def test_filter_with_datum_prefix(self): + """Test that datum. prefix is normalized.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + result = apply_filters(df, ["datum.x > 1"]) + assert list(result["x"]) == [2, 3] + + +class TestMaterializeData: + """Tests for materialize_data.""" + + def test_basic_materialization(self): + """Test basic data materialization.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + registry = DictDataRegistry({"main": df}) + data_cfg = DataConfig(source="main") + result = materialize_data(data_cfg, registry) + assert list(result.columns) == ["x", "y"] + assert len(result) == 3 + + def test_with_filters(self): + """Test materialization with filters.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + registry = DictDataRegistry({"main": df}) + data_cfg = DataConfig(source="main", filters=["x > 1"]) + result = materialize_data(data_cfg, registry) + assert len(result) == 2 + + def test_with_column_selection(self): + """Test materialization with column selection.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9]}) + registry = DictDataRegistry({"main": df}) + data_cfg = DataConfig(source="main", columns=["x", "z"]) + result = materialize_data(data_cfg, registry) + assert 
list(result.columns) == ["x", "z"] + + def test_missing_column_raises(self): + """Test that missing columns raise an error.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + registry = DictDataRegistry({"main": df}) + data_cfg = DataConfig(source="main", columns=["x", "missing"]) + with pytest.raises(ConfigValidationError, match="not present"): + materialize_data(data_cfg, registry) + + +class TestBuildPlotLevelDataframe: + """Tests for build_plot_level_dataframe.""" + + def test_with_transforms(self): + """Test building dataframe with transforms.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + registry = DictDataRegistry({"main": df}) + data_cfg = DataConfig(source="main") + transforms = [TransformConfig(op="calculate", expr="x * 2", as_field="x2")] + result = build_plot_level_dataframe(data_cfg, transforms, registry) + assert "x2" in result.columns + assert list(result["x2"]) == [2, 4, 6] + + def test_without_transforms(self): + """Test building dataframe without transforms.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + registry = DictDataRegistry({"main": df}) + data_cfg = DataConfig(source="main") + result = build_plot_level_dataframe(data_cfg, [], registry) + assert list(result.columns) == ["x", "y"] + assert len(result) == 3 + + +class TestResolveLayerDataframe: + """Tests for resolve_layer_dataframe.""" + + def test_uses_plot_df_when_no_layer_data(self): + """Test that plot dataframe is used when layer has no data config.""" + plot_df = pd.DataFrame({"x": [1, 2, 3]}) + layer = LayerConfig() + result = resolve_layer_dataframe(layer, plot_df, {}) + assert list(result["x"]) == [1, 2, 3] + + def test_uses_layer_data_when_specified(self): + """Test that layer data config is used when specified.""" + plot_df = pd.DataFrame({"x": [1, 2, 3]}) + layer_df = pd.DataFrame({"y": [4, 5, 6]}) + registry = DictDataRegistry({"layer_data": layer_df}) + layer = LayerConfig(data=DataConfig(source="layer_data")) + result = resolve_layer_dataframe(layer, plot_df, 
registry) + assert "y" in result.columns + assert "x" not in result.columns + + def test_applies_layer_transforms(self): + """Test that layer transforms are applied.""" + plot_df = pd.DataFrame({"x": [1, 2, 3]}) + layer = LayerConfig(transforms=[TransformConfig(op="filter", filter="x > 1")]) + result = resolve_layer_dataframe(layer, plot_df, {}) + assert len(result) == 2 + + +class TestApplyTransform: + """Tests for individual transform operations.""" + + def test_filter_transform(self): + """Test filter transform.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="filter", filter="x > 1") + result = _apply_transform(df, transform) + assert len(result) == 2 + + def test_filter_requires_expression(self): + """Test that filter transform requires filter expression.""" + with pytest.raises(ConfigValidationError, match="filter"): + TransformConfig(op="filter") + + def test_calculate_transform(self): + """Test calculate transform.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="calculate", expr="x * 2", as_field="x2") + result = _apply_transform(df, transform) + assert list(result["x2"]) == [2, 4, 6] + + def test_calculate_requires_as_field(self): + """Test that calculate transform requires as_field.""" + with pytest.raises(ConfigValidationError, match="as_field"): + TransformConfig(op="calculate", expr="x * 2") + + def test_aggregate_transform(self): + """Test aggregate transform.""" + df = pd.DataFrame({"group": ["a", "a", "b"], "value": [1, 2, 3]}) + transform = TransformConfig(op="aggregate", groupby=["group"], aggregations={"total": "sum(value)"}) + result = _apply_transform(df, transform) + assert len(result) == 2 + assert "total" in result.columns + + def test_aggregate_requires_groupby_and_aggregations(self): + """Test that aggregate transform requires groupby and aggregations.""" + with pytest.raises(ConfigValidationError, match="groupby"): + TransformConfig(op="aggregate") + + def test_bin_transform(self): + 
"""Test bin transform.""" + df = pd.DataFrame({"x": [1, 5, 10, 15, 20]}) + transform = TransformConfig(op="bin", field="x", binned_as="x_bin", maxbins=5) + result = _apply_transform(df, transform) + assert "x_bin" in result.columns + + def test_bin_requires_field_and_binned_as(self): + """Test that bin transform requires field and binned_as.""" + with pytest.raises(ConfigValidationError, match="field"): + TransformConfig(op="bin") + + def test_window_transform_rank(self): + """Test window transform with rank function.""" + df = pd.DataFrame({"x": [3, 1, 2]}) + transform = TransformConfig(op="window", window={"x_rank": "rank(x)"}) + result = _apply_transform(df, transform) + assert "x_rank" in result.columns + + def test_window_transform_cumsum(self): + """Test window transform with cumsum function.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="window", window={"x_cumsum": "cumsum(x)"}) + result = _apply_transform(df, transform) + assert list(result["x_cumsum"]) == [1, 3, 6] + + def test_window_unsupported_function(self): + """Test that unsupported window function raises error.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="window", window={"x_bad": "unsupported(x)"}) + with pytest.raises(ConfigValidationError, match="not supported"): + _apply_transform(df, transform) + + def test_window_requires_window_mapping(self): + """Test that window transform requires window mapping.""" + with pytest.raises(ConfigValidationError, match="window"): + TransformConfig(op="window") + + def test_fold_transform(self): + """Test fold transform.""" + df = pd.DataFrame({"a": [1, 2], "b": [3, 4], "c": [5, 6]}) + transform = TransformConfig(op="fold", fold_fields=["a", "b"]) + result = _apply_transform(df, transform) + assert "key" in result.columns + assert "value" in result.columns + assert len(result) == 4 + + def test_fold_requires_fold_fields(self): + """Test that fold transform requires fold_fields.""" + df = 
pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="fold") + with pytest.raises(ConfigValidationError, match="fold_fields"): + _apply_transform(df, transform) + + def test_pivot_not_implemented(self): + """Test that pivot transform is not implemented.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="pivot") + with pytest.raises(ConfigValidationError, match="not implemented"): + _apply_transform(df, transform) + + def test_unsupported_op_raises(self): + """Test that unsupported operation raises error.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + transform = TransformConfig(op="unknown") + with pytest.raises(ConfigValidationError, match="Unsupported"): + _apply_transform(df, transform) + + +class TestApplyTransforms: + """Tests for apply_transforms.""" + + def test_applies_multiple_transforms(self): + """Test that multiple transforms are applied sequentially.""" + df = pd.DataFrame({"x": [1, 2, 3, 4, 5]}) + transforms = [ + TransformConfig(op="filter", filter="x > 2"), + TransformConfig(op="calculate", expr="x * 10", as_field="x10"), + ] + result = apply_transforms(df, transforms) + assert len(result) == 3 + assert list(result["x10"]) == [30, 40, 50] + + def test_empty_transforms_returns_original(self): + """Test that empty transforms list returns original dataframe.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + result = apply_transforms(df, []) + assert list(result["x"]) == [1, 2, 3] + + +class TestParseFunctionExpr: + """Tests for _parse_function_expr.""" + + def test_parses_valid_expression(self): + """Test parsing a valid function expression.""" + func, field = _parse_function_expr("sum(value)", expected_arg=True) + assert func == "sum" + assert field == "value" + + def test_invalid_expression_raises(self): + """Test that invalid expression raises error.""" + with pytest.raises(ConfigValidationError, match="must be of the form"): + _parse_function_expr("invalid", expected_arg=True) + + def 
test_missing_arg_when_expected_raises(self): + """Test that missing argument raises error when expected.""" + with pytest.raises(ConfigValidationError, match="must supply an argument"): + _parse_function_expr("func()", expected_arg=True) + + +class TestDeriveFoldNames: + """Tests for _derive_fold_names.""" + + def test_default_names(self): + """Test default names when as_fields is None.""" + var_name, value_name = _derive_fold_names(None) + assert var_name == "key" + assert value_name == "value" + + def test_single_as_field(self): + """Test with single as_field.""" + var_name, value_name = _derive_fold_names(["custom_key"]) + assert var_name == "custom_key" + assert value_name == "value" + + def test_two_as_fields(self): + """Test with two as_fields.""" + var_name, value_name = _derive_fold_names(["custom_key", "custom_value"]) + assert var_name == "custom_key" + assert value_name == "custom_value" diff --git a/tests/visualization/test_history.py b/tests/visualization/test_history.py new file mode 100644 index 00000000..a3455982 --- /dev/null +++ b/tests/visualization/test_history.py @@ -0,0 +1,155 @@ +"""Tests for visualization history persistence utilities.""" + +from __future__ import annotations + +import copy + +import pandas as pd + +from simplexity.visualization.history import ( + history_paths, + load_history_dataframe, + plot_config_signature, + save_history_dataframe, +) +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + ChannelAestheticsConfig, + DataConfig, + GeometryConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, +) + + +def _simple_plot_config() -> PlotConfig: + layer = LayerConfig( + geometry=GeometryConfig(type="line"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="step", type="quantitative"), + y=ChannelAestheticsConfig(field="value", type="quantitative"), + ), + ) + return PlotConfig( + backend="altair", + data=DataConfig(source="main"), + layers=[layer], + 
size=PlotSizeConfig(width=400, height=200), + guides=PlotLevelGuideConfig(), + ) + + +def test_plot_config_signature_changes_with_config_mutation(): + """Test that plot config signature changes when config is mutated.""" + cfg = _simple_plot_config() + clone = copy.deepcopy(cfg) + clone.size.width = 800 + + assert plot_config_signature(cfg) != plot_config_signature(clone) + + +def test_history_round_trip(tmp_path): + """Test saving and loading history dataframe preserves data.""" + cfg = _simple_plot_config() + signature = plot_config_signature(cfg) + data_path, meta_path = history_paths(tmp_path, "demo") + df = pd.DataFrame({"step": [0, 1], "value": [0.1, 0.2]}) + + save_history_dataframe( + df, + data_path, + meta_path, + signature=signature, + analysis="analysis", + name="viz", + backend="altair", + ) + + loaded = load_history_dataframe(data_path, meta_path, expected_signature=signature) + pd.testing.assert_frame_equal(loaded, df) + + +def test_load_returns_empty_when_files_missing(tmp_path): + """Test that missing files return empty dataframe.""" + data_path, meta_path = history_paths(tmp_path, "nonexistent") + loaded = load_history_dataframe(data_path, meta_path, expected_signature="any") + assert loaded.empty + + +def test_load_returns_empty_when_metadata_corrupted(tmp_path): + """Test that corrupted metadata returns empty dataframe.""" + data_path, meta_path = history_paths(tmp_path, "corrupted") + data_path.parent.mkdir(parents=True, exist_ok=True) + data_path.write_text('{"step": 0, "value": 0.1}\n') + meta_path.write_text("not valid json {{{") + loaded = load_history_dataframe(data_path, meta_path, expected_signature="any") + assert loaded.empty + + +def test_load_returns_empty_when_signature_mismatched(tmp_path): + """Test that mismatched signature returns empty dataframe.""" + cfg = _simple_plot_config() + signature = plot_config_signature(cfg) + data_path, meta_path = history_paths(tmp_path, "mismatched") + df = pd.DataFrame({"step": [0], "value": 
[0.1]}) + + save_history_dataframe( + df, + data_path, + meta_path, + signature=signature, + analysis="analysis", + name="viz", + backend="altair", + ) + + loaded = load_history_dataframe(data_path, meta_path, expected_signature="different_signature") + assert loaded.empty + + +def test_load_returns_empty_when_data_corrupted(tmp_path): + """Test that corrupted data file returns empty dataframe.""" + cfg = _simple_plot_config() + signature = plot_config_signature(cfg) + data_path, meta_path = history_paths(tmp_path, "data_corrupted") + df = pd.DataFrame({"step": [0], "value": [0.1]}) + + save_history_dataframe( + df, + data_path, + meta_path, + signature=signature, + analysis="analysis", + name="viz", + backend="altair", + ) + + # Corrupt the data file + data_path.write_text("not valid jsonl {{{") + + loaded = load_history_dataframe(data_path, meta_path, expected_signature=signature) + assert loaded.empty + + +def test_plot_config_signature_handles_path_values(): + """Test that plot config signature can serialize Path objects.""" + layer = LayerConfig( + geometry=GeometryConfig(type="line"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="step", type="quantitative"), + y=ChannelAestheticsConfig(field="value", type="quantitative"), + ), + ) + cfg = PlotConfig( + backend="altair", + data=DataConfig(source="main"), + layers=[layer], + size=PlotSizeConfig(width=400, height=200), + guides=PlotLevelGuideConfig(), + ) + # This should not raise even with complex nested objects + sig = plot_config_signature(cfg) + assert isinstance(sig, str) + assert len(sig) == 64 # SHA256 hex length diff --git a/tests/visualization/test_plotly_renderer.py b/tests/visualization/test_plotly_renderer.py new file mode 100644 index 00000000..f427af61 --- /dev/null +++ b/tests/visualization/test_plotly_renderer.py @@ -0,0 +1,411 @@ +"""Tests for plotly renderer.""" + +import pandas as pd +import pytest + +from simplexity.exceptions import ConfigValidationError +from 
simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.plotly_renderer import ( + _axis_title, + _build_scatter2d, + _build_scatter3d, + _require_field, + _resolve_layer_dropdown, + _resolve_slider_control, + build_plotly_figure, +) +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + ChannelAestheticsConfig, + DataConfig, + FacetConfig, + GeometryConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, +) + + +class TestHelperFunctions: + """Tests for helper functions.""" + + def test_axis_title_from_config(self): + """Test axis title extraction from config.""" + config = ChannelAestheticsConfig(field="x", type="quantitative", title="X Axis") + assert _axis_title(config) == "X Axis" + + def test_axis_title_none_when_no_config(self): + """Test axis title is None when no config.""" + assert _axis_title(None) is None + + def test_axis_title_uses_field_when_no_title(self): + """Test axis title falls back to field name when no title.""" + config = ChannelAestheticsConfig(field="x", type="quantitative") + assert _axis_title(config) == "x" + + def test_require_field_extracts_field(self): + """Test that require_field extracts the field name.""" + config = ChannelAestheticsConfig(field="my_field", type="quantitative") + assert _require_field(config, "x") == "my_field" + + def test_require_field_raises_when_none(self): + """Test that require_field raises when config is None.""" + with pytest.raises(ConfigValidationError, match="requires"): + _require_field(None, "x") + + def test_require_field_raises_when_no_field(self): + """Test that require_field raises when field is None.""" + config = ChannelAestheticsConfig(field=None, type="quantitative") + with pytest.raises(ConfigValidationError, match="requires"): + _require_field(config, "x") + + +class TestResolveControls: + """Tests for control resolution functions.""" + + def test_resolve_slider_control_none_when_no_controls(self): + 
"""Test slider returns None when no controls.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + result = _resolve_slider_control(df, None) + assert result is None + + def test_resolve_layer_dropdown_none_when_no_controls(self): + """Test layer dropdown returns None when no controls.""" + df = pd.DataFrame({"x": [1, 2, 3]}) + result = _resolve_layer_dropdown(df, None) + assert result is None + + +class TestBuildScatter2D: + """Tests for 2D scatter plot building.""" + + def test_basic_scatter2d(self): + """Test basic 2D scatter plot building.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + fig = _build_scatter2d(layer, df, None) + assert fig is not None + assert len(fig.data) > 0 + + def test_scatter2d_with_color(self): + """Test 2D scatter with color encoding.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "category": ["a", "b", "a"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + color=ChannelAestheticsConfig(field="category", type="nominal"), + ), + ) + fig = _build_scatter2d(layer, df, None) + assert fig is not None + + +class TestBuildScatter3D: + """Tests for 3D scatter plot building.""" + + def test_basic_scatter3d(self): + """Test basic 3D scatter plot building.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + z=ChannelAestheticsConfig(field="z", type="quantitative"), + ), + ) + fig = _build_scatter3d(layer, df, 
None) + assert fig is not None + assert len(fig.data) > 0 + + def test_scatter3d_with_color(self): + """Test 3D scatter with color encoding.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9], "cat": ["a", "b", "a"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + z=ChannelAestheticsConfig(field="z", type="quantitative"), + color=ChannelAestheticsConfig(field="cat", type="nominal"), + ), + ) + fig = _build_scatter3d(layer, df, None) + assert fig is not None + + +class TestBuildPlotlyFigure: + """Tests for the main build_plotly_figure function.""" + + def test_raises_when_no_layers(self): + """Test that empty layers raises error.""" + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[]) + registry = DictDataRegistry({"main": pd.DataFrame()}) + with pytest.raises(ConfigValidationError, match="at least one layer"): + build_plotly_figure(plot_cfg, registry) + + def test_raises_when_multiple_layers(self): + """Test that multiple layers raises error (currently unsupported).""" + layer1 = LayerConfig(geometry=GeometryConfig(type="point")) + layer2 = LayerConfig(geometry=GeometryConfig(type="point")) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer1, layer2]) + registry = DictDataRegistry({"main": pd.DataFrame({"x": [1], "y": [2]})}) + with pytest.raises(ConfigValidationError, match="exactly one layer"): + build_plotly_figure(plot_cfg, registry) + + def test_raises_when_non_point_geometry(self): + """Test that non-point geometry raises error.""" + layer = LayerConfig(geometry=GeometryConfig(type="line")) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": pd.DataFrame({"x": [1], "y": [2]})}) + with pytest.raises(ConfigValidationError, match="point geometry"): + build_plotly_figure(plot_cfg, 
registry) + + def test_builds_2d_figure(self): + """Test building a basic 2D figure.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None + + def test_builds_3d_figure(self): + """Test building a basic 3D figure.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + z=ChannelAestheticsConfig(field="z", type="quantitative"), + ), + ) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None + + def test_applies_guides(self): + """Test that plot guides are applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + guides = PlotLevelGuideConfig(title="My Plot", subtitle="My Subtitle") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], guides=guides) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + layout = fig.to_dict()["layout"] + assert "My Plot" in layout["title"]["text"] + + def test_applies_size(self): + """Test that plot size is applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) 
+ layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + size = PlotSizeConfig(width=800, height=600) + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], size=size) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + layout = fig.to_dict()["layout"] + assert layout["width"] == 800 + assert layout["height"] == 600 + + +class TestFacetedFigures: + """Tests for faceted figure building.""" + + def test_builds_column_faceted_figure(self): + """Test building a column-faceted 2D figure.""" + df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", "b"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + facet = FacetConfig(column="group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None + + def test_builds_row_faceted_figure(self): + """Test building a row-faceted 2D figure.""" + df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", "b"]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + facet = FacetConfig(row="group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None + + def test_builds_3d_faceted_figure(self): + """Test building a 3D 
faceted figure.""" + df = pd.DataFrame( + { + "x": [1, 2, 3, 4], + "y": [4, 5, 6, 7], + "z": [7, 8, 9, 10], + "group": ["a", "a", "b", "b"], + } + ) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + z=ChannelAestheticsConfig(field="z", type="quantitative"), + ), + ) + facet = FacetConfig(column="group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None + + def test_builds_row_and_column_faceted_figure(self): + """Test building figure with both row and column facets.""" + df = pd.DataFrame( + { + "x": [1, 2, 3, 4, 5, 6, 7, 8], + "y": [1, 2, 3, 4, 5, 6, 7, 8], + "row_grp": ["r1", "r1", "r1", "r1", "r2", "r2", "r2", "r2"], + "col_grp": ["c1", "c1", "c2", "c2", "c1", "c1", "c2", "c2"], + } + ) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + facet = FacetConfig(row="row_grp", column="col_grp") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None + + +class TestScatterWithEncodings: + """Tests for scatter plots with various encodings.""" + + def test_scatter2d_with_size_encoding(self): + """Test 2D scatter with size encoding.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "size_val": [10, 20, 30]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + 
size=ChannelAestheticsConfig(field="size_val", type="quantitative"), + ), + ) + fig = _build_scatter2d(layer, df, None) + assert fig is not None + + def test_scatter2d_with_opacity(self): + """Test 2D scatter with opacity encoding.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + opacity=ChannelAestheticsConfig(field=None, type="quantitative", value=0.5), + ), + ) + fig = _build_scatter2d(layer, df, None) + assert fig is not None + + def test_scatter3d_with_size_encoding(self): + """Test 3D scatter with size encoding.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9], "size_val": [10, 20, 30]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + z=ChannelAestheticsConfig(field="z", type="quantitative"), + size=ChannelAestheticsConfig(field="size_val", type="quantitative"), + ), + ) + fig = _build_scatter3d(layer, df, None) + assert fig is not None + + def test_figure_with_background_color(self): + """Test that background color is applied.""" + df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + ), + ) + plot_cfg = PlotConfig( + data=DataConfig(source="main"), + layers=[layer], + background="#f0f0f0", + ) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + layout = fig.to_dict()["layout"] + assert layout["plot_bgcolor"] == "#f0f0f0" + + def test_faceted_figure_with_color_encoding(self): + """Test 
faceted figure with color encoding.""" + df = pd.DataFrame( + { + "x": [1, 2, 3, 4], + "y": [4, 5, 6, 7], + "group": ["a", "a", "b", "b"], + "category": ["cat1", "cat2", "cat1", "cat2"], + } + ) + layer = LayerConfig( + geometry=GeometryConfig(type="point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + color=ChannelAestheticsConfig(field="category", type="nominal"), + ), + ) + facet = FacetConfig(column="group") + plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) + registry = DictDataRegistry({"main": df}) + fig = build_plotly_figure(plot_cfg, registry) + assert fig is not None diff --git a/tests/visualization/test_renderer_controls.py b/tests/visualization/test_renderer_controls.py new file mode 100644 index 00000000..13d28184 --- /dev/null +++ b/tests/visualization/test_renderer_controls.py @@ -0,0 +1,234 @@ +"""Tests for renderer support of interactive controls.""" + +from __future__ import annotations + +import pandas as pd + +from simplexity.activations.activation_visualizations import ( + VisualizationControlDetail, + VisualizationControlsState, +) +from simplexity.visualization.altair_renderer import build_altair_chart +from simplexity.visualization.data_registry import DictDataRegistry +from simplexity.visualization.plotly_renderer import build_plotly_figure +from simplexity.visualization.structured_configs import ( + AestheticsConfig, + ChannelAestheticsConfig, + DataConfig, + GeometryConfig, + LayerConfig, + PlotConfig, + PlotLevelGuideConfig, + PlotSizeConfig, +) + + +def _base_plot_config(backend: str = "altair") -> PlotConfig: + layer = LayerConfig( + geometry=GeometryConfig(type="line" if backend == "altair" else "point"), + aesthetics=AestheticsConfig( + x=ChannelAestheticsConfig(field="x", type="quantitative"), + y=ChannelAestheticsConfig(field="y", type="quantitative"), + z=ChannelAestheticsConfig(field="z", 
type="quantitative") if backend == "plotly" else None, + ), + ) + return PlotConfig( + backend=backend, + data=DataConfig(source="main"), + layers=[layer], + size=PlotSizeConfig(), + guides=PlotLevelGuideConfig(), + ) + + +def _layer_controls(values: list[str]) -> VisualizationControlsState: + dropdown = VisualizationControlDetail(type="dropdown", field="layer", options=values) + return VisualizationControlsState(dropdown=dropdown) + + +def _slider_controls(values: list[int]) -> VisualizationControlsState: + slider = VisualizationControlDetail(type="slider", field="step", options=values) + return VisualizationControlsState(slider=slider) + + +def test_altair_renderer_adds_dropdown_selection(): + """Altair renderer should add a dropdown param when controls include layer dropdown.""" + df = pd.DataFrame( + { + "x": [0, 1, 0, 1], + "y": [0, 1, 1, 0], + "layer": ["layer_0", "layer_0", "layer_1", "layer_1"], + } + ) + plot_cfg = _base_plot_config(backend="altair") + registry = DictDataRegistry({"main": df}) + controls = _layer_controls(["layer_0", "layer_1"]) + + chart = build_altair_chart(plot_cfg, registry, controls=controls) + spec = chart.to_dict() + + assert "params" in spec + assert spec["params"][0]["name"] == "layer_dropdown" + assert spec["params"][0]["bind"]["options"] == ["layer_0", "layer_1"] + + +def test_altair_renderer_adds_slider_binding(): + """Test that Altair renderer adds slider binding from controls.""" + df = pd.DataFrame( + { + "x": [0, 1, 0, 1], + "y": [0, 1, 1, 0], + "step": [0, 0, 1, 1], + } + ) + plot_cfg = _base_plot_config(backend="altair") + registry = DictDataRegistry({"main": df}) + controls = _slider_controls([0, 1]) + + chart = build_altair_chart(plot_cfg, registry, controls=controls) + spec = chart.to_dict() + + assert any(param["name"].endswith("_slider") for param in spec.get("params", [])) + slider_param = next(param for param in spec["params"] if param["name"].endswith("_slider")) + assert slider_param["bind"]["input"] in {"range", 
"select"} + + +def test_altair_renderer_skips_slider_when_accumulating(): + """Test that slider binding is skipped when accumulate_steps is enabled.""" + df = pd.DataFrame( + { + "x": [0, 1, 0, 1], + "y": [0, 1, 1, 0], + "step": [0, 0, 1, 1], + } + ) + plot_cfg = _base_plot_config(backend="altair") + registry = DictDataRegistry({"main": df}) + controls = VisualizationControlsState( + slider=VisualizationControlDetail(type="slider", field="step", options=[0, 1]), + accumulate_steps=True, + ) + + chart = build_altair_chart(plot_cfg, registry, controls=controls) + spec = chart.to_dict() + + assert all(not param["name"].endswith("_slider") for param in spec.get("params", [])) + + +def test_altair_renderer_injects_detail_when_accumulating(): + """Test that detail encoding is added when accumulate_steps is enabled.""" + df = pd.DataFrame( + { + "x": [0, 1, 0, 1], + "y": [0, 1, 1, 0], + "step": [0, 0, 1, 1], + } + ) + plot_cfg = _base_plot_config(backend="altair") + registry = DictDataRegistry({"main": df}) + controls = VisualizationControlsState(accumulate_steps=True) + + chart = build_altair_chart(plot_cfg, registry, controls=controls) + spec = chart.to_dict() + + assert "detail" in spec.get("encoding", {}) + detail_encoding = spec["encoding"]["detail"] + if isinstance(detail_encoding, list): + detail_encoding = detail_encoding[0] + assert detail_encoding["field"] == "step" + + +def test_altair_renderer_skips_detail_when_step_axis_used(): + """Test that detail encoding is skipped when step is already used as an axis.""" + df = pd.DataFrame( + { + "step": [0, 1, 2, 3], + "y": [0.1, 0.2, 0.3, 0.4], + } + ) + plot_cfg = _base_plot_config(backend="altair") + assert plot_cfg.layers[0].aesthetics.x is not None + assert plot_cfg.layers[0].aesthetics.y is not None + plot_cfg.layers[0].aesthetics.x.field = "step" + plot_cfg.layers[0].aesthetics.y.field = "y" + registry = DictDataRegistry({"main": df}) + controls = VisualizationControlsState(accumulate_steps=True) + + chart = 
build_altair_chart(plot_cfg, registry, controls=controls) + spec = chart.to_dict() + + assert "detail" not in spec.get("encoding", {}) + + +def test_plotly_renderer_adds_layer_dropdown_menu(): + """Plotly renderer should add a dropdown menu that toggles layer visibility.""" + df = pd.DataFrame( + { + "layer": ["layer_0"] * 5 + ["layer_1"] * 5, + "x": list(range(10)), + "y": [value * 0.5 for value in range(10)], + "z": [1.0] * 10, + } + ) + plot_cfg = _base_plot_config(backend="plotly") + registry = DictDataRegistry({"main": df}) + controls = _layer_controls(["layer_0", "layer_1"]) + + figure = build_plotly_figure(plot_cfg, registry, controls=controls) + + layout = figure.to_dict()["layout"] + assert layout["updatemenus"] + menu = layout["updatemenus"][0] + assert len(menu["buttons"]) == 2 + assert [button["label"] for button in menu["buttons"]] == ["layer_0", "layer_1"] + # First trace should be visible initially, remaining traces hidden until selected. + figure_dict = figure.to_dict() + traces = figure_dict["data"] + assert traces[0]["visible"] is True + assert all(trace.get("visible") is False for trace in traces[1:]) + + +def test_plotly_renderer_adds_step_slider(): + """Test that Plotly renderer adds a slider for step-based animation.""" + df = pd.DataFrame( + { + "layer": ["layer_0"] * 6 + ["layer_1"] * 6, + "x": list(range(12)), + "y": [value * 0.5 for value in range(12)], + "z": [1.0] * 12, + "step": [0, 0, 1, 1, 2, 2] * 2, + } + ) + plot_cfg = _base_plot_config(backend="plotly") + registry = DictDataRegistry({"main": df}) + controls = VisualizationControlsState( + dropdown=VisualizationControlDetail(type="dropdown", field="layer", options=["layer_0", "layer_1"]), + slider=VisualizationControlDetail(type="slider", field="step", options=[0, 1, 2]), + ) + + figure = build_plotly_figure(plot_cfg, registry, controls=controls) + + layout = figure.to_dict()["layout"] + assert layout["sliders"] + assert len(figure.frames) == 3 + + +def 
test_plotly_renderer_preserves_literal_colors(): + """Test that Plotly renderer preserves literal color values from data.""" + df = pd.DataFrame( + { + "x": [0, 1], + "y": [0, 1], + "z": [0, 1], + "literal_color": ["#00ff00", "#ff0000"], + } + ) + plot_cfg = _base_plot_config(backend="plotly") + plot_cfg.layers[0].aesthetics.color = ChannelAestheticsConfig(field="literal_color", type="nominal") + registry = DictDataRegistry({"main": df}) + + figure = build_plotly_figure(plot_cfg, registry) + + traces = figure.to_dict()["data"] + assert traces + assert list(traces[0]["marker"]["color"]) == ["#00ff00", "#ff0000"] diff --git a/uv.lock b/uv.lock index a98d77e4..c6d05d73 100644 --- a/uv.lock +++ b/uv.lock @@ -164,6 +164,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554 }, ] +[[package]] +name = "altair" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "narwhals" }, + { name = "packaging" }, + { name = "typing-extensions", marker = "python_full_version < '3.15'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/c0/184a89bd5feba14ff3c41cfaf1dd8a82c05f5ceedbc92145e17042eb08a4/altair-6.0.0.tar.gz", hash = "sha256:614bf5ecbe2337347b590afb111929aa9c16c9527c4887d96c9bc7f6640756b4", size = 763834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/33/ef2f2409450ef6daa61459d5de5c08128e7d3edb773fefd0a324d1310238/altair-6.0.0-py3-none-any.whl", hash = "sha256:09ae95b53d5fe5b16987dccc785a7af8588f2dca50de1e7a156efa8a461515f8", size = 795410 }, +] + [[package]] name = "annotated-doc" version = "0.0.4" @@ -1512,6 +1528,33 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396 }, ] +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437 }, +] + [[package]] name = "kiwisolver" version = "1.4.9" @@ -3107,6 +3150,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = 
"sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, ] +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766 }, +] + [[package]] name = "regex" version = "2025.11.3" @@ -3225,6 +3282,87 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl", hash = "sha256:0559b1f47a19bbeb82bf15f95a057f99bcbbc98385532f57937f9fc57acc501a", size = 25476 }, ] +[[package]] +name = "rpds-py" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086 }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053 }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763 }, + { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951 }, + { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622 }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492 }, + { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080 }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680 }, + { url = 
"https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589 }, + { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289 }, + { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737 }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120 }, + { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782 }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463 }, + { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868 }, + { url = 
"https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887 }, + { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904 }, + { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945 }, + { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783 }, + { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021 }, + { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589 }, + { url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025 }, + { 
url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895 }, + { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799 }, + { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731 }, + { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027 }, + { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020 }, + { url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139 }, + { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224 }, + { url = 
"https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645 }, + { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443 }, + { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375 }, + { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850 }, + { url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812 }, + { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841 }, + { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149 }, + { url = 
"https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843 }, + { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507 }, + { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949 }, + { url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790 }, + { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217 }, + { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806 }, + { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341 }, + { url = 
"https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768 }, + { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099 }, + { url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192 }, + { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080 }, + { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841 }, + { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670 }, + { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005 }, + { url = 
"https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112 }, + { url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049 }, + { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661 }, + { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606 }, + { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126 }, + { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371 }, + { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298 }, + { url = 
"https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604 }, + { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391 }, + { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868 }, + { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747 }, + { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795 }, + { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330 }, + { url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194 }, + { url = 
"https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340 }, + { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765 }, + { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834 }, + { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470 }, + { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630 }, + { url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148 }, + { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030 }, + { url = 
"https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570 }, + { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532 }, +] + [[package]] name = "rsa" version = "4.9.1" @@ -3502,6 +3640,7 @@ name = "simplexity" version = "0.1" source = { editable = "." } dependencies = [ + { name = "altair" }, { name = "chex" }, { name = "dotenv" }, { name = "equinox" }, @@ -3554,6 +3693,7 @@ penzai = [ [package.metadata] requires-dist = [ + { name = "altair", specifier = ">=5.3.0" }, { name = "boto3", marker = "extra == 'aws'", specifier = ">=1.37.24" }, { name = "chex" }, { name = "diff-cover", marker = "extra == 'dev'" }, From e6c2a6cbe3ac4e5a22b6f77718ac3649cf477ca6 Mon Sep 17 00:00:00 2001 From: Loren AC Date: Mon, 15 Dec 2025 22:05:36 -0500 Subject: [PATCH 05/35] Add subspace orthogonality analysis for factored processes (#136) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Refactor regression code to incorporate optional computation of pairwise subspace orthogonality metrics * Refine regression API and add comprehensive orthogonality tests - Separate coeffs/intercept in return structure (omit intercept key when fit_intercept=False) - Rename to_factors → concat_belief_states for clarity - Add 9 orthogonality tests with principled numerical thresholds (safety_factor=10) - Test orthogonal, aligned, contained subspaces; multi-factor scenarios; edge cases - Update validators and existing tests for new parameter structure - Add informative assertion messages for debugging numerical precision * Organize imports * Fix lint issues * Fix slices * Simplify lr 
kwarg validation * Add return type * Add pylint ignore * Fix potential division by zero * Fix potential log(0) issue * Enhance subspace orthogonality computation by adding a check for multiple belief states. Log a warning if only one belief state is present, preventing unnecessary calculations. * Fix docstring inconsistency * Update docstring * Fix lint issues * Refactor linear regression kwargs validation and improve logging. Temporarily disable pylint checks during AST traversal to avoid crashes related to package imports. * Fix merge conflict * Ammended unseen merge conflict in linear_regression tests * Rename to_factors parameter to concat_belief_states in activation analyses * Update activation analysis tests for concat_belief_states semantics * Fix validator error message and fix linting issues * Add check requiring 2+ factors in _handle_factored_regression and remove redundant orthogonality compuations warning * Add proper spacing to warning messages * Fix dictionary equivalence check in test_linear_regression and add blank line after docstring in test_layerwise_analysis * Refactor subspace orthogonality computation for JIT compatibility * Fix conditional callback execution using jax.lax.cond * Fix linting and formatting issues * Fix formatting issues * Disable too-many-locals linting issue in test_linear_regression.py * Change name of return dict from singular_values -> arrays for clarity * Add docstring describing return values for _compute_all_pairwise_orthogonality function * Add docstring describing relevance of the do_nothing_branch function * Refactor key removal method in kwarg validator and fix docstring format * Temporarily disable pylint checks during AST traversal in linear_regression.py to prevent crashes. Remove deprecated layer_linear_regression_svd function for cleaner code and encourage use of layer_linear_regression with use_svd=True. 
* Refactor linear regression analysis registration to use partial application of layer_linear_regression with use_svd=True, removing the deprecated layer_linear_regression_svd function for improved clarity and consistency. * Fix tests * Add detailed docstring to _compute_subspace_orthogonality function, specifying return values and their meanings for improved clarity and documentation. * Add todo * Fix kwarg validation * Fix tests * Add validator decorator for linear_regression_svd to enforce use_svd=True and exclude it from output. Enhance tests to validate behavior. * Fix test * Add get_robust_basis for robust orthonormal basis extraction * Pass pair of bases instead of coefficient matrices to _compute_subspace_orthogonality * Compute full rank and orthonormal basis of coeff matrices before passing bases to subspace analysis * Fix formatting and docstring * Update comment * Fix issues due to API changes in activation and dataframe tests * Fix formatting issues --------- Co-authored-by: Eric Alt --- simplexity/activations/activation_analyses.py | 8 +- simplexity/analysis/layerwise_analysis.py | 81 +- simplexity/analysis/linear_regression.py | 413 ++++++- tests/activations/test_activation_analysis.py | 380 ++++++ .../activations/test_dataframe_integration.py | 6 +- tests/analysis/test_layerwise_analysis.py | 132 +- tests/analysis/test_linear_regression.py | 1071 +++++++++++++++-- 7 files changed, 1907 insertions(+), 184 deletions(-) diff --git a/simplexity/activations/activation_analyses.py b/simplexity/activations/activation_analyses.py index 51fdb7d9..ce724574 100644 --- a/simplexity/activations/activation_analyses.py +++ b/simplexity/activations/activation_analyses.py @@ -84,7 +84,7 @@ def __init__( use_probs_as_weights: bool = True, skip_first_token: bool = False, fit_intercept: bool = True, - to_factors: bool = False, + concat_belief_states: bool = False, ) -> None: super().__init__( analysis_type="linear_regression", @@ -92,7 +92,7 @@ def __init__( 
concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, skip_first_token=skip_first_token, - analysis_kwargs={"fit_intercept": fit_intercept, "to_factors": to_factors}, + analysis_kwargs={"fit_intercept": fit_intercept, "concat_belief_states": concat_belief_states}, ) @@ -108,9 +108,9 @@ def __init__( skip_first_token: bool = False, rcond_values: Sequence[float] | None = None, fit_intercept: bool = True, - to_factors: bool = False, + concat_belief_states: bool = False, ) -> None: - analysis_kwargs: dict[str, Any] = {"fit_intercept": fit_intercept, "to_factors": to_factors} + analysis_kwargs: dict[str, Any] = {"fit_intercept": fit_intercept, "concat_belief_states": concat_belief_states} if rcond_values is not None: analysis_kwargs["rcond_values"] = tuple(rcond_values) super().__init__( diff --git a/simplexity/analysis/layerwise_analysis.py b/simplexity/analysis/layerwise_analysis.py index e76e1c6d..60aa5cb7 100644 --- a/simplexity/analysis/layerwise_analysis.py +++ b/simplexity/analysis/layerwise_analysis.py @@ -1,21 +1,27 @@ """Composable layer-wise analysis orchestration.""" +# pylint: disable=all # Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all # Re-enable all pylint checkers for the checking phase. This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. 
+ from __future__ import annotations from collections.abc import Callable, Mapping, Sequence from dataclasses import dataclass +from functools import partial from typing import Any import jax -from simplexity.analysis.linear_regression import ( - layer_linear_regression, - layer_linear_regression_svd, -) +from simplexity.analysis.linear_regression import layer_linear_regression from simplexity.analysis.pca import ( DEFAULT_VARIANCE_THRESHOLDS, layer_pca_analysis, ) +from simplexity.logger import SIMPLEXITY_LOGGER AnalysisFn = Callable[..., tuple[Mapping[str, float], Mapping[str, jax.Array]]] @@ -34,35 +40,48 @@ class AnalysisRegistration: def _validate_linear_regression_kwargs(kwargs: Mapping[str, Any] | None) -> dict[str, Any]: provided = dict(kwargs or {}) - allowed = {"fit_intercept", "to_factors"} + allowed = {"fit_intercept", "concat_belief_states", "compute_subspace_orthogonality", "use_svd", "rcond_values"} unexpected = set(provided) - allowed if unexpected: raise ValueError(f"Unexpected linear_regression kwargs: {sorted(unexpected)}") - fit_intercept = bool(provided.get("fit_intercept", True)) - to_factors = bool(provided.get("to_factors", False)) - return {"fit_intercept": fit_intercept, "to_factors": to_factors} - - -def _validate_linear_regression_svd_kwargs(kwargs: Mapping[str, Any] | None) -> dict[str, Any]: - provided = dict(kwargs or {}) - allowed = {"fit_intercept", "rcond_values", "to_factors"} - unexpected = set(provided) - allowed - if unexpected: - raise ValueError(f"Unexpected linear_regression_svd kwargs: {sorted(unexpected)}") - fit_intercept = bool(provided.get("fit_intercept", True)) - to_factors = bool(provided.get("to_factors", False)) + resolved_kwargs = {} + resolved_kwargs["fit_intercept"] = bool(provided.get("fit_intercept", True)) + resolved_kwargs["concat_belief_states"] = bool(provided.get("concat_belief_states", False)) + resolved_kwargs["compute_subspace_orthogonality"] = bool(provided.get("compute_subspace_orthogonality", 
False)) rcond_values = provided.get("rcond_values") - if rcond_values is not None: - if not isinstance(rcond_values, (list, tuple)): - raise TypeError("rcond_values must be a sequence of floats") - if len(rcond_values) == 0: - raise ValueError("rcond_values must not be empty") - rcond_values = tuple(float(v) for v in rcond_values) - return { - "fit_intercept": fit_intercept, - "to_factors": to_factors, - "rcond_values": rcond_values, - } + should_use_svd = rcond_values is not None + use_svd = bool(provided.get("use_svd", should_use_svd)) + resolved_kwargs["use_svd"] = use_svd + if use_svd: + if rcond_values is not None: + if not isinstance(rcond_values, (list, tuple)): + raise TypeError("rcond_values must be a sequence of floats") + if len(rcond_values) == 0: + raise ValueError("rcond_values must not be empty") + if not use_svd: + SIMPLEXITY_LOGGER.warning("rcond_values are only used when use_svd is True") + rcond_values = tuple(float(v) for v in rcond_values) + resolved_kwargs["rcond_values"] = rcond_values + elif rcond_values is not None: + raise ValueError("rcond_values are only used when use_svd is True") + return resolved_kwargs + + +def set_use_svd( + fn: ValidatorFn, +) -> ValidatorFn: + """Decorator to set use_svd to True in the kwargs and remove it from output to avoid duplicate with partial.""" + + def wrapper(kwargs: Mapping[str, Any] | None) -> dict[str, Any]: + if kwargs and "use_svd" in kwargs and not kwargs["use_svd"]: + raise ValueError("use_svd cannot be set to False for linear_regression_svd") + modified_kwargs = dict(kwargs) if kwargs else {} # Make a copy to avoid mutating the input + modified_kwargs["use_svd"] = True + resolved = fn(modified_kwargs) + resolved.pop("use_svd", None) # Remove use_svd to avoid duplicate argument with partial + return resolved + + return wrapper def _validate_pca_kwargs(kwargs: Mapping[str, Any] | None) -> dict[str, Any]: @@ -97,9 +116,9 @@ def _validate_pca_kwargs(kwargs: Mapping[str, Any] | None) -> dict[str, 
Any]: validator=_validate_linear_regression_kwargs, ), "linear_regression_svd": AnalysisRegistration( - fn=layer_linear_regression_svd, + fn=partial(layer_linear_regression, use_svd=True), requires_belief_states=True, - validator=_validate_linear_regression_svd_kwargs, + validator=set_use_svd(_validate_linear_regression_kwargs), ), "pca": AnalysisRegistration( fn=layer_pca_analysis, diff --git a/simplexity/analysis/linear_regression.py b/simplexity/analysis/linear_regression.py index 1ce5c086..a0ef6eee 100644 --- a/simplexity/analysis/linear_regression.py +++ b/simplexity/analysis/linear_regression.py @@ -1,15 +1,25 @@ """Reusable linear regression utilities for activation analysis.""" +# pylint: disable=all # Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all # Re-enable all pylint checkers for the checking phase. This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. 
+ from __future__ import annotations +import itertools from collections.abc import Callable, Mapping, Sequence from typing import Any import jax import jax.numpy as jnp import numpy as np +from jax.debug import callback from simplexity.analysis.normalization import normalize_weights, standardize_features, standardize_targets +from simplexity.logger import SIMPLEXITY_LOGGER def _design_matrix(x: jax.Array, fit_intercept: bool) -> jax.Array: @@ -69,8 +79,46 @@ def linear_regression( beta, _, _, _ = jnp.linalg.lstsq(weighted_design, weighted_targets, rcond=None) predictions = design @ beta scalars = _regression_metrics(predictions, y_arr, w_arr) - projections = {"projected": predictions} - return scalars, projections + + # Separate intercept and coefficients + if fit_intercept: + arrays = { + "projected": predictions, + "coeffs": beta[1:], # Linear coefficients (excluding intercept) + "intercept": beta[:1], # Intercept term (keep 2D: [1, n_targets]) + } + else: + arrays = { + "projected": predictions, + "coeffs": beta, # All parameters are coefficients when no intercept + } + + return scalars, arrays + + +def _compute_regression_metrics( # pylint: disable=too-many-arguments + x: jax.Array, + y: jax.Array, + weights: jax.Array | np.ndarray | None, + beta: jax.Array, + predictions: jax.Array | None = None, + *, + fit_intercept: bool = True, +) -> Mapping[str, float]: + x_arr = standardize_features(x) + y_arr = standardize_targets(y) + if x_arr.shape[0] != y_arr.shape[0]: + raise ValueError("Features and targets must share the same first dimension") + if x_arr.shape[0] == 0: + raise ValueError("At least one sample is required") + w_arr = normalize_weights(weights, x_arr.shape[0]) + if w_arr is None: + w_arr = jnp.ones(x_arr.shape[0], dtype=x_arr.dtype) / x_arr.shape[0] + if predictions is None: + design = _design_matrix(x_arr, fit_intercept) + predictions = design @ beta + scalars = _regression_metrics(predictions, y_arr, w_arr) + return scalars def 
_compute_beta_from_svd( @@ -115,6 +163,7 @@ def linear_regression_svd( best_scalars: Mapping[str, float] | None = None best_rcond = rconds[0] best_error = float("inf") + best_beta: jax.Array | None = None for rcond in rconds: threshold = rcond * max_singular beta = _compute_beta_from_svd(u, s, vh, weighted_targets, threshold) @@ -128,12 +177,306 @@ def linear_regression_svd( best_pred = predictions best_scalars = scalars best_rcond = rcond - if best_pred is None or best_scalars is None: + best_beta = beta + if best_pred is None or best_scalars is None or best_beta is None: raise RuntimeError("Unable to compute linear regression solution") scalars = dict(best_scalars) scalars["best_rcond"] = float(best_rcond) - projections = {"projected": best_pred} - return scalars, projections + + # Separate intercept and coefficients + if fit_intercept: + arrays = { + "projected": best_pred, + "coeffs": best_beta[1:], # Linear coefficients (excluding intercept) + "intercept": best_beta[:1], # Intercept term (keep 2D: [1, n_targets]) + } + else: + arrays = { + "projected": best_pred, + "coeffs": best_beta, # All parameters are coefficients when no intercept + } + + return scalars, arrays + + +def _process_individual_factors( + layer_activations: jax.Array, + belief_states: tuple[jax.Array, ...], + weights: jax.Array, + use_svd: bool, + **kwargs: Any, +) -> list[tuple[Mapping[str, float], Mapping[str, jax.Array]]]: + """Process each factor individually using either standard or SVD regression.""" + results = [] + regression_fn = linear_regression_svd if use_svd else linear_regression + for factor in belief_states: + if not isinstance(factor, jax.Array): + raise ValueError("Each factor in belief_states must be a jax.Array") + factor_scalars, factor_arrays = regression_fn(layer_activations, factor, weights, **kwargs) + results.append((factor_scalars, factor_arrays)) + return results + + +def _merge_results_with_prefix( + scalars: dict[str, float], + arrays: dict[str, jax.Array], + 
results: tuple[Mapping[str, float], Mapping[str, jax.Array]], + prefix: str, +) -> None: + results_scalars, results_arrays = results + scalars.update({f"{prefix}/{key}": value for key, value in results_scalars.items()}) + arrays.update({f"{prefix}/{key}": value for key, value in results_arrays.items()}) + + +def _split_concat_results( + layer_activations: jax.Array, + weights: jax.Array, + belief_states: tuple[jax.Array, ...], + concat_results: tuple[Mapping[str, float], Mapping[str, jax.Array]], + **kwargs: Any, +) -> list[tuple[Mapping[str, float], Mapping[str, jax.Array]]]: + """Split concatenated regression results into individual factors.""" + _, concat_arrays = concat_results + + # Split the concatenated coefficients and projections into the individual factors + factor_dims = [factor.shape[-1] for factor in belief_states] + split_indices = jnp.cumsum(jnp.array(factor_dims))[:-1] + + coeffs_list = jnp.split(concat_arrays["coeffs"], split_indices, axis=-1) + projections_list = jnp.split(concat_arrays["projected"], split_indices, axis=-1) + + # Handle intercept - split if present + if "intercept" in concat_arrays: + intercepts_list = jnp.split(concat_arrays["intercept"], split_indices, axis=-1) + else: + intercepts_list = [None] * len(belief_states) + + # Only recompute scalar metrics, reuse projections and coefficients + # Filter out rcond_values from kwargs (only relevant for SVD during fitting, not metrics) + metrics_kwargs = {k: v for k, v in kwargs.items() if k != "rcond_values"} + + results = [] + for factor, coeffs, intercept, projections in zip( + belief_states, coeffs_list, intercepts_list, projections_list, strict=True + ): + # Reconstruct full beta for metrics computation + if intercept is not None: + beta = jnp.concatenate([intercept, coeffs], axis=0) + else: + beta = coeffs + + factor_scalars = _compute_regression_metrics( + layer_activations, + factor, + weights, + beta, + predictions=projections, + **metrics_kwargs, + ) + + # Build factor arrays - 
include intercept only if present + factor_arrays = {"projected": projections, "coeffs": coeffs} + if intercept is not None: + factor_arrays["intercept"] = intercept + + results.append((factor_scalars, factor_arrays)) + return results + + +def get_robust_basis(matrix: jax.Array) -> jax.Array: + """Extracts an orthonormal basis for the column space of the matrx. + + Handles rank deficiency gracefully by discarding directions associated with singular values below a + certain tolerance. + """ + u, s, _ = jnp.linalg.svd(matrix, full_matrices=False) + + max_dim = max(matrix.shape) + eps = jnp.finfo(matrix.dtype).eps + tol = s[0] * max_dim * eps + + valid_dims = s > tol + basis = u[:, valid_dims] + return basis + + +def _compute_subspace_orthogonality( + basis_pair: list[jax.Array], +) -> tuple[dict[str, float], dict[str, jax.Array]]: + """Compute orthogonality metrics between two coefficient subspaces. + + Args: + basis_pair: List of two orthonormal basis matrices + + Returns: + Tuple[dict[str, float], dict[str, jax.Array]]: A tuple containing: + - scalars: A dictionary with the following keys and float values: + - 'subspace_overlap': Average squared singular value (overlap score). + - 'max_singular_value': Largest singular value. + - 'min_singular_value': Smallest singular value. + - 'participation_ratio': Participation ratio of the singular values. + - 'entropy': Entropy of the squared singular values. + - 'effective_rank': Effective rank (exp(entropy)) of the singular value distribution. + - singular_values: A dictionary with a single key: + - 'singular_values': jax.Array of the singular values between the two subspaces. 
+ """ + q1 = basis_pair[0] + q2 = basis_pair[1] + + # Compute the singular values of the interaction matrix + interaction_matrix = q1.T @ q2 + singular_values = jnp.linalg.svd(interaction_matrix, compute_uv=False) + singular_values = jnp.clip(singular_values, 0, 1) + + # Compute the subspace overlap score + min_dim = min(q1.shape[1], q2.shape[1]) + sum_sq_sv = jnp.sum(singular_values**2) + sum_quad_sv = jnp.sum(singular_values**4) + + is_degenerate = sum_quad_sv == 0 + + # Define the False branch function (does nothing) + def do_nothing_branch(x): + """JAX 'False' branch function. + + Serves only to return a value that matches the 'True' branch's type (None) for jax.lax.cond. + """ + return None + + # Define the True branch function (runs the callback) + def execute_all_zeros_warning_branch(x): + callback(log_all_zeros, x) + return None + + def log_all_zeros(_): + SIMPLEXITY_LOGGER.warning( + "Degenerate subspace detected during orthogonality computation." + " All singular values are zero." + " Setting probability values and participation ratio to zero." + ) + + jax.lax.cond(is_degenerate, execute_all_zeros_warning_branch, do_nothing_branch, sum_sq_sv) + + pratio_denominator_safe = jnp.where(is_degenerate, 1.0, sum_quad_sv) + probs_denominator_safe = jnp.where(is_degenerate, 1.0, sum_sq_sv) + participation_ratio = sum_sq_sv**2 / pratio_denominator_safe + + subspace_overlap_score = sum_sq_sv / min_dim + + # Compute the entropy probabilities + probs = singular_values**2 / probs_denominator_safe + + def execute_some_zeros_warning_branch(x): + callback(log_some_zeros, x) + return None + + def log_some_zeros(num_zeros_array: jax.Array) -> None: + num_zeros = num_zeros_array.item() + SIMPLEXITY_LOGGER.warning( + f"Encountered {num_zeros} probability values of zero during entropy computation." + " This is likely due to numerical instability." + " Setting corresponding entropy contribution to zero." 
+ ) + + num_zeros = jnp.sum(probs == 0) + has_some_zeros = num_zeros > 0 + jax.lax.cond(has_some_zeros, execute_some_zeros_warning_branch, do_nothing_branch, num_zeros) + + p_log_p = probs * jnp.log(probs) + entropy = -jnp.sum(jnp.where(probs > 0, p_log_p, 0.0)) + + # Compute the effective rank + effective_rank = jnp.exp(entropy) + + scalars = { + "subspace_overlap": float(subspace_overlap_score), + "max_singular_value": float(jnp.max(singular_values)), + "min_singular_value": float(jnp.min(singular_values)), + "participation_ratio": float(participation_ratio), + "entropy": float(entropy), + "effective_rank": float(effective_rank), + } + + arrays = { + "singular_values": singular_values, + } + + return scalars, arrays + + +def _compute_all_pairwise_orthogonality( + coeffs_list: list[jax.Array], +) -> tuple[dict[str, float], dict[str, jax.Array]]: + """Compute pairwise orthogonality metrics for all factor pairs. + + Args: + coeffs_list: List of coefficient matrices (one per factor, excludes intercepts) + + Returns: + Tuple[dict[str, float], dict[str, jax.Array]]: + - scalars: Dictionary mapping keys of the form "orthogonality_{i}_{j}/" to scalar float metrics for + each pair of factors (i, j). + - arrays: Dictionary mapping keys of the form "orthogonality_{i}_{j}/" to array-valued + metrics for each pair of factors (i, j). 
+ """ + scalars = {} + arrays = {} + factor_pairs = list(itertools.combinations(range(len(coeffs_list)), 2)) + basis_list = [get_robust_basis(coeffs) for coeffs in coeffs_list] # computes orthonormal basis of coeff matrix + for i, j in factor_pairs: + basis_pair = [basis_list[i], basis_list[j]] + orthogonality_scalars, orthogonality_arrays = _compute_subspace_orthogonality(basis_pair) + scalars.update({f"orthogonality_{i}_{j}/{key}": value for key, value in orthogonality_scalars.items()}) + arrays.update({f"orthogonality_{i}_{j}/{key}": value for key, value in orthogonality_arrays.items()}) + return scalars, arrays + + +def _handle_factored_regression( + layer_activations: jax.Array, + weights: jax.Array, + belief_states: tuple[jax.Array, ...], + concat_belief_states: bool, + compute_subspace_orthogonality: bool, + use_svd: bool, + **kwargs: Any, +) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: + """Handle regression for two or more factored belief states using either standard or SVD method.""" + if len(belief_states) < 2: + raise ValueError("At least two factors are required for factored regression") + + scalars: dict[str, float] = {} + arrays: dict[str, jax.Array] = {} + + regression_fn = linear_regression_svd if use_svd else linear_regression + + # Process concatenated belief states if requested + if concat_belief_states: + belief_states_concat = jnp.concatenate(belief_states, axis=-1) + concat_results = regression_fn(layer_activations, belief_states_concat, weights, **kwargs) + _merge_results_with_prefix(scalars, arrays, concat_results, "concat") + + # Split the concatenated parameters and projections into the individual factors + factor_results = _split_concat_results( + layer_activations, + weights, + belief_states, + concat_results, + **kwargs, + ) + else: + factor_results = _process_individual_factors(layer_activations, belief_states, weights, use_svd, **kwargs) + + for factor_idx, factor_result in enumerate(factor_results): + 
_merge_results_with_prefix(scalars, arrays, factor_result, f"factor_{factor_idx}") + + if compute_subspace_orthogonality: + # Extract coefficients (excludes intercept) for orthogonality computation + coeffs_list = [factor_arrays["coeffs"] for _, factor_arrays in factor_results] + orthogonality_scalars, orthogonality_singular_values = _compute_all_pairwise_orthogonality(coeffs_list) + scalars.update(orthogonality_scalars) + arrays.update(orthogonality_singular_values) + + return scalars, arrays def _apply_layer_regression( @@ -168,25 +511,53 @@ def layer_linear_regression( layer_activations: jax.Array, weights: jax.Array, belief_states: jax.Array | tuple[jax.Array, ...] | None, - to_factors: bool = False, + concat_belief_states: bool = False, + compute_subspace_orthogonality: bool = False, + use_svd: bool = False, **kwargs: Any, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: - """Layer-wise regression helper that wraps :func:`linear_regression`.""" - if belief_states is None: + """Layer-wise regression helper that wraps :func:`linear_regression` or :func:`linear_regression_svd`. + + Args: + layer_activations: Neural network activations for a single layer + weights: Sample weights for weighted regression + belief_states: Target belief states (single array or tuple for factored processes) + concat_belief_states: If True and belief_states is a tuple, concatenate and regress jointly + compute_subspace_orthogonality: If True, compute orthogonality between factor subspaces + use_svd: If True, use SVD-based regression instead of standard least squares + **kwargs: Additional arguments passed to regression function (fit_intercept, rcond_values, etc.) 
+ + Returns: + scalars: Dictionary of scalar metrics + arrays: Dictionary of arrays (projected predictions, parameters, singular values if orthogonality computed) + """ + # If no belief states are provided, raise an error + if ( + belief_states is None + or (isinstance(belief_states, tuple) and len(belief_states) == 0) + or (isinstance(belief_states, jax.Array) and belief_states.size == 0) + ): raise ValueError("linear_regression requires belief_states") - return _apply_layer_regression(linear_regression, layer_activations, weights, belief_states, to_factors, **kwargs) + regression_fn = linear_regression_svd if use_svd else linear_regression -def layer_linear_regression_svd( - layer_activations: jax.Array, - weights: jax.Array, - belief_states: jax.Array | tuple[jax.Array, ...] | None, - to_factors: bool = False, - **kwargs: Any, -) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: - """Layer-wise regression helper that wraps :func:`linear_regression_svd`.""" - if belief_states is None: - raise ValueError("linear_regression_svd requires belief_states") - return _apply_layer_regression( - linear_regression_svd, layer_activations, weights, belief_states, to_factors, **kwargs + if not isinstance(belief_states, tuple) or len(belief_states) == 1: + if compute_subspace_orthogonality: + SIMPLEXITY_LOGGER.warning( + "Subspace orthogonality requires multiple factors." 
+ " Received single factor of type %s; skipping orthogonality metrics.", + type(belief_states).__name__, + ) + belief_states = belief_states[0] if isinstance(belief_states, tuple) else belief_states + scalars, arrays = regression_fn(layer_activations, belief_states, weights, **kwargs) + return scalars, arrays + + return _handle_factored_regression( + layer_activations, + weights, + belief_states, + concat_belief_states, + compute_subspace_orthogonality, + use_svd, + **kwargs, ) diff --git a/tests/activations/test_activation_analysis.py b/tests/activations/test_activation_analysis.py index b31c8a8a..b2288716 100644 --- a/tests/activations/test_activation_analysis.py +++ b/tests/activations/test_activation_analysis.py @@ -740,6 +740,386 @@ def test_controls_accumulate_steps_conflict(self): ActivationVisualizationControlsConfig(slider="step", accumulate_steps=True) +class TestTupleBeliefStates: + """Test activation tracker with tuple belief states for factored processes.""" + + @pytest.fixture + def factored_belief_data(self): + """Create synthetic data with factored belief states.""" + batch_size = 4 + seq_len = 5 + d_layer0 = 8 + d_layer1 = 12 + + inputs = jnp.array( + [ + [1, 2, 3, 4, 5], + [1, 2, 3, 6, 7], + [1, 2, 8, 9, 10], + [1, 2, 3, 4, 11], + ] + ) + + # Factored beliefs: 2 factors with dimensions 3 and 2 + factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.3 + factor_1 = jnp.ones((batch_size, seq_len, 2)) * 0.7 + factored_beliefs = (factor_0, factor_1) + + probs = jnp.ones((batch_size, seq_len)) * 0.1 + + activations = { + "layer_0": jnp.ones((batch_size, seq_len, d_layer0)) * 0.3, + "layer_1": jnp.ones((batch_size, seq_len, d_layer1)) * 0.7, + } + + return { + "inputs": inputs, + "factored_beliefs": factored_beliefs, + "probs": probs, + "activations": activations, + "batch_size": batch_size, + "seq_len": seq_len, + "factor_0_dim": 3, + "factor_1_dim": 2, + "d_layer0": d_layer0, + "d_layer1": d_layer1, + } + + def 
test_prepare_activations_accepts_tuple_beliefs(self, factored_belief_data): + """prepare_activations should accept and preserve tuple belief states.""" + result = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + + batch_size = factored_belief_data["batch_size"] + assert result.belief_states[0].shape == (batch_size, factored_belief_data["factor_0_dim"]) + assert result.belief_states[1].shape == (batch_size, factored_belief_data["factor_1_dim"]) + + def test_prepare_activations_tuple_beliefs_all_tokens(self, factored_belief_data): + """Tuple beliefs should work with all tokens mode.""" + result = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=False, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + + # With deduplication, we expect fewer samples than batch_size * seq_len + n_prefixes = result.belief_states[0].shape[0] + assert result.belief_states[0].shape == (n_prefixes, factored_belief_data["factor_0_dim"]) + assert result.belief_states[1].shape == (n_prefixes, factored_belief_data["factor_1_dim"]) + assert result.activations["layer_0"].shape[0] == n_prefixes + + def test_prepare_activations_torch_tuple_beliefs(self, factored_belief_data): + """prepare_activations should accept tuple of PyTorch tensors.""" + torch = pytest.importorskip("torch") + + torch_factor_0 = 
torch.tensor(np.asarray(factored_belief_data["factored_beliefs"][0])) + torch_factor_1 = torch.tensor(np.asarray(factored_belief_data["factored_beliefs"][1])) + torch_beliefs = (torch_factor_0, torch_factor_1) + + result = prepare_activations( + factored_belief_data["inputs"], + torch_beliefs, + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + # Should be converted to JAX arrays + assert isinstance(result.belief_states[0], jnp.ndarray) + assert isinstance(result.belief_states[1], jnp.ndarray) + + def test_prepare_activations_numpy_tuple_beliefs(self, factored_belief_data): + """prepare_activations should accept tuple of numpy arrays.""" + np_factor_0 = np.asarray(factored_belief_data["factored_beliefs"][0]) + np_factor_1 = np.asarray(factored_belief_data["factored_beliefs"][1]) + np_beliefs = (np_factor_0, np_factor_1) + + result = prepare_activations( + factored_belief_data["inputs"], + np_beliefs, + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 2 + # Should be converted to JAX arrays + assert isinstance(result.belief_states[0], jnp.ndarray) + assert isinstance(result.belief_states[1], jnp.ndarray) + + def test_linear_regression_with_multiple_factors(self, factored_belief_data): + """LinearRegressionAnalysis with multi-factor tuple should regress to each factor separately.""" + analysis = LinearRegressionAnalysis() + + prepared = prepare_activations( + factored_belief_data["inputs"], + 
factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + scalars, projections = analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + # Should have separate metrics for each factor + # Format is: layer_name_factor_idx/metric_name + assert "layer_0_factor_0/r2" in scalars + assert "layer_0_factor_1/r2" in scalars + assert "layer_0_factor_0/rmse" in scalars + assert "layer_0_factor_1/rmse" in scalars + assert "layer_0_factor_0/mae" in scalars + assert "layer_0_factor_1/mae" in scalars + assert "layer_0_factor_0/dist" in scalars + assert "layer_0_factor_1/dist" in scalars + + assert "layer_1_factor_0/r2" in scalars + assert "layer_1_factor_1/r2" in scalars + + # Should have separate projections for each factor + assert "layer_0_factor_0/projected" in projections + assert "layer_0_factor_1/projected" in projections + assert "layer_1_factor_0/projected" in projections + assert "layer_1_factor_1/projected" in projections + + # Check projection shapes + batch_size = factored_belief_data["batch_size"] + assert projections["layer_0_factor_0/projected"].shape == (batch_size, factored_belief_data["factor_0_dim"]) + assert projections["layer_0_factor_1/projected"].shape == (batch_size, factored_belief_data["factor_1_dim"]) + + def test_linear_regression_svd_with_multiple_factors(self, factored_belief_data): + """LinearRegressionSVDAnalysis with multi-factor tuple should regress to each factor separately.""" + analysis = LinearRegressionSVDAnalysis(rcond_values=[1e-10]) + + prepared = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + 
concat_layers=False, + use_probs_as_weights=False, + ), + ) + + scalars, projections = analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + # Should have separate metrics for each factor including best_rcond + assert "layer_0_factor_0/r2" in scalars + assert "layer_0_factor_1/r2" in scalars + assert "layer_0_factor_0/best_rcond" in scalars + assert "layer_0_factor_1/best_rcond" in scalars + + # Should have separate projections for each factor + assert "layer_0_factor_0/projected" in projections + assert "layer_0_factor_1/projected" in projections + + def test_tracker_with_factored_beliefs(self, factored_belief_data): + """ActivationTracker should work with tuple belief states.""" + tracker = ActivationTracker( + { + "regression": LinearRegressionAnalysis( + last_token_only=True, + concat_layers=False, + ), + "pca": PcaAnalysis( + n_components=2, + last_token_only=True, + concat_layers=False, + ), + } + ) + + scalars, projections, _ = tracker.analyze( + inputs=factored_belief_data["inputs"], + beliefs=factored_belief_data["factored_beliefs"], + probs=factored_belief_data["probs"], + activations=factored_belief_data["activations"], + ) + + # Regression should have per-factor metrics + assert "regression/layer_0_factor_0/r2" in scalars + assert "regression/layer_0_factor_1/r2" in scalars + + # PCA should still work (doesn't use belief states) + assert "pca/layer_0_variance_explained" in scalars + + # Projections should be present + assert "regression/layer_0_factor_0/projected" in projections + assert "regression/layer_0_factor_1/projected" in projections + assert "pca/layer_0_pca" in projections + + def test_single_factor_tuple(self, synthetic_data): + """Test with a single-factor tuple (edge case).""" + # Create single-factor tuple + single_factor = (synthetic_data["beliefs"],) + + result = prepare_activations( + synthetic_data["inputs"], + single_factor, + synthetic_data["probs"], + 
synthetic_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 1 + assert result.belief_states[0].shape == (synthetic_data["batch_size"], synthetic_data["belief_dim"]) + + def test_linear_regression_single_factor_tuple_behaves_like_non_tuple(self, synthetic_data): + """LinearRegressionAnalysis with single-factor tuple should behave like non-tuple (no factor keys).""" + single_factor = (synthetic_data["beliefs"],) + analysis = LinearRegressionAnalysis() + + prepared = prepare_activations( + synthetic_data["inputs"], + single_factor, + synthetic_data["probs"], + synthetic_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + scalars, projections = analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + # Should have simple keys without "factor_" prefix + assert "layer_0_r2" in scalars + assert "layer_0_rmse" in scalars + assert "layer_0_projected" in projections + + # Should NOT have factor keys + assert "layer_0_factor_0/r2" not in scalars + assert "layer_0_factor_0/projected" not in projections + + def test_linear_regression_concat_belief_states(self, factored_belief_data): + """LinearRegressionAnalysis with concat_belief_states=True should return both factor and concat results.""" + analysis = LinearRegressionAnalysis(concat_belief_states=True) + + prepared = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + scalars, projections = 
analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + # Should have per-factor results + assert "layer_0_factor_0/r2" in scalars + assert "layer_0_factor_1/r2" in scalars + assert "layer_0_factor_0/projected" in projections + assert "layer_0_factor_1/projected" in projections + + # Should ALSO have concatenated results + assert "layer_0_concat/r2" in scalars + assert "layer_0_concat/rmse" in scalars + assert "layer_0_concat/projected" in projections + + # Check concatenated projection shape (should be sum of factor dimensions) + batch_size = factored_belief_data["batch_size"] + total_dim = factored_belief_data["factor_0_dim"] + factored_belief_data["factor_1_dim"] + assert projections["layer_0_concat/projected"].shape == (batch_size, total_dim) + + def test_three_factor_tuple(self, factored_belief_data): + """Test with three factors to ensure generalization.""" + batch_size = factored_belief_data["batch_size"] + seq_len = factored_belief_data["seq_len"] + + # Add a third factor + factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.3 + factor_1 = jnp.ones((batch_size, seq_len, 2)) * 0.5 + factor_2 = jnp.ones((batch_size, seq_len, 4)) * 0.7 + three_factor_beliefs = (factor_0, factor_1, factor_2) + + result = prepare_activations( + factored_belief_data["inputs"], + three_factor_beliefs, + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + assert result.belief_states is not None + assert isinstance(result.belief_states, tuple) + assert len(result.belief_states) == 3 + assert result.belief_states[0].shape == (batch_size, 3) + assert result.belief_states[1].shape == (batch_size, 2) + assert result.belief_states[2].shape == (batch_size, 4) + + class TestScalarSeriesMapping: """Tests for scalar_series dataframe construction.""" diff --git 
a/tests/activations/test_dataframe_integration.py b/tests/activations/test_dataframe_integration.py index 4ca363fc..1fa8727d 100644 --- a/tests/activations/test_dataframe_integration.py +++ b/tests/activations/test_dataframe_integration.py @@ -15,7 +15,7 @@ ActivationVisualizationFieldRef, CombinedMappingSection, ) -from simplexity.analysis.linear_regression import layer_linear_regression_svd +from simplexity.analysis.linear_regression import layer_linear_regression from simplexity.exceptions import ConfigValidationError @@ -339,8 +339,8 @@ def test_linear_regression_projections_match_beliefs(self): beliefs_softmax = beliefs_softmax / beliefs_softmax.sum(axis=2, keepdims=True) belief_states = tuple(jnp.array(beliefs_softmax[:, f, :]) for f in range(n_factors)) - scalars, projections = layer_linear_regression_svd( - jnp.array(ds), jnp.ones(n_samples) / n_samples, belief_states, to_factors=True + scalars, projections = layer_linear_regression( + jnp.array(ds), jnp.ones(n_samples) / n_samples, belief_states, use_svd=True ) for f in range(n_factors): diff --git a/tests/analysis/test_layerwise_analysis.py b/tests/analysis/test_layerwise_analysis.py index c0d1a839..5b2c9dbd 100644 --- a/tests/analysis/test_layerwise_analysis.py +++ b/tests/analysis/test_layerwise_analysis.py @@ -1,5 +1,12 @@ """Tests for the LayerwiseAnalysis orchestrator.""" +# pylint: disable=all # Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all # Re-enable all pylint checkers for the checking phase. This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. 
+ import jax.numpy as jnp import pytest @@ -9,6 +16,7 @@ @pytest.fixture def analysis_inputs() -> tuple[dict[str, jnp.ndarray], jnp.ndarray, jnp.ndarray]: """Provides sample activations, weights, and belief states for analysis tests.""" + activations = { "layer_a": jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]), "layer_b": jnp.array([[2.0, 1.0], [1.0, 2.0], [0.0, 1.0]]), @@ -20,6 +28,7 @@ def analysis_inputs() -> tuple[dict[str, jnp.ndarray], jnp.ndarray, jnp.ndarray] def test_layerwise_analysis_linear_regression_namespacing(analysis_inputs) -> None: """Metrics and projections should be namespace-qualified per layer.""" + activations, weights, belief_states = analysis_inputs analysis = LayerwiseAnalysis("linear_regression", last_token_only=True) @@ -30,11 +39,19 @@ def test_layerwise_analysis_linear_regression_namespacing(analysis_inputs) -> No ) assert set(scalars) >= {"layer_a_r2", "layer_b_r2"} - assert set(projections) == {"layer_a_projected", "layer_b_projected"} + assert set(projections) == { + "layer_a_projected", + "layer_b_projected", + "layer_a_coeffs", + "layer_b_coeffs", + "layer_a_intercept", + "layer_b_intercept", + } def test_layerwise_analysis_requires_targets(analysis_inputs) -> None: """Analyses that need belief states should validate input.""" + activations, weights, _ = analysis_inputs analysis = LayerwiseAnalysis("linear_regression") @@ -44,12 +61,14 @@ def test_layerwise_analysis_requires_targets(analysis_inputs) -> None: def test_invalid_analysis_type_raises() -> None: """Unknown analysis types should raise clear errors.""" + with pytest.raises(ValueError, match="Unknown analysis_type"): LayerwiseAnalysis("unknown") def test_invalid_kwargs_validation() -> None: """Validator rejects unsupported kwargs for a registered analysis.""" + with pytest.raises(ValueError, match="Unexpected linear_regression kwargs"): LayerwiseAnalysis( "linear_regression", @@ -59,6 +78,7 @@ def test_invalid_kwargs_validation() -> None: def 
test_pca_analysis_does_not_require_beliefs(analysis_inputs) -> None: """PCA analysis should run without belief states and namespace results.""" + activations, weights, _ = analysis_inputs analysis = LayerwiseAnalysis( "pca", @@ -76,6 +96,7 @@ def test_pca_analysis_does_not_require_beliefs(analysis_inputs) -> None: def test_invalid_pca_kwargs() -> None: """Invalid PCA kwargs should raise helpful errors.""" + with pytest.raises(ValueError, match="n_components must be positive"): LayerwiseAnalysis( "pca", @@ -85,6 +106,7 @@ def test_invalid_pca_kwargs() -> None: def test_linear_regression_svd_kwargs_validation_errors() -> None: """SVD-specific validators should reject unsupported inputs.""" + with pytest.raises(TypeError, match="rcond_values must be a sequence"): LayerwiseAnalysis( "linear_regression_svd", @@ -100,7 +122,8 @@ def test_linear_regression_svd_kwargs_validation_errors() -> None: def test_linear_regression_svd_rejects_unexpected_kwargs() -> None: """Unexpected SVD kwargs should raise clear errors.""" - with pytest.raises(ValueError, match="Unexpected linear_regression_svd kwargs"): + + with pytest.raises(ValueError, match="Unexpected linear_regression kwargs"): LayerwiseAnalysis( "linear_regression_svd", analysis_kwargs={"bad": True}, @@ -109,6 +132,7 @@ def test_linear_regression_svd_rejects_unexpected_kwargs() -> None: def test_linear_regression_svd_kwargs_are_normalized() -> None: """Validator should coerce mixed numeric types to floats.""" + validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator params = validator({"rcond_values": [1, 1e-3]}) @@ -117,6 +141,7 @@ def test_linear_regression_svd_kwargs_are_normalized() -> None: def test_pca_kwargs_require_int_components() -> None: """PCA validator should enforce integral n_components.""" + with pytest.raises(TypeError, match="n_components must be an int or None"): LayerwiseAnalysis( "pca", @@ -126,6 +151,7 @@ def test_pca_kwargs_require_int_components() -> None: def 
test_pca_kwargs_require_sequence_thresholds() -> None: """Variance thresholds must be sequences with valid ranges.""" + with pytest.raises(TypeError, match="variance_thresholds must be a sequence"): LayerwiseAnalysis( "pca", @@ -141,6 +167,7 @@ def test_pca_kwargs_require_sequence_thresholds() -> None: def test_pca_rejects_unexpected_kwargs() -> None: """Unexpected PCA kwargs should surface informative errors.""" + with pytest.raises(ValueError, match="Unexpected pca kwargs"): LayerwiseAnalysis( "pca", @@ -150,6 +177,7 @@ def test_pca_rejects_unexpected_kwargs() -> None: def test_layerwise_analysis_property_accessors() -> None: """Constructor flags should surface via property accessors.""" + analysis = LayerwiseAnalysis( "pca", last_token_only=True, @@ -161,3 +189,103 @@ def test_layerwise_analysis_property_accessors() -> None: assert analysis.concat_layers assert not analysis.use_probs_as_weights assert not analysis.requires_belief_states + + +def test_linear_regression_accepts_concat_belief_states() -> None: + """linear_regression validator should accept concat_belief_states parameter.""" + + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"fit_intercept": False, "concat_belief_states": True}) + + assert params["fit_intercept"] is False + assert params["concat_belief_states"] is True + + +def test_linear_regression_svd_accepts_concat_belief_states() -> None: + """linear_regression_svd validator should accept concat_belief_states parameter.""" + + validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator + params = validator({"fit_intercept": True, "concat_belief_states": True, "rcond_values": [1e-3]}) + + assert params["fit_intercept"] is True + assert params["concat_belief_states"] is True + assert params["rcond_values"] == (0.001,) + + +def test_linear_regression_concat_belief_states_defaults_false() -> None: + """concat_belief_states should default to False when not provided.""" + + validator = 
ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"fit_intercept": True}) + + assert params["concat_belief_states"] is False + + +def test_linear_regression_accepts_compute_subspace_orthogonality() -> None: + """linear_regression validator should accept compute_subspace_orthogonality parameter.""" + + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"fit_intercept": True, "compute_subspace_orthogonality": True}) + + assert params["fit_intercept"] is True + assert params["compute_subspace_orthogonality"] is True + + +def test_linear_regression_svd_accepts_compute_subspace_orthogonality() -> None: + """linear_regression_svd validator should accept compute_subspace_orthogonality parameter.""" + + validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator + params = validator({"fit_intercept": True, "compute_subspace_orthogonality": True, "rcond_values": [1e-3]}) + + assert params["fit_intercept"] is True + assert params["compute_subspace_orthogonality"] is True + assert params["rcond_values"] == (0.001,) + + +def test_linear_regression_svd_rejects_false_use_svd() -> None: + """linear_regression_svd validator should reject explicit use_svd parameter since it's bound in partial.""" + + validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator + + validator({"use_svd": True}) + + with pytest.raises(ValueError, match="use_svd cannot be set to False for linear_regression_svd"): + validator({"use_svd": False}) + + +def test_linear_regression_svd_excludes_use_svd_from_output() -> None: + """linear_regression_svd validator should not include use_svd in resolved kwargs.""" + + validator = ANALYSIS_REGISTRY["linear_regression_svd"].validator + params = validator({"rcond_values": [1e-3]}) + + # use_svd should not be in the output since it's already bound in the partial + assert "use_svd" not in params + assert params["rcond_values"] == (0.001,) + + +def 
test_linear_regression_compute_subspace_orthogonality_defaults_false() -> None: + """compute_subspace_orthogonality should default to False when not provided.""" + + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"fit_intercept": True}) + + assert params["compute_subspace_orthogonality"] is False + + +def test_linear_regression_accepts_use_svd() -> None: + """linear_regression validator should accept use_svd parameter.""" + + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({"use_svd": True}) + + assert params["use_svd"] is True + + +def test_linear_regression_use_svd_defaults_false() -> None: + """use_svd should default to False when not provided.""" + + validator = ANALYSIS_REGISTRY["linear_regression"].validator + params = validator({}) + + assert params["use_svd"] is False diff --git a/tests/analysis/test_linear_regression.py b/tests/analysis/test_linear_regression.py index f6bfe084..c32766c1 100644 --- a/tests/analysis/test_linear_regression.py +++ b/tests/analysis/test_linear_regression.py @@ -1,29 +1,66 @@ """Tests for reusable linear regression helpers.""" +# pylint: disable=all # Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all # Re-enable all pylint checkers for the checking phase. This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. 
+ +# pylint: disable=too-many-lines +# pylint: disable=too-many-locals + import chex +import jax import jax.numpy as jnp import pytest from simplexity.analysis.linear_regression import ( + get_robust_basis, layer_linear_regression, - layer_linear_regression_svd, linear_regression, linear_regression_svd, ) +def _compute_orthogonality_threshold( + x: jax.Array, + *factors: jax.Array, + safety_factor: int = 10, +) -> float: + """Compute principled threshold for near-zero orthogonality checks. + + Threshold is based on machine precision scaled by problem dimensions. + For orthogonality via QR + SVD, typical numerical error is O(ε·n) where + ε is machine epsilon and n is the maximum relevant dimension. + + Args: + x: Input features array (used for dtype and dimension) + *factors: Factor arrays being compared (used for output dimensions) + safety_factor: Multiplicative safety factor (default 10) + + Returns: + Threshold value for considering singular values as effectively zero + """ + eps = jnp.finfo(x.dtype).eps + n_features = x.shape[1] + factor_dims = [f.shape[1] for f in factors] + max_dim = max(n_features, *factor_dims) + return float(max_dim * eps * safety_factor) + + def test_linear_regression_perfect_fit() -> None: """Verify weighted least squares recovers a perfect linear relation.""" x = jnp.arange(6.0).reshape(-1, 1) y = 3.0 * x + 2.0 weights = jnp.ones(x.shape[0]) - scalars, projections = linear_regression(x, y, weights) + scalars, arrays = linear_regression(x, y, weights) assert pytest.approx(1.0) == scalars["r2"] assert pytest.approx(0.0, abs=1e-5) == scalars["rmse"] assert pytest.approx(0.0, abs=1e-5) == scalars["mae"] - chex.assert_trees_all_close(projections["projected"], y) + chex.assert_trees_all_close(arrays["projected"], y) def test_linear_regression_svd_selects_best_rcond() -> None: @@ -32,7 +69,7 @@ def test_linear_regression_svd_selects_best_rcond() -> None: y = jnp.sum(x, axis=1, keepdims=True) weights = jnp.array([0.1, 0.2, 0.3, 0.4]) - scalars, 
projections = linear_regression_svd( + scalars, arrays = linear_regression_svd( x, y, weights, @@ -40,7 +77,7 @@ def test_linear_regression_svd_selects_best_rcond() -> None: ) assert scalars["best_rcond"] in {1e-6, 1e-4, 1e-2} - chex.assert_trees_all_close(projections["projected"], y) + chex.assert_trees_all_close(arrays["projected"], y) def test_layer_regression_requires_targets() -> None: @@ -51,9 +88,6 @@ def test_layer_regression_requires_targets() -> None: with pytest.raises(ValueError, match="requires belief_states"): layer_linear_regression(x, weights, None) - with pytest.raises(ValueError, match="requires belief_states"): - layer_linear_regression_svd(x, weights, None) - def test_linear_regression_rejects_mismatched_weights() -> None: """Weights must align with the sample dimension.""" @@ -90,10 +124,10 @@ def test_linear_regression_without_intercept_uses_uniform_weights() -> None: x = jnp.arange(1.0, 4.0)[:, None] y = 2.0 * x - scalars, projections = linear_regression(x, y, None, fit_intercept=False) + scalars, arrays = linear_regression(x, y, None, fit_intercept=False) assert pytest.approx(1.0) == scalars["r2"] - chex.assert_trees_all_close(projections["projected"], y) + chex.assert_trees_all_close(arrays["projected"], y) def test_linear_regression_svd_handles_empty_features() -> None: @@ -102,10 +136,10 @@ def test_linear_regression_svd_handles_empty_features() -> None: y = jnp.arange(3.0)[:, None] weights = jnp.ones(3) - scalars, projections = linear_regression_svd(x, y, weights, fit_intercept=False) + scalars, arrays = linear_regression_svd(x, y, weights, fit_intercept=False) assert scalars["best_rcond"] == pytest.approx(1e-15) - chex.assert_trees_all_close(projections["projected"], jnp.zeros_like(y)) + chex.assert_trees_all_close(arrays["projected"], jnp.zeros_like(y)) def test_linear_regression_accepts_one_dimensional_inputs() -> None: @@ -114,10 +148,10 @@ def test_linear_regression_accepts_one_dimensional_inputs() -> None: y = 5.0 * x + 1.0 weights 
= jnp.ones_like(x) - scalars, projections = linear_regression(x, y, weights) + scalars, arrays = linear_regression(x, y, weights) assert pytest.approx(1.0) == scalars["r2"] - chex.assert_trees_all_close(projections["projected"], y[:, None]) + chex.assert_trees_all_close(arrays["projected"], y[:, None]) def test_linear_regression_rejects_high_rank_inputs() -> None: @@ -168,25 +202,26 @@ def test_linear_regression_svd_falls_back_to_default_rcond() -> None: assert scalars["best_rcond"] == pytest.approx(1e-15) -def test_layer_linear_regression_svd_runs_end_to_end() -> None: +def test_layer_linear_regression_runs_end_to_end() -> None: """Layer helper should proxy through to the base implementation.""" x = jnp.arange(6.0).reshape(3, 2) weights = jnp.ones(3) / 3.0 beliefs = 2.0 * x.sum(axis=1, keepdims=True) - scalars, projections = layer_linear_regression_svd( + scalars, arrays = layer_linear_regression( x, weights, beliefs, + use_svd=True, rcond_values=[1e-3], ) assert pytest.approx(1.0, abs=1e-6) == scalars["r2"] - chex.assert_trees_all_close(projections["projected"], beliefs) + chex.assert_trees_all_close(arrays["projected"], beliefs) -def test_layer_linear_regression_to_factors_basic() -> None: - """Layer regression with to_factors should regress to each factor separately.""" +def test_layer_linear_regression_belief_states_tuple_default() -> None: + """By default, layer regression should regress to each factor separately if given a tuple of belief states.""" x = jnp.arange(12.0).reshape(4, 3) # 4 samples, 3 features weights = jnp.ones(4) / 4.0 @@ -195,11 +230,10 @@ def test_layer_linear_regression_to_factors_basic() -> None: factor_1 = jnp.array([[0.2, 0.3, 0.5], [0.1, 0.6, 0.3], [0.4, 0.4, 0.2], [0.3, 0.3, 0.4]]) # [4, 3] factored_beliefs = (factor_0, factor_1) - scalars, projections = layer_linear_regression( + scalars, arrays = layer_linear_regression( x, weights, factored_beliefs, - to_factors=True, ) # Should have separate metrics for each factor @@ -213,16 
+247,28 @@ def test_layer_linear_regression_to_factors_basic() -> None: assert "factor_1/dist" in scalars # Should have separate projections for each factor - assert "factor_0/projected" in projections - assert "factor_1/projected" in projections + assert "factor_0/projected" in arrays + assert "factor_1/projected" in arrays + + # Should have separate parameters for each factor + assert "factor_0/coeffs" in arrays + assert "factor_1/coeffs" in arrays + + # Should have separate intercepts for each factor by default + assert "factor_0/intercept" in arrays + assert "factor_1/intercept" in arrays # Check shapes - assert projections["factor_0/projected"].shape == factor_0.shape - assert projections["factor_1/projected"].shape == factor_1.shape + assert arrays["factor_0/projected"].shape == factor_0.shape + assert arrays["factor_1/projected"].shape == factor_1.shape + assert arrays["factor_0/coeffs"].shape == (x.shape[1], factor_0.shape[1]) + assert arrays["factor_1/coeffs"].shape == (x.shape[1], factor_1.shape[1]) + assert arrays["factor_0/intercept"].shape == (1, factor_0.shape[1]) + assert arrays["factor_1/intercept"].shape == (1, factor_1.shape[1]) -def test_layer_linear_regression_svd_to_factors_basic() -> None: - """Layer regression SVD with to_factors should regress to each factor separately.""" +def test_layer_linear_regression_svd_belief_states_tuple_default() -> None: + """By default, layer regression SVD should regress to each factor separately if given a tuple of belief states.""" x = jnp.arange(12.0).reshape(4, 3) # 4 samples, 3 features weights = jnp.ones(4) / 4.0 @@ -231,31 +277,45 @@ def test_layer_linear_regression_svd_to_factors_basic() -> None: factor_1 = jnp.array([[0.2, 0.3, 0.5], [0.1, 0.6, 0.3], [0.4, 0.4, 0.2], [0.3, 0.3, 0.4]]) # [4, 3] factored_beliefs = (factor_0, factor_1) - scalars, projections = layer_linear_regression_svd( + scalars, arrays = layer_linear_regression( x, weights, factored_beliefs, - to_factors=True, + use_svd=True, 
rcond_values=[1e-6], ) - # Should have separate metrics for each factor including best_rcond - assert "factor_0/r2" in scalars - assert "factor_1/r2" in scalars - assert "factor_0/best_rcond" in scalars - assert "factor_1/best_rcond" in scalars + # Should have ALL regression metrics for each factor including best_rcond + for factor in [0, 1]: + assert f"factor_{factor}/r2" in scalars + assert f"factor_{factor}/rmse" in scalars + assert f"factor_{factor}/mae" in scalars + assert f"factor_{factor}/dist" in scalars + assert f"factor_{factor}/best_rcond" in scalars # Should have separate projections for each factor - assert "factor_0/projected" in projections - assert "factor_1/projected" in projections + assert "factor_0/projected" in arrays + assert "factor_1/projected" in arrays + + # Should have separate coefficients for each factor + assert "factor_0/coeffs" in arrays + assert "factor_1/coeffs" in arrays + + # Should have separate intercepts for each factor by default + assert "factor_0/intercept" in arrays + assert "factor_1/intercept" in arrays # Check shapes - assert projections["factor_0/projected"].shape == factor_0.shape - assert projections["factor_1/projected"].shape == factor_1.shape + assert arrays["factor_0/projected"].shape == factor_0.shape + assert arrays["factor_1/projected"].shape == factor_1.shape + assert arrays["factor_0/coeffs"].shape == (x.shape[1], factor_0.shape[1]) + assert arrays["factor_1/coeffs"].shape == (x.shape[1], factor_1.shape[1]) + assert arrays["factor_0/intercept"].shape == (1, factor_0.shape[1]) + assert arrays["factor_1/intercept"].shape == (1, factor_1.shape[1]) -def test_layer_linear_regression_to_factors_single_factor() -> None: - """to_factors=True should work with a single factor tuple.""" +def test_layer_linear_regression_belief_states_tuple_single_factor() -> None: + """Single-element tuple should behave the same as passing a single array.""" x = jnp.arange(9.0).reshape(3, 3) weights = jnp.ones(3) / 3.0 @@ -263,126 
+323,891 @@ def test_layer_linear_regression_to_factors_single_factor() -> None: factor_0 = jnp.array([[0.3, 0.7], [0.5, 0.5], [0.8, 0.2]]) factored_beliefs = (factor_0,) - scalars, projections = layer_linear_regression( + scalars, arrays = layer_linear_regression( x, weights, factored_beliefs, - to_factors=True, ) - # Should have metrics for single factor - assert "factor_0/r2" in scalars - assert "factor_0/projected" in projections - assert projections["factor_0/projected"].shape == factor_0.shape + # Should have same structure as non-tuple case + assert "r2" in scalars + assert "rmse" in scalars + assert "mae" in scalars + assert "dist" in scalars + assert "projected" in arrays + assert "coeffs" in arrays + assert "intercept" in arrays + # Verify it matches non-tuple behavior + scalars_non_tuple, arrays_non_tuple = layer_linear_regression(x, weights, factor_0) -def test_layer_linear_regression_to_factors_requires_tuple() -> None: - """to_factors=True requires belief_states to be a tuple.""" - x = jnp.ones((3, 2)) - weights = jnp.ones(3) / 3.0 - beliefs_array = jnp.ones((3, 2)) + chex.assert_trees_all_close(scalars, scalars_non_tuple) + chex.assert_trees_all_close(arrays, arrays_non_tuple) - with pytest.raises(ValueError, match="belief_states must be a tuple when to_factors is True"): - layer_linear_regression(x, weights, beliefs_array, to_factors=True) - with pytest.raises(ValueError, match="belief_states must be a tuple when to_factors is True"): - layer_linear_regression_svd(x, weights, beliefs_array, to_factors=True) +def test_orthogonality_with_orthogonal_subspaces() -> None: + """Orthogonal factors constructed explicitly should have near-zero overlap.""" + # Create truly orthogonal coefficient matrices by construction + n_samples, n_features = 100, 6 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) -def test_layer_linear_regression_to_factors_validates_tuple_contents() -> None: - """to_factors=True requires all elements in 
tuple to be jax.Arrays.""" - x = jnp.ones((3, 2)) - weights = jnp.ones(3) / 3.0 + # Define orthogonal coefficient matrices + # w_0 uses first 3 features, w_1 uses last 3 features + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]) # (6, 2) + w_1 = jnp.array([[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]) # (6, 2) - # Invalid: tuple contains non-array - invalid_beliefs = (jnp.ones((3, 2)), "not an array") # type: ignore + # Generate factors using orthogonal subspaces (no intercept for simplicity) + factor_0 = x @ w_0 # (100, 2) + factor_1 = x @ w_1 # (100, 2) + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples - with pytest.raises(ValueError, match="Each factor in belief_states must be a jax.Array"): - layer_linear_regression(x, weights, invalid_beliefs, to_factors=True) # type: ignore + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + compute_subspace_orthogonality=True, + fit_intercept=False, # No intercept for cleaner test + ) - with pytest.raises(ValueError, match="Each factor in belief_states must be a jax.Array"): - layer_linear_regression_svd(x, weights, invalid_beliefs, to_factors=True) # type: ignore + # Should have standard factor metrics with perfect fit + assert scalars["factor_0/r2"] > 0.99 # Should fit nearly perfectly + assert scalars["factor_1/r2"] > 0.99 + + # Should have ALL orthogonality metrics + assert "orthogonality_0_1/subspace_overlap" in scalars + assert "orthogonality_0_1/max_singular_value" in scalars + assert "orthogonality_0_1/min_singular_value" in scalars + assert "orthogonality_0_1/participation_ratio" in scalars + assert "orthogonality_0_1/entropy" in scalars + assert "orthogonality_0_1/effective_rank" in scalars + + # Compute principled threshold based on machine precision and problem size + threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) + + # Should indicate near-zero overlap 
(orthogonal by construction) + assert scalars["orthogonality_0_1/subspace_overlap"] < threshold + assert scalars["orthogonality_0_1/max_singular_value"] < threshold + + # Should have singular values in arrays + assert "orthogonality_0_1/singular_values" in arrays + # Both factors have 2 dimensions, so min(2, 2) = 2 singular values + assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + # All singular values should be near zero (orthogonal) + assert jnp.all(arrays["orthogonality_0_1/singular_values"] < threshold) + + +def test_orthogonality_with_aligned_subspaces() -> None: + """Aligned factors with identical column spaces should have high overlap.""" + + # Create truly aligned coefficient matrices by construction + n_samples, n_features = 100, 6 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Define aligned coefficient matrices - w_1 = w_0 @ A for invertible A + # This ensures span(w_1) = span(w_0) + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]) # (6, 2) + w_1 = jnp.array([[0.5, 1.0], [1.0, 0.5], [1.5, 1.5], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]) # (6, 2) + + # Generate factors using aligned subspaces (no intercept for simplicity) + factor_0 = x @ w_0 # (100, 2) + factor_1 = x @ w_1 # (100, 2) + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + compute_subspace_orthogonality=True, + fit_intercept=False, # No intercept for cleaner test + ) -def test_layer_linear_regression_to_factors_false_works() -> None: - """to_factors=False requires belief_states to be a single array, not a tuple.""" - x = jnp.ones((3, 2)) - weights = jnp.ones(3) / 3.0 + # Should have standard factor metrics with perfect fit + assert scalars["factor_0/r2"] > 0.99 # Should fit nearly perfectly + assert scalars["factor_1/r2"] > 0.99 + + # Should have ALL orthogonality metrics + assert 
"orthogonality_0_1/subspace_overlap" in scalars + assert "orthogonality_0_1/max_singular_value" in scalars + assert "orthogonality_0_1/min_singular_value" in scalars + assert "orthogonality_0_1/participation_ratio" in scalars + assert "orthogonality_0_1/entropy" in scalars + assert "orthogonality_0_1/effective_rank" in scalars + + # Should indicate high overlap (aligned by construction) + assert scalars["orthogonality_0_1/subspace_overlap"] > 0.99 + assert scalars["orthogonality_0_1/max_singular_value"] > 0.99 + + # Should have singular values in arrays + assert "orthogonality_0_1/singular_values" in arrays + # Both factors have 2 dimensions, so min(2, 2) = 2 singular values + assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + # All singular values should be near 1.0 (perfectly aligned) + assert jnp.all(arrays["orthogonality_0_1/singular_values"] > 0.99) + + +def test_orthogonality_with_three_factors() -> None: + """Three factors should produce all pairwise orthogonality combinations.""" + + # Create three mutually orthogonal coefficient matrices + n_samples, n_features = 100, 6 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Define three orthogonal coefficient matrices using disjoint features + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]) # Uses features 0-1 + w_1 = jnp.array([[0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0]]) # Uses features 2-3 + w_2 = jnp.array([[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]) # Uses features 4-5 + + # Generate factors using orthogonal subspaces + factor_0 = x @ w_0 # (100, 2) + factor_1 = x @ w_1 # (100, 2) + factor_2 = x @ w_2 # (100, 2) + factored_beliefs = (factor_0, factor_1, factor_2) + weights = jnp.ones(n_samples) / n_samples + + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + compute_subspace_orthogonality=True, + fit_intercept=False, + ) - 
# Invalid: tuple when to_factors=False - factored_beliefs = (jnp.ones((3, 2)), jnp.ones((3, 3))) + # Should have standard factor metrics for all three factors + assert scalars["factor_0/r2"] > 0.99 + assert scalars["factor_1/r2"] > 0.99 + assert scalars["factor_2/r2"] > 0.99 - scalars, projections = layer_linear_regression(x, weights, factored_beliefs, to_factors=False) - assert "r2" in scalars - assert "projected" in projections - assert projections["projected"].shape == (3, 5) + # Compute principled threshold based on machine precision and problem size + threshold = _compute_orthogonality_threshold(x, factor_0, factor_1, factor_2) + + # Should have ALL three pairwise orthogonality combinations + pairwise_keys = ["orthogonality_0_1", "orthogonality_0_2", "orthogonality_1_2"] + for pair_key in pairwise_keys: + assert f"{pair_key}/subspace_overlap" in scalars + assert f"{pair_key}/max_singular_value" in scalars + assert f"{pair_key}/min_singular_value" in scalars + assert f"{pair_key}/participation_ratio" in scalars + assert f"{pair_key}/entropy" in scalars + assert f"{pair_key}/effective_rank" in scalars + assert f"{pair_key}/singular_values" in arrays + + # All pairs should be orthogonal (near-zero overlap) + overlap = scalars[f"{pair_key}/subspace_overlap"] + assert overlap < threshold, f"{pair_key} subspace_overlap={overlap} >= threshold={threshold}" + + max_sv = scalars[f"{pair_key}/max_singular_value"] + assert max_sv < threshold, f"{pair_key} max_singular_value={max_sv} >= threshold={threshold}" + + # Each pair has 2D subspaces, so 2 singular values + assert arrays[f"{pair_key}/singular_values"].shape[0] == 2 + svs = arrays[f"{pair_key}/singular_values"] + assert jnp.all(svs < threshold), f"{pair_key} singular_values={svs} not all < threshold={threshold}" + + +def test_orthogonality_not_computed_by_default() -> None: + """Orthogonality metrics should not be computed when compute_subspace_orthogonality=False.""" - scalars, projections = 
layer_linear_regression_svd(x, weights, factored_beliefs, to_factors=False) + # Setup two-factor regression + n_samples, n_features = 50, 4 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0]]) + w_1 = jnp.array([[0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]) + + factor_0 = x @ w_0 + factor_1 = x @ w_1 + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples + + # Run WITHOUT compute_subspace_orthogonality (default is False) + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + fit_intercept=False, + ) + + # Should have standard factor metrics + assert "factor_0/r2" in scalars + assert "factor_1/r2" in scalars + + # Should NOT have any orthogonality metrics + orthogonality_keys = [ + "orthogonality_0_1/subspace_overlap", + "orthogonality_0_1/max_singular_value", + "orthogonality_0_1/min_singular_value", + "orthogonality_0_1/participation_ratio", + "orthogonality_0_1/entropy", + "orthogonality_0_1/effective_rank", + ] + for key in orthogonality_keys: + assert key not in scalars + + # Should NOT have orthogonality singular values in arrays + assert "orthogonality_0_1/singular_values" not in arrays + + +def test_orthogonality_warning_for_single_belief_state(caplog: pytest.LogCaptureFixture) -> None: + """Should warn when requesting orthogonality with a single belief state.""" + + # Setup single-factor regression + n_samples, n_features = 30, 4 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + belief_state = jax.random.normal(key, (n_samples, 2)) + weights = jnp.ones(n_samples) / n_samples + + # Request orthogonality with single belief state (not a tuple) + with caplog.at_level("WARNING"): + scalars, arrays = layer_linear_regression( + x, + weights, + belief_state, + compute_subspace_orthogonality=True, + fit_intercept=False, + ) + + # Should have logged a warning + assert 
"Subspace orthogonality requires multiple factors." in caplog.text + + # Should still run regression successfully assert "r2" in scalars - assert "projected" in projections - assert projections["projected"].shape == (3, 5) + assert "projected" in arrays + # Should NOT have orthogonality metrics + assert "orthogonality_0_1/subspace_overlap" not in scalars + assert "orthogonality_0_1/singular_values" not in arrays -def test_factored_regression_perfect_linear_fit() -> None: - """Test factored regression with perfectly linear targets achieves perfect fit. - Uses targets that are exact linear combinations of features to verify - the regression machinery works correctly for the factored case. - """ - # 5 samples, 4 features - x = jnp.array( +def test_use_svd_flag_equivalence() -> None: + """layer_linear_regression with use_svd=True should match layer_linear_regression_svd.""" + + n_samples, n_features = 40, 4 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Test with single belief state + belief_state = jax.random.normal(key, (n_samples, 3)) + weights = jnp.ones(n_samples) / n_samples + rcond_values = [1e-6, 1e-4] + + # Method 1: use_svd=True + scalars_flag, arrays_flag = layer_linear_regression( + x, + weights, + belief_state, + use_svd=True, + rcond_values=rcond_values, + ) + + # Method 2: layer_linear_regression_svd + scalars_wrapper, arrays_wrapper = layer_linear_regression( + x, + weights, + belief_state, + use_svd=True, + rcond_values=rcond_values, + ) + + # Should produce identical results + assert scalars_flag.keys() == scalars_wrapper.keys() + for key, value in scalars_flag.items(): + assert value == pytest.approx(scalars_wrapper[key]) + + assert arrays_flag.keys() == arrays_wrapper.keys() + for key, value in arrays_flag.items(): + chex.assert_trees_all_close(value, arrays_wrapper[key]) + + # Test with factored belief states + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [0.0, 0.0], [0.0, 0.0]]) + w_1 = jnp.array([[0.0, 0.0], 
[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]) + factor_0 = x @ w_0 + factor_1 = x @ w_1 + factored_beliefs = (factor_0, factor_1) + + # Method 1: use_svd=True with factored beliefs + scalars_flag_fact, arrays_flag_fact = layer_linear_regression( + x, + weights, + factored_beliefs, + use_svd=True, + rcond_values=rcond_values, + ) + + # Method 2: layer_linear_regression_svd with factored beliefs + scalars_wrapper_fact, arrays_wrapper_fact = layer_linear_regression( + x, + weights, + factored_beliefs, + use_svd=True, + rcond_values=rcond_values, + ) + + # Should produce identical results + assert scalars_flag_fact.keys() == scalars_wrapper_fact.keys() + for key, value in scalars_flag_fact.items(): + assert value == pytest.approx(scalars_wrapper_fact[key]) + + assert arrays_flag_fact.keys() == arrays_wrapper_fact.keys() + for key, value in arrays_flag_fact.items(): + chex.assert_trees_all_close(value, arrays_wrapper_fact[key]) + + +def test_use_svd_with_orthogonality() -> None: + """SVD regression should work with orthogonality computation.""" + + n_samples, n_features = 80, 6 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Create orthogonal coefficient matrices + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]) + w_1 = jnp.array([[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]) + + factor_0 = x @ w_0 + factor_1 = x @ w_1 + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples + + # Run SVD regression with orthogonality computation + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + use_svd=True, + compute_subspace_orthogonality=True, + rcond_values=[1e-6], + fit_intercept=False, + ) + + # Should have standard factor metrics with SVD + assert "factor_0/r2" in scalars + assert "factor_1/r2" in scalars + assert "factor_0/best_rcond" in scalars + assert "factor_1/best_rcond" in scalars + + # Should have 
orthogonality metrics + assert "orthogonality_0_1/subspace_overlap" in scalars + assert "orthogonality_0_1/max_singular_value" in scalars + assert "orthogonality_0_1/singular_values" in arrays + + # Compute principled threshold + threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) + + # Should indicate near-zero overlap (orthogonal by construction) + assert scalars["orthogonality_0_1/subspace_overlap"] < threshold + assert scalars["orthogonality_0_1/max_singular_value"] < threshold + + # Should have good regression fit + assert scalars["factor_0/r2"] > 0.99 + assert scalars["factor_1/r2"] > 0.99 + + +def test_orthogonality_with_different_subspace_dimensions() -> None: + """Orthogonality should work when factors have different output dimensions.""" + + n_samples, n_features = 100, 8 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Create orthogonal coefficient matrices with different output dimensions + # factor_0 has 2 output dimensions, factor_1 has 5 output dimensions + w_0 = jnp.array( + [ + [1.0, 0.0], + [0.0, 1.0], + [1.0, 1.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + ] + ) # (8, 2) + w_1 = jnp.array( [ - [1.0, 2.0, 3.0, 4.0], - [2.0, 3.0, 4.0, 5.0], - [3.0, 4.0, 5.0, 6.0], - [4.0, 5.0, 6.0, 7.0], - [5.0, 6.0, 7.0, 8.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + [1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 1.0], ] + ) # (8, 5) + + factor_0 = x @ w_0 # (100, 2) + factor_1 = x @ w_1 # (100, 5) + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples + + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + compute_subspace_orthogonality=True, + fit_intercept=False, + ) + + # Should have standard factor metrics + assert scalars["factor_0/r2"] > 0.99 + assert scalars["factor_1/r2"] > 
0.99 + + # Should have orthogonality metrics + assert "orthogonality_0_1/subspace_overlap" in scalars + assert "orthogonality_0_1/max_singular_value" in scalars + assert "orthogonality_0_1/singular_values" in arrays + + # Compute principled threshold + threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) + + # Should indicate near-zero overlap (orthogonal by construction) + assert scalars["orthogonality_0_1/subspace_overlap"] < threshold + assert scalars["orthogonality_0_1/max_singular_value"] < threshold + + # Singular values shape should be min(2, 5) = 2 + assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + assert jnp.all(arrays["orthogonality_0_1/singular_values"] < threshold) + + +def test_orthogonality_with_contained_subspace() -> None: + """Smaller subspace fully contained in larger subspace should show high alignment.""" + + n_samples, n_features = 100, 8 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Create coefficient matrices where factor_0's subspace is contained in factor_1's + # factor_0: 2D subspace using features [0, 1] + # factor_1: 3D subspace using features [0, 1, 2] (contains factor_0's space) + w_0 = jnp.array( + [ + [1.0, 0.0], + [0.0, 1.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + [0.0, 0.0], + ] + ) # (8, 2) + w_1 = jnp.array( + [ + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + ] + ) # (8, 3) + + factor_0 = x @ w_0 # (100, 2) + factor_1 = x @ w_1 # (100, 3) + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples + + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + compute_subspace_orthogonality=True, + fit_intercept=False, + ) + + # Should have standard factor metrics + assert scalars["factor_0/r2"] > 0.99 + assert scalars["factor_1/r2"] > 0.99 + + # Should have orthogonality 
metrics + assert "orthogonality_0_1/subspace_overlap" in scalars + assert "orthogonality_0_1/max_singular_value" in scalars + assert "orthogonality_0_1/singular_values" in arrays + + # Singular values shape should be min(2, 3) = 2 + assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + + # Since factor_0's subspace is contained in factor_1's, singular values should be near 1.0 + # (indicating perfect alignment in the 2D shared subspace) + assert scalars["orthogonality_0_1/subspace_overlap"] > 0.99 + assert scalars["orthogonality_0_1/max_singular_value"] > 0.99 + assert scalars["orthogonality_0_1/min_singular_value"] > 0.99 + assert jnp.all(arrays["orthogonality_0_1/singular_values"] > 0.99) + + +def test_orthogonality_excludes_intercept() -> None: + """Orthogonality should be computed using only coefficients, not intercept.""" + + n_samples, n_features = 100, 6 + key = jax.random.PRNGKey(0) + x = jax.random.normal(key, (n_samples, n_features)) + + # Create orthogonal coefficient matrices + w_0 = jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]) + w_1 = jnp.array([[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]) + + # Add different intercepts to the factors + intercept_0 = jnp.array([[5.0, -3.0]]) + intercept_1 = jnp.array([[10.0, 7.0]]) + + factor_0 = x @ w_0 + intercept_0 # (100, 2) + factor_1 = x @ w_1 + intercept_1 # (100, 2) + factored_beliefs = (factor_0, factor_1) + weights = jnp.ones(n_samples) / n_samples + + # Run with fit_intercept=True + scalars, arrays = layer_linear_regression( + x, + weights, + factored_beliefs, + compute_subspace_orthogonality=True, + fit_intercept=True, ) - weights = jnp.ones(5) / 5.0 - # Factor 0: 3 states, exact linear combination (with intercept) - # y0 = [x0 + 1, x1 + 2, x2 + 3] - factor_0 = jnp.stack([x[:, 0] + 1, x[:, 1] + 2, x[:, 2] + 3], axis=1) + # Should have intercepts for both factors + assert "factor_0/intercept" in arrays + assert 
"factor_1/intercept" in arrays + + # Should have good regression fit + assert scalars["factor_0/r2"] > 0.99 + assert scalars["factor_1/r2"] > 0.99 - # Factor 1: 2 states, exact linear combination - # y1 = [x1, x3] - factor_1 = jnp.stack([x[:, 1], x[:, 3]], axis=1) + # Orthogonality should still be near-zero (computed from coefficients only, not intercepts) + threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) - scalars, projections = layer_linear_regression(x, weights, (factor_0, factor_1), to_factors=True) + assert "orthogonality_0_1/subspace_overlap" in scalars + assert "orthogonality_0_1/max_singular_value" in scalars - # Should achieve perfect R² since targets are exact linear combinations - assert scalars["factor_0/r2"] > 0.99, f"factor_0 R² too low: {scalars['factor_0/r2']}" - assert scalars["factor_1/r2"] > 0.99, f"factor_1 R² too low: {scalars['factor_1/r2']}" + overlap = scalars["orthogonality_0_1/subspace_overlap"] + assert overlap < threshold, f"subspace_overlap={overlap} >= threshold={threshold}" - # Projections should match targets very closely - chex.assert_trees_all_close(projections["factor_0/projected"], factor_0, atol=1e-4) - chex.assert_trees_all_close(projections["factor_1/projected"], factor_1, atol=1e-4) + max_sv = scalars["orthogonality_0_1/max_singular_value"] + assert max_sv < threshold, f"max_singular_value={max_sv} >= threshold={threshold}" + # The different intercepts should not affect orthogonality + svs = arrays["orthogonality_0_1/singular_values"] + assert jnp.all(svs < threshold), f"singular_values={svs} not all < threshold={threshold}" -def test_factored_regression_different_state_counts() -> None: - """Test factored regression with factors having different numbers of states. - This reproduces a scenario where factors have different dimensionality, - which is common in factored generative processes. 
+def test_linear_regression_constant_targets_r2_and_dist() -> None: + """Constant targets should yield r2==0, and dist matches weighted residual norm. + + With intercept: perfect fit to constant -> zero residuals but r2 fallback to 0.0. + Without intercept: nonzero residuals; verify `dist` against manual computation. """ - x = jnp.arange(24.0).reshape(6, 4) # 6 samples, 4 features - weights = jnp.ones(6) / 6.0 + x = jnp.arange(4.0)[:, None] + y = jnp.ones_like(x) * 3.0 + weights = jnp.array([0.1, 0.2, 0.3, 0.4]) - # Factor 0: 3 states (like "mess3") - factor_0_raw = x[:, :3] - factor_0 = factor_0_raw / factor_0_raw.sum(axis=1, keepdims=True) + # With intercept -> perfect constant fit, but r2 should fallback to 0.0 when variance is zero + scalars, _ = linear_regression(x, y, weights) + assert scalars["r2"] == 0.0 + assert jnp.isclose(scalars["rmse"], 0.0, atol=1e-6, rtol=0.0).item() + assert jnp.isclose(scalars["mae"], 0.0, atol=1e-6, rtol=0.0).item() + assert jnp.isclose(scalars["dist"], 0.0, atol=1e-6, rtol=0.0).item() + + # Without intercept -> cannot fit a constant perfectly; r2 still 0.0, and dist should match manual computation + scalars_no_int, arrays_no_int = linear_regression(x, y, weights, fit_intercept=False) + assert scalars_no_int["r2"] == 0.0 + residuals = arrays_no_int["projected"] - y + per_sample = jnp.sqrt(jnp.sum(residuals**2, axis=1)) + expected_dist = float(jnp.sum(per_sample * weights)) + assert jnp.isclose(scalars_no_int["dist"], expected_dist, atol=1e-6, rtol=0.0).item() + + +def test_linear_regression_intercept_and_shapes_both_solvers() -> None: + """Validate intercept presence/absence and array shapes for both solvers.""" + n, d, t = 5, 3, 2 + x = jnp.arange(float(n * d)).reshape(n, d) + # Construct multi-target y with known linear relation and intercept + true_coeffs = jnp.array([[1.0, 2.0], [0.5, -1.0], [3.0, 0.0]]) # (d, t) + true_intercept = jnp.array([[0.7, -0.3]]) # (1, t) + y = x @ true_coeffs + true_intercept + weights = jnp.ones(n) 
/ n + + # Standard solver, with intercept + _, arrays = linear_regression(x, y, weights, fit_intercept=True) + assert "projected" in arrays + assert "coeffs" in arrays + assert "intercept" in arrays + assert arrays["projected"].shape == (n, t) + assert arrays["coeffs"].shape == (d, t) + assert arrays["intercept"].shape == (1, t) + + # Standard solver, without intercept + _, arrays_no_int = linear_regression(x, y, weights, fit_intercept=False) + assert "projected" in arrays_no_int + assert "coeffs" in arrays_no_int + assert "intercept" not in arrays_no_int + assert arrays_no_int["projected"].shape == (n, t) + assert arrays_no_int["coeffs"].shape == (d, t) + + # SVD solver, with intercept + _, arrays_svd = linear_regression_svd(x, y, weights, fit_intercept=True) + assert "projected" in arrays_svd + assert "coeffs" in arrays_svd + assert "intercept" in arrays_svd + assert arrays_svd["projected"].shape == (n, t) + assert arrays_svd["coeffs"].shape == (d, t) + assert arrays_svd["intercept"].shape == (1, t) + + # SVD solver, without intercept + _, arrays_svd_no_int = linear_regression_svd(x, y, weights, fit_intercept=False) + assert "projected" in arrays_svd_no_int + assert "coeffs" in arrays_svd_no_int + assert "intercept" not in arrays_svd_no_int + assert arrays_svd_no_int["projected"].shape == (n, t) + assert arrays_svd_no_int["coeffs"].shape == (d, t) + + +def test_layer_linear_regression_concat_vs_separate_equivalence() -> None: + """Concat and separate factor regressions should yield identical per-factor arrays.""" + n, d = 6, 3 + x = jnp.arange(float(n * d)).reshape(n, d) + # Two factors with different output dims + w_0 = jnp.array([[1.0, 0.5], [0.0, -1.0], [2.0, 1.0]]) # (d, 2) + b0 = jnp.array([[0.3, -0.2]]) # (1, 2) + factor_0 = x @ w_0 + b0 + + w_1 = jnp.array([[0.2, 0.0, -0.5], [1.0, 1.0, 0.0], [-1.0, 0.5, 0.3]]) # (d, 3) + b1 = jnp.array([[0.1, 0.2, -0.1]]) # (1, 3) + factor_1 = x @ w_1 + b1 - # Factor 1: 2 states (like "tom quantum") - factor_1_raw = x[:, 
:2] - factor_1 = factor_1_raw / factor_1_raw.sum(axis=1, keepdims=True) + factored_beliefs = (factor_0, factor_1) + weights = jnp.array([0.05, 0.10, 0.15, 0.20, 0.25, 0.25]) - scalars, projections = layer_linear_regression(x, weights, (factor_0, factor_1), to_factors=True) + # Separate per-factor regression + _, arrays_sep = layer_linear_regression( + x, + weights, + factored_beliefs, + concat_belief_states=False, + ) - # Verify shapes are correct - assert projections["factor_0/projected"].shape == (6, 3) - assert projections["factor_1/projected"].shape == (6, 2) + # Concatenated regression with splitting + _, arrays_cat = layer_linear_regression( + x, + weights, + factored_beliefs, + concat_belief_states=True, + ) + + # Concat path should also provide combined arrays + assert "concat/projected" in arrays_cat + assert "concat/coeffs" in arrays_cat + assert "concat/intercept" in arrays_cat + + # Per-factor arrays should match between separate and concatenated flows + for k in ["projected", "coeffs", "intercept"]: + chex.assert_trees_all_close(arrays_sep[f"factor_0/{k}"], arrays_cat[f"factor_0/{k}"]) + chex.assert_trees_all_close(arrays_sep[f"factor_1/{k}"], arrays_cat[f"factor_1/{k}"]) + + +def test_layer_linear_regression_svd_concat_vs_separate_equivalence_best_rcond() -> None: + """SVD regression: concat-split vs separate produce identical per-factor arrays. + + If belief concatenation is enabled, we only report rcond for the concatenated fit as "concat/best_rcond". + If belief concatenation is disabled, we report rcond for each factor as "factor_k/best_rcond". 
+ """ + n, d = 6, 3 + x = jnp.arange(float(n * d)).reshape(n, d) + # Two factors with different output dims + w_0 = jnp.array([[1.0, 0.5], [0.0, -1.0], [2.0, 1.0]]) # (d, 2) + b0 = jnp.array([[0.3, -0.2]]) # (1, 2) + factor_0 = x @ w_0 + b0 + + w_1 = jnp.array([[0.2, 0.0, -0.5], [1.0, 1.0, 0.0], [-1.0, 0.5, 0.3]]) # (d, 3) + b1 = jnp.array([[0.1, 0.2, -0.1]]) # (1, 3) + factor_1 = x @ w_1 + b1 + + factored_beliefs = (factor_0, factor_1) + weights = jnp.array([0.05, 0.10, 0.15, 0.20, 0.25, 0.25]) + + # Separate per-factor SVD regression + scalars_sep, arrays_sep = layer_linear_regression( + x, + weights, + factored_beliefs, + concat_belief_states=False, + use_svd=True, + rcond_values=[1e-3], + ) + + # Concatenated SVD regression with splitting + scalars_cat, arrays_cat = layer_linear_regression( + x, + weights, + factored_beliefs, + concat_belief_states=True, + use_svd=True, + rcond_values=[1e-3], + ) - # Both should achieve reasonable fit - assert scalars["factor_0/r2"] > 0.5, f"factor_0 R² too low: {scalars['factor_0/r2']}" - assert scalars["factor_1/r2"] > 0.5, f"factor_1 R² too low: {scalars['factor_1/r2']}" + # Concat path should provide combined arrays and best_rcond + assert "concat/projected" in arrays_cat + assert "concat/coeffs" in arrays_cat + assert "concat/intercept" in arrays_cat + assert "concat/best_rcond" in scalars_cat + assert scalars_cat["concat/best_rcond"] == pytest.approx(1e-3) + + # Separate path should include per-factor best_rcond; concat-split path should not + assert "factor_0/best_rcond" in scalars_sep + assert "factor_1/best_rcond" in scalars_sep + assert "factor_0/best_rcond" not in scalars_cat + assert "factor_1/best_rcond" not in scalars_cat + + # Per-factor arrays should match between separate and concat-split flows + for k in ["projected", "coeffs", "intercept"]: + chex.assert_trees_all_close(arrays_sep[f"factor_0/{k}"], arrays_cat[f"factor_0/{k}"]) + chex.assert_trees_all_close(arrays_sep[f"factor_1/{k}"], 
arrays_cat[f"factor_1/{k}"]) + + # Overlapping scalar metrics should agree closely across flows + for metric in ["r2", "rmse", "mae", "dist"]: + assert jnp.isclose( + jnp.asarray(scalars_sep[f"factor_0/{metric}"]), + jnp.asarray(scalars_cat[f"factor_0/{metric}"]), + atol=1e-6, + rtol=0.0, + ).item() + assert jnp.isclose( + jnp.asarray(scalars_sep[f"factor_1/{metric}"]), + jnp.asarray(scalars_cat[f"factor_1/{metric}"]), + atol=1e-6, + rtol=0.0, + ).item() + + +def test_get_robust_basis_full_rank(): + """Full rank matrix should return all basis vectors.""" + # Create a full rank 5x3 matrix + key = jax.random.PRNGKey(42) + matrix = jax.random.normal(key, (5, 3)) + + basis = get_robust_basis(matrix) + + # Should return 3 basis vectors (all columns are linearly independent) + assert basis.shape == (5, 3) + + # Basis should be orthonormal + # Error in Gram matrix scales with: n_basis * eps + eps = jnp.finfo(basis.dtype).eps + tol = basis.shape[1] * eps + gram = basis.T @ basis + assert jnp.allclose(gram, jnp.eye(3), atol=tol) + + +def test_get_robust_basis_rank_deficient(): + """Rank deficient matrix should filter out zero singular value directions.""" + # Create a rank-2 matrix with 3 columns (third is linear combination) + col1 = jnp.array([[1.0], [0.0], [0.0], [0.0]]) + col2 = jnp.array([[0.0], [1.0], [0.0], [0.0]]) + col3 = 2.0 * col1 + 3.0 * col2 # Linear combination, rank deficient + matrix = jnp.hstack([col1, col2, col3]) + + basis = get_robust_basis(matrix) + + # Should return only 2 basis vectors (true rank is 2) + assert basis.shape[1] == 2 + + # Basis should be orthonormal + # Error in Gram matrix scales with: n_basis * eps + eps = jnp.finfo(basis.dtype).eps + tol = basis.shape[1] * eps + gram = basis.T @ basis + assert jnp.allclose(gram, jnp.eye(2), atol=tol) + + +def test_get_robust_basis_zero_matrix(): + """Zero matrix should return empty basis.""" + matrix = jnp.zeros((5, 3)) + basis = get_robust_basis(matrix) + + # Should return empty basis (no valid 
directions) + assert basis.shape == (5, 0) + + +def test_get_robust_basis_near_rank_deficient(): + """Matrix with very small singular value should filter it out.""" + # Create matrix with controlled singular values using SVD construction + key = jax.random.PRNGKey(123) + u = jax.random.normal(key, (6, 3)) + u, _ = jnp.linalg.qr(u) # Orthonormalize + + # Singular values: [10.0, 1.0, 1e-10] - last one is tiny + s = jnp.array([10.0, 1.0, 1e-10]) + v = jnp.eye(3) + + matrix = u @ jnp.diag(s) @ v + basis = get_robust_basis(matrix) + + # Should filter out the tiny singular value, keeping only 2 vectors + assert basis.shape[1] == 2 + + # Basis should be orthonormal + # Error in Gram matrix scales with: n_basis * eps + eps = jnp.finfo(basis.dtype).eps + tol = basis.shape[1] * eps + gram = basis.T @ basis + assert jnp.allclose(gram, jnp.eye(2), atol=tol) + + +def test_get_robust_basis_preserves_column_space(): + """Basis should span the same space as the original matrix's columns.""" + # Create a known rank-2 matrix + col1 = jnp.array([[1.0], [0.0], [0.0], [0.0]]) + col2 = jnp.array([[0.0], [1.0], [0.0], [0.0]]) + col3 = 2 * col1 + 3 * col2 # Linear combination + matrix = jnp.hstack([col1, col2, col3]) + + basis = get_robust_basis(matrix) + + # Basis should be rank 2 + assert basis.shape[1] == 2 + + # Compute principled tolerance based on matrix properties + # Error in projection scales with: max_dim * eps * max_singular_value + max_dim = max(matrix.shape) + eps = jnp.finfo(matrix.dtype).eps + max_sv = jnp.linalg.svd(matrix, compute_uv=False)[0] + tol = max_dim * eps * max_sv + + # Each original column should be expressible as linear combination of basis + for i in range(3): + col = matrix[:, i : i + 1] + # Project onto basis + projection = basis @ (basis.T @ col) + # Should be very close to original (within numerical tolerance) + assert jnp.allclose(projection, col, atol=tol) + + +def test_get_robust_basis_single_vector(): + """Single non-zero column should return 
normalized version.""" + vector = jnp.array([[3.0], [4.0], [0.0]]) + basis = get_robust_basis(vector) + + # Should return one basis vector + assert basis.shape == (3, 1) + + # Should be unit norm + # Error in norm computation scales with: dimension * eps + dim = vector.shape[0] + eps = jnp.finfo(vector.dtype).eps + norm_tol = dim * eps + assert jnp.allclose(jnp.linalg.norm(basis), 1.0, atol=norm_tol) + + # Should be parallel to input + # Error in dot product scales with: dimension * eps * magnitude + expected_norm = jnp.linalg.norm(vector) + parallel_tol = dim * eps * expected_norm + assert jnp.allclose(jnp.abs(basis.T @ vector), expected_norm, atol=parallel_tol) From 44f53ee5d4a7c1bfbc91d03f1792d5f4c4ab6aef Mon Sep 17 00:00:00 2001 From: ealt Date: Tue, 16 Dec 2025 16:40:43 -0800 Subject: [PATCH 06/35] Expose ability to compute subspace orthogonality in LinearRegressionAnalysis and LinearRegressionSVDAnalysis (#140) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Enhance PyTorch training with metric tracking and update configuration - Introduced `TrainingMetricTracker` for stateful metric tracking during PyTorch training, allowing for detailed monitoring of loss, learning rates, and parameter updates. - Updated `train_pytorch_model` to integrate the metric tracker, enabling automatic logging of training metrics. - Added new metrics to track cumulative and instantaneous values, including loss averages and parameter norms. - Modified `pyproject.toml` to include `reportUnnecessaryEllipsis` setting and added `diff-cover` as a development dependency. - Expanded the README with documentation on the new `TrainingMetricTracker` and its usage. - Added tests for the metric tracker to ensure accurate reporting of metrics during training. 
* pylint (#98) * add compatibility for factored states * concrete examples and alternating process * tweaks to vocab sizes * Refactor metric tracker * Update metrics * Add current loss metric enhancements - Introduced additional metrics for tracking loss: minimum loss, moving average (MA), and exponential moving average (EMA). - Updated the `compute` method to return these new metrics alongside the current loss. - Enhanced the distance from initialization metric to track the maximum distance encountered during training. * Fix bugs with metrics tracker * Fix loss metrics * update naming * Rename metric tracker * Refactor MetricTracker and metrics initialization - Removed initial_loss and optimal_loss parameters from MetricTracker constructor. - Introduced metric_kwargs to pass additional parameters for metrics initialization. - Updated the _initialize_context and _initialize_metrics methods to accommodate changes. - Enhanced CurrentLossMetric and LossProgressMetric to use kwargs for initialization, improving flexibility. * Refactor MetricTracker and MetricContext to unify named parameters handling - Renamed and consolidated handling of named parameters in MetricTracker and MetricContext. - Updated methods to use a single `named_parameters` attribute instead of separate current and previous parameters. - Adjusted metrics computations to reflect the new structure, ensuring consistency across metrics that rely on named parameters. * Refactor MetricTracker and MetricContext to use unified token count - Renamed `batch_tokens` and `total_tokens` to `num_tokens` in MetricContext and MetricTracker. - Updated metrics calculations in TokensMetric, LearningRateWeightedTokensMetric, and GradientWeightedTokensMetric to reflect the new naming convention. - Enhanced cumulative token tracking for improved clarity and consistency. 
* Refactor metrics to use update method and improve computation - Updated the `compute` method in various metrics to remove context dependency and introduced an `update` method for state management. - Enhanced metrics such as TokensMetric, LearningRateMetric, and GradientWeightedTokensMetric to maintain internal state for more efficient calculations. - Added new utility functions for L2 norm calculations across collections of tensors, improving performance and clarity in metric computations. * Refactor LossProgressMetric to separate update and compute methods - Introduced an `update` method to manage the current loss state, enhancing clarity and separation of concerns. - Updated the `compute` method to calculate progress based on the current loss, improving the metric's functionality. * Update TokensMetric to rename token metrics for clarity - Changed metric keys from "tokens/batch" and "tokens/total" to "tokens/raw" and "tokens/raw/cumulative" to better reflect their purpose and improve consistency in naming conventions. * Clear gradients and learning rates after metric computation in GradientWeightedTokensMetric and FisherInformationMetric for improved state management. * Refactor MetricTracker to enhance metric group handling and requirements management - Updated MetricTracker to initialize metric groups and requirement flags more efficiently. - Modified the update method to support group-specific requirements for learning rates, gradients, and named parameters. - Simplified the initialization of metrics by consolidating logic and improving clarity in the code structure. - Added `update_every_step` attribute to several metrics for better state management during updates. * Add logging for missing update keys in MetricTracker - Introduced logging to warn when required update keys are missing for metric groups. - Enhanced metric group handling by adding a method to identify missing update keys based on the `update_every_step` attribute. 
- Improved clarity in the metric initialization process by consolidating logic for required metrics. * Refactor L2 norm computation in metrics.py - Simplified the docstring for the _tensor_collection_l2_norms function to focus on its core functionality. - Removed unnecessary casting to CPU in the _named_tensor_distance function to streamline tensor operations. * Refactor metric computations to utilize new utility functions - Replaced internal L2 norm and distance calculations in metrics.py with calls to the newly defined tensor_collection_l2_norm and named_tensor_distance functions from pytorch_utils.py. - Updated docstrings for clarity and removed redundant comments to streamline the codebase. * Refactor MetricTracker and metrics protocol for improved clarity - Renamed the TrainingMetric protocol to Metric for better alignment with its purpose. - Updated the MetricTracker's _initialize_metrics method to utilize the new Metric protocol, enhancing type consistency and clarity in metric initialization. * Refactor metrics to utilize tensor_stack_l2_norm for improved efficiency - Replaced instances of tensor_collection_l2_norm with tensor_stack_l2_norm in various metrics for optimized L2 norm calculations. - Simplified the update and compute methods in GradientWeightedTokensMetric, CumulativeParameterUpdateMetric, and FisherInformationMetric to enhance state management and clarity. - Removed redundant internal functions for L2 norm and distance calculations, streamlining the codebase. * Remove metric tracker * refactor: Split `MetricTracker.update` into `step` and `update_metrics`, and optimize tensor operations in `named_tensor_distance`, gradient extraction, and parameter snapshots by removing CPU transfers and vectorizing calculations. 
* Add configs and metric tracker in run management * Simplify * Refactor metrics and tracker * Rename step group * Renames * Update metric tracker config validation * Make metric tracker context non-private * Get initial loss from context * Add metric tracker to e2e test * Remove example * Fix config name * Cahange dict to mapping to handle DictConfig * Fix bug in updating lr * Remove unused return value, simplify method call * Refactor metric naming conventions for consistency and clarity. Update metric keys to include context and step information, and rename CurrentLossMetric to LossMetric for better understanding. * Add loss progress to LossMetric * Refactor requirements formatting in metrics for improved readability and consistency * Enhance ParameterNormMetric to compute both parameter and weight norms, consolidating metrics into a single return statement. Remove WeightNormMetric class as its functionality is now integrated. * Rename keys, merge fisher proxy into grad weighted tokens * Update names * Enhance MetricTracker and LossMetric to support custom step values, improving flexibility in metric tracking and loss computation. 
* Remove step from context * Add eval metric tracker to training * Remove weights norm * Check if metric names is a list config * add instance to metric tracker keys * Disable databricks.sdk info logs * Configure devices to be the same * Reanme experiment/run names * Add tokens per second metrics * Detatch loss before converting to float * Create full training configs * Update uv.lock * ruff format * Avoid div by zero * lock * full merge, renaming * Fix training test * test factored representation * Fix device mismatch * Device mismatch pt 2 * finalise gen-process PR * update after merge * static analysis * static analysis tweaks * arg name * better test coverage * factor input args * ruff * better linting * bind i * elipsis to protocol * simplify protocol * format * hack to get training working again * Simplify components key * Change metrics returns * Update optimizer handling to log warnings for multiple optimizers and return None instead of the first optimizer. * Create tests for requirements * learning rates metric test * Tokens metric test * lr weighted tokens test * gradient weighted tokens test * parameter update test * Have loss progess approach zero instead of one * loss metric test * param norm test * parameter distance test * uv sync * Test pytorch utils * Create metric groups property * Create metric tracker tests * add xavier's leaky RRXOR (#130) * Update workflows to support dev branch ruleset standards * Update GitHub workflows to correctly reference pull request base branches in conditions * feat: Add `compute_subspace_orthogonality` option to `LinearRegressionAnalysis` and `LinearRegressionSVDAnalysis` to expose subspace metrics, along with corresponding tests. 
--------- Co-authored-by: Casper Lutzhoft Christensen Co-authored-by: Casper Lützhøft Christensen <61698286+casperlchristensen@users.noreply.github.com> --- simplexity/activations/activation_analyses.py | 14 +++++- tests/activations/test_activation_analysis.py | 45 +++++++++++++++++++ 2 files changed, 57 insertions(+), 2 deletions(-) diff --git a/simplexity/activations/activation_analyses.py b/simplexity/activations/activation_analyses.py index ce724574..a365093c 100644 --- a/simplexity/activations/activation_analyses.py +++ b/simplexity/activations/activation_analyses.py @@ -85,6 +85,7 @@ def __init__( skip_first_token: bool = False, fit_intercept: bool = True, concat_belief_states: bool = False, + compute_subspace_orthogonality: bool = False, ) -> None: super().__init__( analysis_type="linear_regression", @@ -92,7 +93,11 @@ def __init__( concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, skip_first_token=skip_first_token, - analysis_kwargs={"fit_intercept": fit_intercept, "concat_belief_states": concat_belief_states}, + analysis_kwargs={ + "fit_intercept": fit_intercept, + "concat_belief_states": concat_belief_states, + "compute_subspace_orthogonality": compute_subspace_orthogonality, + }, ) @@ -109,8 +114,13 @@ def __init__( rcond_values: Sequence[float] | None = None, fit_intercept: bool = True, concat_belief_states: bool = False, + compute_subspace_orthogonality: bool = False, ) -> None: - analysis_kwargs: dict[str, Any] = {"fit_intercept": fit_intercept, "concat_belief_states": concat_belief_states} + analysis_kwargs: dict[str, Any] = { + "fit_intercept": fit_intercept, + "concat_belief_states": concat_belief_states, + "compute_subspace_orthogonality": compute_subspace_orthogonality, + } if rcond_values is not None: analysis_kwargs["rcond_values"] = tuple(rcond_values) super().__init__( diff --git a/tests/activations/test_activation_analysis.py b/tests/activations/test_activation_analysis.py index b2288716..305f3b37 100644 --- 
a/tests/activations/test_activation_analysis.py +++ b/tests/activations/test_activation_analysis.py @@ -1119,6 +1119,51 @@ def test_three_factor_tuple(self, factored_belief_data): assert result.belief_states[1].shape == (batch_size, 2) assert result.belief_states[2].shape == (batch_size, 4) + def test_compute_subspace_orthogonality(self, factored_belief_data): + """Test compute_subspace_orthogonality flag exposes metrics.""" + prepared = prepare_activations( + factored_belief_data["inputs"], + factored_belief_data["factored_beliefs"], + factored_belief_data["probs"], + factored_belief_data["activations"], + prepare_options=PrepareOptions( + last_token_only=True, + concat_layers=False, + use_probs_as_weights=False, + ), + ) + + # Standard Linear Regression + analysis = LinearRegressionAnalysis( + last_token_only=True, + compute_subspace_orthogonality=True, + ) + + scalars, projections = analysis.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + assert "layer_0_orthogonality_0_1/subspace_overlap" in scalars + assert "layer_0_orthogonality_0_1/max_singular_value" in scalars + assert "layer_0_orthogonality_0_1/participation_ratio" in scalars + assert "layer_0_orthogonality_0_1/effective_rank" in scalars + + # SVD Linear Regression + analysis_svd = LinearRegressionSVDAnalysis( + last_token_only=True, + compute_subspace_orthogonality=True, + ) + + scalars_svd, _ = analysis_svd.analyze( + activations=prepared.activations, + belief_states=prepared.belief_states, + weights=prepared.weights, + ) + + assert "layer_0_orthogonality_0_1/subspace_overlap" in scalars_svd + class TestScalarSeriesMapping: """Tests for scalar_series dataframe construction.""" From 6681393573627ffed344c4a8acb6cc07ca14634a Mon Sep 17 00:00:00 2001 From: Loren AC Date: Wed, 17 Dec 2025 12:20:04 -0500 Subject: [PATCH 07/35] Add CONTRIBUTING.md with PR requirements for dev and main (#138) --- CONTRIBUTING.md | 96 
+++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..a1cd238c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,96 @@ +# Contributing Guidelines + +This document outlines requirements and best practices for contributing code to this repository. We use a two-tier review process with different standards for `dev` and `main` branches. + +## Branch Strategy + +- **`dev`**: Integration branch for ongoing work. Code here should be structurally sound and tested, but may still be evolving. +- **`main`**: Production-ready code. Higher bar for test coverage, implementation quality, and static analysis compliance. + +## Requirements by Target Branch + +### Merging into `dev` + +PRs targeting `dev` must meet the following criteria: + +**Testing** +- All existing tests must pass +- No regressions in functionality + +**Design** +- Interfaces and data structures should be well-considered and expected to remain stable +- Public APIs should be designed with future extensibility in mind +- Avoid patterns that will require breaking changes later + +**Static Analysis** +- Strive to pass formatting (`black`, `ruff format`, or equivalent) +- Strive to pass linting (`ruff`, `flake8`, or equivalent) +- Strive to pass type checking (`pyright`, `mypy`, or equivalent) +- Minor violations may be accepted with justification + +### Merging into `main` + +PRs targeting `main` must meet all `dev` requirements plus: + +**Testing** +- New tests required for new functionality +- Comprehensive coverage of edge cases and failure modes +- Coverage metrics should not decrease + +**Implementation** +- Code will be scrutinized for correctness, efficiency, and maintainability +- Algorithms and logic should be well-documented +- Error handling must be robust + +**Static Analysis** +- All checks must pass +- Any `# type: ignore`, `# noqa`, or equivalent 
suppressions require explicit justification in the PR description +- No new warnings or errors permitted + +## PR Process + +### Before Opening a PR + +1. Run the test suite locally: `pytest` +2. Run static checks: + ```bash + ruff format --check . + ruff check . + pyright + ``` +3. Ensure your branch is up to date with the target branch + +### PR Description + +Include the following in your PR description: + +- **Summary**: What does this change do? +- **Motivation**: Why is this change needed? +- **Testing**: How was this tested? +- **Breaking Changes**: Any breaking changes to public APIs? +- **Suppressions** (if any): Justification for any ignored static analysis rules + +### Review Criteria + +Reviewers will evaluate: + +| Criterion | `dev` | `main` | +|-----------|-------|--------| +| Existing tests pass | Required | Required | +| New test coverage | Encouraged | Required | +| Interface stability | Required | Required | +| Implementation quality | Reviewed | Scrutinized | +| Formatting | Should pass | Must pass | +| Linting | Should pass | Must pass | +| Type checking | Should pass | Must pass | + +## Style Guidelines + +- Follow existing code conventions in the repository +- Prefer explicit over implicit +- Write docstrings for public functions and classes +- Keep functions focused and composable + +## Questions? + +If you're unsure whether your contribution meets these standards, open a draft PR early to get feedback before investing too much time. 
\ No newline at end of file From 7a62e72118f906fc899f5baf69a13e771c1e3991 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Wed, 17 Dec 2025 10:45:05 -0800 Subject: [PATCH 08/35] Fix/dropdown slider interaction (#143) * fix slider rendering * fix reference * update tests post bug-fix * static analysis --- .../learning_rate_scheduler.py | 4 +- simplexity/visualization/plotly_renderer.py | 242 ++++-------------- .../test_learning_rate_scheduler.py | 22 +- 3 files changed, 65 insertions(+), 203 deletions(-) diff --git a/simplexity/structured_configs/learning_rate_scheduler.py b/simplexity/structured_configs/learning_rate_scheduler.py index 01a9bb68..2fcffa43 100644 --- a/simplexity/structured_configs/learning_rate_scheduler.py +++ b/simplexity/structured_configs/learning_rate_scheduler.py @@ -79,7 +79,7 @@ def is_windowed_reduce_lr_on_plateau_config(cfg: DictConfig) -> bool: """Check if the configuration is a WindowedReduceLROnPlateau scheduler configuration.""" target = cfg.get("_target_", None) if isinstance(target, str): - return target == "simplexity.lr_schedulers.WindowedReduceLROnPlateau" + return target == "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau" return False @@ -105,7 +105,7 @@ def is_lr_scheduler_target(target: str) -> bool: """Check if the target is a supported learning rate scheduler target.""" return target in ( "torch.optim.lr_scheduler.ReduceLROnPlateau", - "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", ) diff --git a/simplexity/visualization/plotly_renderer.py b/simplexity/visualization/plotly_renderer.py index 856bfaf7..1f54d9ac 100644 --- a/simplexity/visualization/plotly_renderer.py +++ b/simplexity/visualization/plotly_renderer.py @@ -226,56 +226,48 @@ def build_facet_traces(source_df: pd.DataFrame, show_legend: bool = True): if slider and layer_field: 
assert slider_field is not None - # Both slider and dropdown: complex case with frames per (layer, step) - # For simplicity, build frames for current layer only - initial_step = slider_values[0] if slider_values else None - initial_df = working_df - if initial_step is not None and slider_field in working_df.columns: - initial_df = working_df.loc[working_df[slider_field] == initial_step] + # Both slider and dropdown: build traces for ALL layers with visibility control + layer_independent = df.loc[df[layer_field] == "_no_layer_"] - traces_by_cell = build_facet_traces(initial_df) - for (row_idx, col_idx), traces in traces_by_cell.items(): - for trace in traces: - fig.add_trace(trace, row=row_idx, col=col_idx) + trace_ranges: list[tuple[int, int]] = [] + trace_count = 0 - # Build frames for slider animation + for layer_idx, layer_opt in enumerate(layer_options): + layer_specific = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] + layer_df = pd.concat([layer_specific, layer_independent], ignore_index=True) + initial_df = layer_df.loc[layer_df[slider_field] == slider_values[0]] + + traces_by_cell = build_facet_traces(initial_df, show_legend=(layer_idx == 0)) + + start = trace_count + for (row_idx, col_idx), traces in sorted(traces_by_cell.items()): + for trace in traces: + trace.visible = layer_idx == 0 + fig.add_trace(trace, row=row_idx, col=col_idx) + trace_count += 1 + trace_ranges.append((start, trace_count)) + + # Build frames for ALL layers at each slider step frames = [] for step_val in slider_values: - step_filtered = working_df.loc[working_df[slider_field] == step_val] - frame_traces_by_cell = build_facet_traces(step_filtered, show_legend=False) frame_traces: list[Any] = [] - for row_idx, col_idx in sorted(frame_traces_by_cell.keys()): - frame_traces.extend(frame_traces_by_cell[(row_idx, col_idx)]) + for layer_opt in layer_options: + layer_specific = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] + layer_df 
= pd.concat([layer_specific, layer_independent], ignore_index=True) + step_df = layer_df.loc[layer_df[slider_field] == step_val] + + traces_by_cell = build_facet_traces(step_df, show_legend=False) + for row_idx, col_idx in sorted(traces_by_cell.keys()): + frame_traces.extend(traces_by_cell[(row_idx, col_idx)]) + frames.append(go.Frame(name=str(step_val), data=frame_traces)) + fig.frames = frames _add_slider_layout(fig, slider_field, slider_values) - # Add layer dropdown + # Add layer dropdown using visibility toggling if len(layer_options) > 1: - _add_faceted_layer_dropdown( - fig, - df, - layer_field, - layer_options, - slider_field, - slider_values, - row_values, - col_values, - row_field, - col_field, - x_field, - y_field, - z_field, - color_field, - size_field_name, - hover_fields, - opacity_value, - color_specs, - layer, - has_z, - n_cols, - size_value, - ) + _add_layer_dropdown_menu(fig, layer_options, trace_ranges) elif slider: assert slider_field is not None @@ -304,36 +296,27 @@ def build_facet_traces(source_df: pd.DataFrame, show_legend: bool = True): elif dropdown and len(layer_options) > 1: assert layer_field is not None - # Dropdown only - traces_by_cell = build_facet_traces(working_df) - for (row_idx, col_idx), traces in traces_by_cell.items(): - for trace in traces: - fig.add_trace(trace, row=row_idx, col=col_idx) + # Dropdown only: build traces for ALL layers with visibility control + layer_independent = df.loc[df[layer_field] == "_no_layer_"] - _add_faceted_layer_dropdown( - fig, - df, - layer_field, - layer_options, - None, - [], - row_values, - col_values, - row_field, - col_field, - x_field, - y_field, - z_field, - color_field, - size_field_name, - hover_fields, - opacity_value, - color_specs, - layer, - has_z, - n_cols, - size_value, - ) + trace_ranges: list[tuple[int, int]] = [] + trace_count = 0 + + for layer_idx, layer_opt in enumerate(layer_options): + layer_specific = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == 
layer_opt)] + layer_df = pd.concat([layer_specific, layer_independent], ignore_index=True) + + traces_by_cell = build_facet_traces(layer_df, show_legend=(layer_idx == 0)) + + start = trace_count + for (row_idx, col_idx), traces in sorted(traces_by_cell.items()): + for trace in traces: + trace.visible = layer_idx == 0 + fig.add_trace(trace, row=row_idx, col=col_idx) + trace_count += 1 + trace_ranges.append((start, trace_count)) + + _add_layer_dropdown_menu(fig, layer_options, trace_ranges) else: # No controls traces_by_cell = build_facet_traces(working_df) @@ -393,127 +376,6 @@ def build_facet_traces(source_df: pd.DataFrame, show_legend: bool = True): return fig -def _add_faceted_layer_dropdown( - fig: go.Figure, - df: pd.DataFrame, - layer_field: str, - layer_options: list[Any], - slider_field: str | None, - slider_values: list[Any], - row_values: list[str | None], - col_values: list[str | None], - row_field: str | None, - col_field: str | None, - x_field: str, - y_field: str, - z_field: str | None, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - layer: LayerConfig, - has_z: bool, - n_cols: int, - size_value: float | None = None, -) -> None: - """Add a layer dropdown menu that rebuilds traces for faceted figures.""" - # Get layer-independent rows (e.g., ground truth from belief states) - layer_independent = df.loc[df[layer_field] == "_no_layer_"] - - buttons = [] - for layer_opt in layer_options: - # Combine layer-specific rows with layer-independent rows - layer_specific_filtered = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] - layer_df = pd.concat([layer_specific_filtered, layer_independent], ignore_index=True) - - # If there's a slider, filter to initial step - if slider_field and slider_values: - layer_df = layer_df.loc[layer_df[slider_field] == slider_values[0]] - - # Build traces for this layer - all_traces: list[Any] = [] - for 
row_idx, row_val in enumerate(row_values, start=1): - for col_idx, col_val in enumerate(col_values, start=1): - cell_df = layer_df.copy() - if row_field: - cell_df = cell_df.loc[cell_df[row_field].astype(str) == row_val] - if col_field: - cell_df = cell_df.loc[cell_df[col_field].astype(str) == col_val] - - if cell_df.empty: - continue - - if has_z: - assert z_field is not None - traces = _scatter3d_traces( - cell_df, - x_field, - y_field, - z_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - scene_idx = (row_idx - 1) * n_cols + col_idx - scene_name = "scene" if scene_idx == 1 else f"scene{scene_idx}" - for trace in traces: - trace.scene = scene_name - trace.showlegend = False - else: - traces = _scatter2d_traces( - cell_df, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - for trace in traces: - trace.showlegend = False - - all_traces.extend(traces) - - # Create button that replaces all trace data - button = { - "label": str(layer_opt), - "method": "restyle", - "args": [ - { - "x": [list(t.x) if hasattr(t, "x") else [] for t in all_traces], - "y": [list(t.y) if hasattr(t, "y") else [] for t in all_traces], - } - ], - } - if has_z: - button["args"][0]["z"] = [list(t.z) if hasattr(t, "z") else [] for t in all_traces] - - buttons.append(button) - - fig.update_layout( - updatemenus=[ - { - "buttons": buttons, - "direction": "down", - "showactive": True, - "x": 1.05, - "xanchor": "left", - "y": 1, - "yanchor": "top", - "pad": {"l": 10, "r": 10, "t": 0, "b": 0}, - } - ] - ) - - def _build_scatter3d(layer: LayerConfig, df: pd.DataFrame, controls: Any | None): aes = layer.aesthetics x_field = _require_field(aes.x, "x") diff --git a/tests/structured_configs/test_learning_rate_scheduler.py b/tests/structured_configs/test_learning_rate_scheduler.py index eb99324a..6fd6ee5c 100644 --- 
a/tests/structured_configs/test_learning_rate_scheduler.py +++ b/tests/structured_configs/test_learning_rate_scheduler.py @@ -38,7 +38,7 @@ class TestIsWindowedReduceLROnPlateauConfig: def test_is_windowed_reduce_lr_on_plateau_config(self): """Test that WindowedReduceLROnPlateau target is correctly identified.""" - cfg = OmegaConf.create({"_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau"}) + cfg = OmegaConf.create({"_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau"}) assert is_windowed_reduce_lr_on_plateau_config(cfg) is True def test_is_windowed_reduce_lr_on_plateau_config_wrong_target(self): @@ -62,7 +62,7 @@ def test_is_lr_scheduler_config_reduce_on_plateau(self): def test_is_lr_scheduler_config_windowed(self): """Test is_lr_scheduler_config with WindowedReduceLROnPlateau target.""" - cfg = OmegaConf.create({"_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau"}) + cfg = OmegaConf.create({"_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau"}) assert is_lr_scheduler_config(cfg) is True def test_is_lr_scheduler_config_other_scheduler(self): @@ -162,7 +162,7 @@ def test_valid_config(self): """Test validation passes with valid WindowedReduceLROnPlateau config.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "window_size": 10, "update_every": 100, "mode": "min", @@ -180,7 +180,7 @@ def test_valid_max_mode(self): """Test validation passes with mode='max'.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "mode": "max", } ) @@ -190,7 +190,7 @@ def test_invalid_mode(self): """Test validation fails with invalid mode.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": 
"simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "mode": "invalid", } ) @@ -201,7 +201,7 @@ def test_invalid_window_size(self): """Test validation fails with zero window_size.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "window_size": 0, } ) @@ -212,7 +212,7 @@ def test_invalid_update_every(self): """Test validation fails with zero update_every.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "update_every": 0, } ) @@ -223,7 +223,7 @@ def test_invalid_factor(self): """Test validation fails with zero factor.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "factor": 0.0, } ) @@ -234,7 +234,7 @@ def test_invalid_patience(self): """Test validation fails with negative patience.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "patience": -1, } ) @@ -245,7 +245,7 @@ def test_invalid_cooldown(self): """Test validation fails with negative cooldown.""" cfg = OmegaConf.create( { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "cooldown": -5, } ) @@ -273,7 +273,7 @@ def test_valid_windowed_reduce_lr_on_plateau(self): cfg = OmegaConf.create( { "instance": { - "_target_": "simplexity.lr_schedulers.WindowedReduceLROnPlateau", + "_target_": "simplexity.optimization.lr_schedulers.WindowedReduceLROnPlateau", "window_size": 10, "update_every": 100, "patience": 5, From 919adeed1f0468ec28f3619648fb303402298994 Mon Sep 17 00:00:00 2001 From: ealt Date: 
Wed, 17 Dec 2025 15:03:24 -0800 Subject: [PATCH 09/35] Automatically save log files at the end of managed runs (#142) --- simplexity/logger.py | 102 ++++ simplexity/run_management/run_management.py | 71 ++- simplexity/structured_configs/base.py | 10 +- simplexity/structured_configs/validation.py | 14 + tests/end_to_end/configs/logging.ini | 38 ++ .../configs/test_metric_tracker.yaml | 1 + tests/end_to_end/configs/training.yaml | 1 + .../end_to_end/configs/training_factored.yaml | 1 + tests/end_to_end/configs/training_test.yaml | 1 + tests/end_to_end/training.py | 1 + tests/structured_configs/test_base_config.py | 13 +- tests/test_logger.py | 465 +++++++++++++++++- 12 files changed, 710 insertions(+), 8 deletions(-) create mode 100644 tests/end_to_end/configs/logging.ini diff --git a/simplexity/logger.py b/simplexity/logger.py index 64f23371..a8578ba9 100644 --- a/simplexity/logger.py +++ b/simplexity/logger.py @@ -5,7 +5,10 @@ and creates a logger instance named "simplexity" for use throughout the package. """ +import contextlib import logging +from collections.abc import Iterable +from pathlib import Path # Configure Python's warnings system to be captured by the logging system. # This ensures that warnings issued by the warnings module are redirected to @@ -17,3 +20,102 @@ # This logger is used throughout the codebase for info, debug, warning, and error messages. # It can be imported and used directly: `from simplexity.logger import SIMPLEXITY_LOGGER` SIMPLEXITY_LOGGER: logging.Logger = logging.getLogger("simplexity") + + +def add_handlers_to_existing_loggers() -> None: + """Add root logger's handlers to existing loggers that don't propagate. + + This is useful for loggers created before fileConfig() that have propagate=0 + or otherwise don't inherit handlers from root. Most loggers propagate to root + by default, so they'll use root's handlers automatically. 
+ + This function adds ALL handlers from root (not just file handlers) to ensure + consistency for loggers that need explicit handlers. + + **When this is useful:** + - Loggers with propagate=0 created before fileConfig() runs (they won't inherit + root's handlers automatically) + - Third-party loggers that disable propagation and were created during early imports + (e.g., jax._src.xla_bridge if it has propagate=0) + + **When it's NOT needed:** + - Most loggers propagate to root by default, so they automatically use root's handlers + - fileConfig() with disable_existing_loggers=False should update existing loggers + that are specified in the INI config + + **Recommendation:** + Test without calling this function first. If you find loggers that should be + logging to the file but aren't (especially those with propagate=0), then call + this function after configure_logging_from_file(). Otherwise, it may be unnecessary. + """ + root_logger = logging.getLogger() + if not root_logger.handlers: + return + + # Add all root handlers to loggers that don't propagate and don't already have them + for logger_name in logging.Logger.manager.loggerDict: + logger = logging.getLogger(logger_name) + # Skip root logger itself + if logger is root_logger: + continue + + # Only add handlers to loggers that don't propagate (they need their own handlers) + # or loggers that were created before fileConfig and might not have handlers + if not logger.propagate: + for handler in root_logger.handlers: + # Check if logger already has this exact handler object (by identity, not similarity) + # This allows loggers to have multiple handlers of the same type (e.g., multiple + # FileHandlers writing to different files), while preventing duplicate handler objects + if handler not in logger.handlers: + logger.addHandler(handler) + + +def get_log_files() -> list[str]: + """Get the log files from all loggers.""" + root_logger = logging.getLogger() + log_files = [handler.baseFilename for handler in 
root_logger.handlers if isinstance(handler, logging.FileHandler)] + for logger_name in logging.Logger.manager.loggerDict: + logger = logging.getLogger(logger_name) + log_files.extend( + [handler.baseFilename for handler in logger.handlers if isinstance(handler, logging.FileHandler)] + ) + return list(set(log_files)) + + +def remove_file_handlers(logger: logging.Logger, log_file: str | None = None) -> None: + """Remove the file handlers for the log file.""" + # Iterate over a copy because we mutate logger.handlers during removal. + for handler in list(logger.handlers): + if not isinstance(handler, logging.FileHandler): + continue + + if log_file is None or handler.baseFilename == log_file: + logger.removeHandler(handler) + # Close to release file descriptors (important on some platforms). + with contextlib.suppress(OSError, ValueError): + handler.close() + + +def remove_log_file(log_file: str | Path) -> None: + """Remove the log files.""" + root_logger = logging.getLogger() + remove_file_handlers(root_logger, str(log_file)) + for logger_name in logging.Logger.manager.loggerDict: + logger = logging.getLogger(logger_name) + remove_file_handlers(logger, str(log_file)) + try: + Path(log_file).unlink() + except FileNotFoundError: + SIMPLEXITY_LOGGER.debug("[logger] log file %s does not exist", log_file) + except IsADirectoryError: + SIMPLEXITY_LOGGER.warning("[logger] log file %s is a directory", log_file) + except PermissionError: + SIMPLEXITY_LOGGER.error("[logger] permission denied when removing log file %s", log_file) + + +def remove_log_files(log_files: Iterable[str] | None = None) -> None: + """Remove the log files.""" + if log_files is None: + log_files = get_log_files() + for log_file in log_files: + remove_log_file(log_file) diff --git a/simplexity/run_management/run_management.py b/simplexity/run_management/run_management.py index b59fab73..fdadbd12 100644 --- a/simplexity/run_management/run_management.py +++ b/simplexity/run_management/run_management.py @@ 
-14,12 +14,17 @@ # (code quality, style, undefined names, etc.) to run normally while bypassing # the problematic imports checker that would crash during AST traversal. +import configparser +import logging +import logging.config import os import random import subprocess +import traceback import warnings from collections.abc import Callable, Iterator from contextlib import contextmanager, nullcontext +from pathlib import Path from typing import Any import hydra @@ -27,11 +32,12 @@ import mlflow import torch from jax._src.config import StateContextManager +from mlflow.exceptions import MlflowException, RestException from omegaconf import DictConfig, OmegaConf from torch.nn import Module as PytorchModel from simplexity.generative_processes.generative_process import GenerativeProcess -from simplexity.logger import SIMPLEXITY_LOGGER +from simplexity.logger import SIMPLEXITY_LOGGER, add_handlers_to_existing_loggers, get_log_files, remove_log_files from simplexity.logging.logger import Logger from simplexity.logging.mlflow_logger import MLFlowLogger from simplexity.persistence.mlflow_persister import MLFlowPersister @@ -128,6 +134,30 @@ def _suppress_pydantic_field_attribute_warning() -> Iterator[None]: yield +def _setup_python_logging(cfg: DictConfig) -> None: + """Setup the logging.""" + logging_config_path = cfg.get("logging_config_path") + if not logging_config_path: + SIMPLEXITY_LOGGER.debug("[logging] config path not found") + return + config_path = Path(logging_config_path) + if not config_path.exists(): + SIMPLEXITY_LOGGER.warning("[Logging] config file not found: %s", config_path) + return + + try: + logging.config.fileConfig(str(config_path), disable_existing_loggers=False) + add_handlers_to_existing_loggers() + except (configparser.Error, ValueError, OSError) as e: + SIMPLEXITY_LOGGER.error( + "[logging] failed to load config from %s: %s\n%s", + config_path, + e, + "".join(traceback.format_exception(type(e), e, e.__traceback__)), + exc_info=True, + ) + + def 
_setup_environment() -> None: """Setup the environment.""" for key, value in DEFAULT_ENVIRONMENT_VARIABLES.items(): @@ -663,14 +693,40 @@ def _setup(cfg: DictConfig, strict: bool, verbose: bool) -> Components: return components +def _log_log_files(logger: Logger, log_files: list[str], logger_name: str | None = None) -> list[str]: + """Log the log files to the loggers.""" + logger_name = logger_name or type(logger).__name__ + successfully_saved: list[str] = [] + for log_file in log_files: + try: + logger.log_artifact(log_file) + except (MlflowException, RestException, FileNotFoundError, IsADirectoryError, PermissionError) as e: + SIMPLEXITY_LOGGER.warning( + "[run] failed to upload log file %s to logger %s: %s", log_file, logger_name, e, exc_info=True + ) + else: + successfully_saved.append(log_file) + SIMPLEXITY_LOGGER.info("[run] uploaded log file %s to logger %s", log_file, logger_name) + return successfully_saved + + def _cleanup(components: Components) -> None: """Cleanup the run.""" + log_files = get_log_files() + successfully_saved: set[str] = set() if components.loggers: - for logger in components.loggers.values(): - logger.close() + for logger_key, logger in components.loggers.items(): + successfully_saved_to_logger = _log_log_files(logger, log_files, logger_name=logger_key) + successfully_saved.update(successfully_saved_to_logger) + try: + logger.close() + except Exception as e: + logging.warning(f"Failed to close logger {type(logger).__name__}: {e}", exc_info=True) + if components.persisters: for persister in components.persisters.values(): persister.cleanup() + remove_log_files(successfully_saved) def managed_run(strict: bool = True, verbose: bool = False) -> Callable[[Callable[..., Any]], Callable[..., Any]]: @@ -678,8 +734,10 @@ def managed_run(strict: bool = True, verbose: bool = False) -> Callable[[Callabl def decorator(fn: Callable[..., Any]) -> Callable[..., Any]: def wrapper(*args: Any, **kwargs: Any) -> Any: + components = Components() try: cfg = 
get_config(args, kwargs) + _setup_python_logging(cfg) validate_base_config(cfg) resolve_base_config(cfg, strict=strict) with _setup_device(cfg), _setup_mlflow(cfg): @@ -689,8 +747,11 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: return output except Exception as e: SIMPLEXITY_LOGGER.error("[run] error: %s", e) - # TODO: cleanup - raise e + try: + _cleanup(components) + except Exception as cleanup_error: + SIMPLEXITY_LOGGER.error("[run] error during cleanup: %s", cleanup_error, exc_info=True) + raise return wrapper diff --git a/simplexity/structured_configs/base.py b/simplexity/structured_configs/base.py index 455daf43..3b602c50 100644 --- a/simplexity/structured_configs/base.py +++ b/simplexity/structured_configs/base.py @@ -16,7 +16,12 @@ from simplexity.exceptions import ConfigValidationError from simplexity.logger import SIMPLEXITY_LOGGER from simplexity.structured_configs.mlflow import MLFlowConfig, validate_mlflow_config -from simplexity.structured_configs.validation import validate_mapping, validate_non_negative_int, validate_nonempty_str +from simplexity.structured_configs.validation import ( + validate_mapping, + validate_non_negative_int, + validate_nonempty_str, + validate_path, +) from simplexity.utils.config_utils import dynamic_resolve @@ -27,6 +32,7 @@ class BaseConfig: device: str | None = None seed: int | None = None tags: dict[str, str] | None = None + logging_config_path: str | None = None mlflow: MLFlowConfig | None = None @@ -39,6 +45,7 @@ def validate_base_config(cfg: DictConfig) -> None: device = cfg.get("device") seed = cfg.get("seed") tags = cfg.get("tags") + logging_config_path = cfg.get("logging_config_path") mlflow = cfg.get("mlflow") validate_nonempty_str(device, "BaseConfig.device", is_none_allowed=True) @@ -47,6 +54,7 @@ def validate_base_config(cfg: DictConfig) -> None: raise ConfigValidationError(f"BaseConfig.device must be one of: {allowed_devices}") validate_non_negative_int(seed, "BaseConfig.seed", is_none_allowed=True) 
validate_mapping(tags, "BaseConfig.tags", key_type=str, value_type=str, is_none_allowed=True) + validate_path(logging_config_path, "BaseConfig.logging_config_path", is_none_allowed=True, must_exist=True) if mlflow is not None: if not isinstance(mlflow, DictConfig): raise ConfigValidationError("BaseConfig.mlflow must be a MLFlowConfig") diff --git a/simplexity/structured_configs/validation.py b/simplexity/structured_configs/validation.py index 8e7135c8..2d4936af 100644 --- a/simplexity/structured_configs/validation.py +++ b/simplexity/structured_configs/validation.py @@ -10,6 +10,7 @@ # the problematic imports checker that would crash during AST traversal. from collections.abc import Mapping, Sequence +from pathlib import Path from typing import Any from urllib.parse import urlparse @@ -149,6 +150,19 @@ def validate_uri(uri: str | None, field_name: str, is_none_allowed: bool = False raise ConfigValidationError(f"{field_name} is not a valid URI: {e}") from e +def validate_path(path: str | None, field_name: str, is_none_allowed: bool = False, must_exist: bool = True) -> None: + """Validate that a string is a valid path.""" + if is_none_allowed and path is None: + return + if not isinstance(path, str): + allowed_types = "a string or None" if is_none_allowed else "a string" + raise ConfigValidationError(f"{field_name} must be {allowed_types}, got {type(path)}") + if not path.strip(): + raise ConfigValidationError(f"{field_name} cannot be empty") + if must_exist and not Path(path).exists(): + raise ConfigValidationError(f"{field_name} does not exist: {path}") + + def validate_transition_matrices(transition_matrices: Any, field_name: str) -> None: """Validate a transition matrices. 
diff --git a/tests/end_to_end/configs/logging.ini b/tests/end_to_end/configs/logging.ini new file mode 100644 index 00000000..8aa19941 --- /dev/null +++ b/tests/end_to_end/configs/logging.ini @@ -0,0 +1,38 @@ +[loggers] +keys=root,simplexity + +[handlers] +keys=consoleHandler,fileHandler + +[formatters] +keys=standardFormatter,detailedFormatter + +[logger_root] +level=INFO +handlers=consoleHandler,fileHandler + +[logger_simplexity] +level=INFO +handlers=consoleHandler,fileHandler +qualname=simplexity +propagate=0 + +[handler_consoleHandler] +class=StreamHandler +level=INFO +formatter=standardFormatter +args=(sys.stdout,) + +[handler_fileHandler] +class=FileHandler +level=DEBUG +formatter=detailedFormatter +args=('training.log', 'a', 'utf-8') + +[formatter_standardFormatter] +format=%(asctime)s [%(levelname)s] %(name)s: %(message)s +datefmt=%Y-%m-%d %H:%M:%S + +[formatter_detailedFormatter] +format=%(asctime)s [%(levelname)s] %(name)s:%(lineno)d: %(message)s +datefmt=%Y-%m-%d %H:%M:%S diff --git a/tests/end_to_end/configs/test_metric_tracker.yaml b/tests/end_to_end/configs/test_metric_tracker.yaml index e736365d..b7cad76b 100644 --- a/tests/end_to_end/configs/test_metric_tracker.yaml +++ b/tests/end_to_end/configs/test_metric_tracker.yaml @@ -10,6 +10,7 @@ experiment_name: metric_tracker_test run_name: metric_tracker_test_${now:%Y%m%d_%H%M%S} device: auto seed: 42 +logging_config_path: tests/end_to_end/configs/logging.ini tags: research_step: test retention: temp diff --git a/tests/end_to_end/configs/training.yaml b/tests/end_to_end/configs/training.yaml index 56cd1f81..810450e2 100644 --- a/tests/end_to_end/configs/training.yaml +++ b/tests/end_to_end/configs/training.yaml @@ -16,6 +16,7 @@ experiment_name: training_test run_name: training_test_${now:%Y%m%d_%H%M%S} device: auto seed: 0 +logging_config_path: tests/end_to_end/configs/logging.ini tags: research_step: demo retention: temp diff --git a/tests/end_to_end/configs/training_factored.yaml 
b/tests/end_to_end/configs/training_factored.yaml index e6c29906..c2423a3f 100644 --- a/tests/end_to_end/configs/training_factored.yaml +++ b/tests/end_to_end/configs/training_factored.yaml @@ -16,6 +16,7 @@ experiment_name: training_test_factored run_name: training_test_factored_${now:%Y%m%d_%H%M%S} device: auto seed: 0 +logging_config_path: tests/end_to_end/configs/logging.ini tags: research_step: demo retention: temp diff --git a/tests/end_to_end/configs/training_test.yaml b/tests/end_to_end/configs/training_test.yaml index a5ef2f37..686aadd7 100644 --- a/tests/end_to_end/configs/training_test.yaml +++ b/tests/end_to_end/configs/training_test.yaml @@ -16,6 +16,7 @@ experiment_name: training_test run_name: training_test_${now:%Y%m%d_%H%M%S} device: auto seed: 0 +logging_config_path: tests/end_to_end/configs/logging.ini tags: research_step: demo retention: temp diff --git a/tests/end_to_end/training.py b/tests/end_to_end/training.py index 35dfaf56..64f510b4 100644 --- a/tests/end_to_end/training.py +++ b/tests/end_to_end/training.py @@ -82,6 +82,7 @@ class TrainingRunConfig: run_name: str seed: int tags: dict[str, str] + logging_config_path: str | None = None def _expand_init_state( diff --git a/tests/structured_configs/test_base_config.py b/tests/structured_configs/test_base_config.py index 87b7b135..47bf4e85 100644 --- a/tests/structured_configs/test_base_config.py +++ b/tests/structured_configs/test_base_config.py @@ -13,6 +13,7 @@ # (code quality, style, undefined names, etc.) to run normally while bypassing # the problematic imports checker that would crash during AST traversal. 
+from pathlib import Path from unittest.mock import call, patch import pytest @@ -25,16 +26,20 @@ class TestValidateBaseConfig: """Test validate_base_config.""" - def test_validate_base_config_valid(self) -> None: + def test_validate_base_config_valid(self, tmp_path: Path) -> None: """Test validate_base_config with valid configs.""" cfg = DictConfig({}) validate_base_config(cfg) + logging_config_path = tmp_path / "logging.ini" + logging_config_path.touch() + cfg = DictConfig( { "device": "auto", "seed": 42, "tags": DictConfig({"key": "value"}), + "logging_config_path": str(logging_config_path), "mlflow": DictConfig({"experiment_name": "test", "run_name": "test"}), } ) @@ -89,6 +94,12 @@ def test_validate_base_config_invalid_tags(self) -> None: with pytest.raises(ConfigValidationError, match="BaseConfig.tags values must be strs"): validate_base_config(cfg) + def test_validate_base_config_invalid_logging_config_path(self) -> None: + """Test validate_base_config with invalid logging_config_path.""" + cfg = DictConfig({"logging_config_path": "does/not/exist.ini"}) + with pytest.raises(ConfigValidationError, match="BaseConfig.logging_config_path does not exist"): + validate_base_config(cfg) + def test_validate_base_config_invalid_mlflow(self) -> None: """Test validate_base_config with invalid mlflow.""" # Non-MLFlowConfig mlflow diff --git a/tests/test_logger.py b/tests/test_logger.py index a8aa9057..884d89dc 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -9,12 +9,475 @@ # (code quality, style, undefined names, etc.) to run normally while bypassing # the problematic imports checker that would crash during AST traversal. 
+import contextlib +import functools import logging +import logging.config +from pathlib import Path -from simplexity.logger import SIMPLEXITY_LOGGER +from simplexity.logger import SIMPLEXITY_LOGGER, get_log_files, remove_file_handlers, remove_log_file, remove_log_files + + +def clear_logging_state(): + """Clear the logging state.""" + logging.shutdown() + logging.root.manager.loggerDict.clear() + logging.root.handlers.clear() + logging.root.level = logging.NOTSET + logging.root.propagate = True + logging.root.disabled = False + + +@contextlib.contextmanager +def clean_slate(): + """Context manager that saves all logging state, clears it during yield, then restores it.""" + # Save all existing logging state + saved_logger_dict = logging.root.manager.loggerDict.copy() + saved_logger_states = {} + for logger_name, logger in saved_logger_dict.items(): + if isinstance(logger, logging.Logger): + saved_logger_states[logger_name] = { + "handlers": logger.handlers.copy(), + "level": logger.level, + "propagate": logger.propagate, + "disabled": logger.disabled, + } + + # Save root logger state + saved_root_handlers = logging.root.handlers.copy() + saved_root_level = logging.root.level + saved_root_propagate = logging.root.propagate + saved_root_disabled = logging.root.disabled + + try: + clear_logging_state() + yield + finally: + clear_logging_state() + + # Restore loggerDict with original logger objects + logging.root.manager.loggerDict.update(saved_logger_dict) + + # Restore each logger's state (handlers, level, etc.) 
+ for logger_name, logger_state in saved_logger_states.items(): + logger = saved_logger_dict[logger_name] + if isinstance(logger, logging.Logger): + logger.handlers = logger_state["handlers"] + logger.level = logger_state["level"] + logger.propagate = logger_state["propagate"] + logger.disabled = logger_state["disabled"] + + # Restore root logger state + logging.root.handlers = saved_root_handlers + logging.root.level = saved_root_level + logging.root.propagate = saved_root_propagate + logging.root.disabled = saved_root_disabled + + +def with_clean_slate(func): + """Decorator that preserves and restores the global logging configuration for a test function.""" + + @functools.wraps(func) + def wrapper(*args, **kwargs): + with clean_slate(): + return func(*args, **kwargs) + + return wrapper def test_simplexity_logger() -> None: """Test that the logger is created with the correct name.""" assert SIMPLEXITY_LOGGER.name == "simplexity" assert isinstance(SIMPLEXITY_LOGGER, logging.Logger) + + +@with_clean_slate +def test_get_log_files_no_files() -> None: + """Test that the log files are returned correctly.""" + assert not get_log_files() + + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + } + }, + "loggers": { + "root": { + "handlers": ["stream"], + }, + "simplexity": { + "handlers": ["stream"], + }, + }, + } + ) + assert not get_log_files() + + +@with_clean_slate +def test_get_log_files_with_files(tmp_path: Path) -> None: + """Test that the log files are returned correctly.""" + test_1_log_file = str(tmp_path / "test_1.log") + test_2_log_file = str(tmp_path / "test_2.log") + test_3_log_file = str(tmp_path / "test_3.log") + + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + "file_1": { + "class": "logging.FileHandler", + "filename": test_1_log_file, + }, + "file_2": { + 
"class": "logging.FileHandler", + "filename": test_2_log_file, + }, + "file_3": { + "class": "logging.FileHandler", + "filename": test_3_log_file, + }, + }, + "loggers": { + "root": { + "handlers": ["stream", "file_2"], + }, + "simplexity": { + "handlers": ["file_1", "file_3"], + }, + "other": { + "handlers": ["file_1"], + }, + }, + } + ) + + log_files = get_log_files() + assert len(log_files) == 3 + assert set(log_files) == {test_1_log_file, test_2_log_file, test_3_log_file} + + +@with_clean_slate +def test_remove_file_handlers_with_no_files(tmp_path: Path) -> None: + """Test that the file handlers are removed correctly.""" + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + "file": { + "class": "logging.FileHandler", + "filename": str(tmp_path / "test.log"), + }, + }, + "loggers": { + "root": { + "handlers": ["stream"], + }, + "simplexity": { + "handlers": ["file"], + }, + }, + } + ) + # no file handlers to remove + root_logger = logging.getLogger("root") + assert len(root_logger.handlers) == 1 + remove_file_handlers(root_logger) + assert len(root_logger.handlers) == 1 + + # no matching file handler to remove + simplexity_logger = logging.getLogger("simplexity") + assert len(simplexity_logger.handlers) == 1 + remove_file_handlers(simplexity_logger, str(tmp_path / "different.log")) + assert len(simplexity_logger.handlers) == 1 + + +@with_clean_slate +def test_remove_file_handlers(tmp_path: Path) -> None: + """Test that the file handlers are removed correctly.""" + test_log_file_1 = str(tmp_path / "test_1.log") + test_log_file_2 = str(tmp_path / "test_2.log") + test_log_file_3 = str(tmp_path / "test_3.log") + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + "file_1": { + "class": "logging.FileHandler", + "filename": test_log_file_1, + }, + "file_2": { + "class": 
"logging.FileHandler", + "filename": test_log_file_2, + }, + "file_3": { + "class": "logging.FileHandler", + "filename": test_log_file_3, + }, + "another_file_3": { + "class": "logging.FileHandler", + "filename": test_log_file_3, + }, + }, + "loggers": { + "root": { + "handlers": ["stream", "file_2"], + }, + "simplexity": { + "handlers": ["stream", "file_1", "file_2"], + }, + "other": { + "handlers": ["file_2", "file_3", "another_file_3"], + }, + }, + } + ) + # remove matching file handler + root_logger = logging.getLogger("root") + assert len(root_logger.handlers) == 2 + remove_file_handlers(root_logger, test_log_file_2) + assert len(root_logger.handlers) == 1 # stream handler is still present + + # remove all file handlers + simplexity_logger = logging.getLogger("simplexity") + assert len(simplexity_logger.handlers) == 3 + remove_file_handlers(simplexity_logger) + assert len(simplexity_logger.handlers) == 1 # stream handler is still present + assert not any(isinstance(h, logging.FileHandler) for h in simplexity_logger.handlers) + + # remove multiple matching file handlers + other_logger = logging.getLogger("other") + assert len(other_logger.handlers) == 3 + remove_file_handlers(other_logger, test_log_file_3) + assert len(other_logger.handlers) == 1 # file_2 handler is still present + file_handler = other_logger.handlers[0] + assert isinstance(file_handler, logging.FileHandler) + assert file_handler.baseFilename != test_log_file_3 + + +@with_clean_slate +def test_remove_log_file(tmp_path: Path) -> None: + """Test that the log file is removed correctly.""" + test_path_1 = tmp_path / "test_1.log" + test_path_2 = tmp_path / "test_2.log" + + test_path_1.touch() + test_path_2.touch() + + test_log_file_1 = str(test_path_1) + test_log_file_2 = str(test_path_2) + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + "file_1": { + "class": "logging.FileHandler", + "filename": 
test_log_file_1, + }, + "file_2": { + "class": "logging.FileHandler", + "filename": test_log_file_2, + }, + "another_file_2": { + "class": "logging.FileHandler", + "filename": test_log_file_2, + }, + }, + "loggers": { + "root": { + "handlers": ["stream", "file_1"], + }, + "simplexity": { + "handlers": ["file_1", "file_2"], + }, + "other": { + "handlers": ["file_2", "another_file_2"], + }, + }, + } + ) + + root_logger = logging.getLogger("root") + simplexity_logger = logging.getLogger("simplexity") + other_logger = logging.getLogger("other") + + assert len(root_logger.handlers) == 2 + assert len(simplexity_logger.handlers) == 2 + assert len(other_logger.handlers) == 2 + assert test_path_1.exists() + assert test_path_2.exists() + + remove_log_file(test_log_file_2) + + assert len(root_logger.handlers) == 2 # no file_2 handlers to remove + assert len(simplexity_logger.handlers) == 1 # file_2 handler is removed + assert len(other_logger.handlers) == 0 # both file_2 handlers are removed + assert test_path_1.exists() + assert not test_path_2.exists() + + +@with_clean_slate +def test_remove_log_files(tmp_path: Path) -> None: + """Test that the log files are removed correctly.""" + test_path_1 = tmp_path / "test_1.log" + test_path_2 = tmp_path / "test_2.log" + test_path_3 = tmp_path / "test_3.log" + + test_path_1.touch() + test_path_2.touch() + test_path_3.touch() + + test_log_file_1 = str(test_path_1) + test_log_file_2 = str(test_path_2) + test_log_file_3 = str(test_path_3) + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + "file_1": { + "class": "logging.FileHandler", + "filename": test_log_file_1, + }, + "file_2": { + "class": "logging.FileHandler", + "filename": test_log_file_2, + }, + "another_file_2": { + "class": "logging.FileHandler", + "filename": test_log_file_2, + }, + "file_3": { + "class": "logging.FileHandler", + "filename": test_log_file_3, + }, + }, + 
"loggers": { + "root": { + "handlers": ["stream", "file_1"], + }, + "simplexity": { + "handlers": ["file_1", "file_2"], + }, + "other": { + "handlers": ["file_2", "another_file_2", "file_3"], + }, + }, + } + ) + + root_logger = logging.getLogger("root") + simplexity_logger = logging.getLogger("simplexity") + other_logger = logging.getLogger("other") + + assert len(root_logger.handlers) == 2 + assert len(simplexity_logger.handlers) == 2 + assert len(other_logger.handlers) == 3 + assert test_path_1.exists() + assert test_path_2.exists() + + remove_log_files() + + assert len(root_logger.handlers) == 1 # stream handler is still present + assert len(simplexity_logger.handlers) == 0 # all file handlers are removed + assert len(other_logger.handlers) == 0 # all file handlers are removed + assert not test_path_1.exists() + assert not test_path_2.exists() + assert not test_path_3.exists() + + +@with_clean_slate +def test_remove_log_files_with_specific_files(tmp_path: Path) -> None: + """Test that the log files are removed correctly.""" + test_path_1 = tmp_path / "test_1.log" + test_path_2 = tmp_path / "test_2.log" + test_path_3 = tmp_path / "test_3.log" + + test_path_1.touch() + test_path_2.touch() + test_path_3.touch() + + test_log_file_1 = str(test_path_1) + test_log_file_2 = str(test_path_2) + test_log_file_3 = str(test_path_3) + logging.config.dictConfig( + { + "version": 1, + "handlers": { + "stream": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + }, + "file_1": { + "class": "logging.FileHandler", + "filename": test_log_file_1, + }, + "file_2": { + "class": "logging.FileHandler", + "filename": test_log_file_2, + }, + "another_file_2": { + "class": "logging.FileHandler", + "filename": test_log_file_2, + }, + "file_3": { + "class": "logging.FileHandler", + "filename": test_log_file_3, + }, + }, + "loggers": { + "root": { + "handlers": ["stream", "file_1", "file_3"], + }, + "simplexity": { + "handlers": ["file_1", "file_2"], + }, + "other": { + 
"handlers": ["file_2", "another_file_2", "file_3"], + }, + }, + } + ) + + root_logger = logging.getLogger("root") + simplexity_logger = logging.getLogger("simplexity") + other_logger = logging.getLogger("other") + + assert len(root_logger.handlers) == 3 + assert len(simplexity_logger.handlers) == 2 + assert len(other_logger.handlers) == 3 + assert test_path_1.exists() + assert test_path_2.exists() + assert test_path_3.exists() + + remove_log_files({test_log_file_2, test_log_file_3}) + + assert len(root_logger.handlers) == 2 # stream and file_1 handler are still present + assert len(simplexity_logger.handlers) == 1 # file_1 handler is still present + assert len(other_logger.handlers) == 0 # file_3 and both file_2 handlers are removed + assert test_path_1.exists() + assert not test_path_2.exists() + assert not test_path_3.exists() From 0b1b6a2bd6fb1a9883e01cdfd0f4ba7d16f2d1bb Mon Sep 17 00:00:00 2001 From: adamimos Date: Thu, 18 Dec 2025 17:16:59 -0800 Subject: [PATCH 10/35] Add simplexity-multirun CLI for parallel experiment execution (#144) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add simplexity-multirun CLI for parallel experiment execution Add a new CLI tool for running multiple Hydra experiments in parallel across GPUs or CPU workers with proper device isolation. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Fix pylint and ruff linting issues - Add pylint disable comments for too-many-arguments, too-many-locals, etc. 
- Initialize variables before conditional to fix possibly-used-before-assignment - Use raw docstring (r""") for backslash escapes - Add strict=True to zip() call 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Refactor run_parallel to separate job generation from dispatch - Add Job dataclass with to_cmd() method for rendering commands - Extract generate_jobs() as a pure function for testability - Extract dispatch_jobs() to encapsulate ProcessPoolExecutor logic - Simplify main() to two-phase structure: generate then dispatch - Dry-run now exits before dispatch instead of passing through executor - Add tests for Job and generate_jobs() (GPU round-robin, sweep expansion) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Add missing docstrings to test methods 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --------- Co-authored-by: adamimos Co-authored-by: Claude Opus 4.5 --- pyproject.toml | 3 + simplexity/cli/__init__.py | 1 + simplexity/cli/run_parallel.py | 464 +++++++++++++++++++++++++++++++++ tests/cli/test_run_parallel.py | 191 ++++++++++++++ 4 files changed, 659 insertions(+) create mode 100644 simplexity/cli/__init__.py create mode 100644 simplexity/cli/run_parallel.py create mode 100644 tests/cli/test_run_parallel.py diff --git a/pyproject.toml b/pyproject.toml index c52345df..fc728e1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,6 +61,9 @@ penzai = [ "penzai", ] # Deprecated: penzai is no longer maintained. 
+[project.scripts] +simplexity-multirun = "simplexity.cli.run_parallel:main" + [tool.ruff] line-length = 120 target-version = "py312" diff --git a/simplexity/cli/__init__.py b/simplexity/cli/__init__.py new file mode 100644 index 00000000..4e2fe388 --- /dev/null +++ b/simplexity/cli/__init__.py @@ -0,0 +1 @@ +"""Command-line tools for simplexity.""" diff --git a/simplexity/cli/run_parallel.py b/simplexity/cli/run_parallel.py new file mode 100644 index 00000000..04fbf5f8 --- /dev/null +++ b/simplexity/cli/run_parallel.py @@ -0,0 +1,464 @@ +#!/usr/bin/env python +r"""Run multiple Hydra experiments in parallel across GPUs or CPU. + +simplexity-multirun is a CLI tool for running multiple Hydra experiments in +parallel with proper device isolation. It's a simpler alternative to Ray or +Hydra's joblib launcher when you just need to run experiments on a single machine. + +How It Works +------------ +Each experiment runs in a separate subprocess with CUDA_VISIBLE_DEVICES set to +ensure exclusive GPU access. Jobs are assigned to devices round-robin and started +with a 5-second stagger to avoid initialization race conditions. + +Device Modes +------------ +GPU Mode (--gpus): + Specify which GPUs to use. Each job gets exclusive access to one GPU. + Jobs are distributed round-robin across the specified GPUs. + + Example: --gpus 0,1 with 4 jobs -> Job0:GPU0, Job1:GPU1, Job2:GPU0, Job3:GPU1 + +CPU Mode (--cpu --workers N): + Run without GPU acceleration. Specify number of parallel workers. + Sets CUDA_VISIBLE_DEVICES="" to disable GPU for all jobs. + +Sweep Modes +----------- +--sweep (Cartesian Product): + Generate all combinations of parameters. Can specify multiple times. + + Example: --sweep 'a=1,2' --sweep 'b=x,y' + Generates: a=1 b=x, a=1 b=y, a=2 b=x, a=2 b=y (4 jobs) + +--sweep-file (Config File): + Load sweep parameters from a YAML file. Can be combined with --sweep. 
+ + Example file (sweeps/my_experiment.yaml): + seed: [1, 2, 3, 4] + model.lr: [0.01, 0.001] + + Example: --sweep-file sweeps/my_experiment.yaml + Generates: 4 x 2 = 8 jobs (cartesian product) + +--overrides (Explicit List): + Run specific override strings. Each string is one job. + + Example: --overrides 'seed=1 lr=0.01' 'seed=2 lr=0.001' + Generates: 2 jobs with those exact overrides + +Usage Examples +-------------- + # Basic sweep across 2 GPUs + simplexity-multirun run.py -c train_config --gpus 0,1 --sweep 'seed=1,2,3,4' + + # Load sweep params from a YAML file + simplexity-multirun run.py -c train_config --gpus 0,1 --sweep-file sweeps/experiment.yaml + + # CPU-only mode with 4 parallel workers + simplexity-multirun run.py -c train_config --cpu --workers 4 --sweep 'seed=1,2,3,4' + + # Cartesian product: 2x2 = 4 experiments + simplexity-multirun run.py -c train_config --gpus 0,1,2,3 \\ + --sweep 'model.n_heads=1,2' \\ + --sweep 'model.n_layers=1,2' + + # Limit parallelism (e.g., 4 GPUs but only 2 jobs at a time) + simplexity-multirun run.py -c train_config --gpus 0,1,2,3 --max-parallel 2 --sweep 'seed=1,2,3,4' + + # Dry run to preview commands without executing + simplexity-multirun run.py -c train_config --gpus 0,1 --sweep 'seed=1,2' --dry-run + +Why Not Ray/Joblib? +------------------- +- No package shipping or virtual environment creation +- Simple subprocess isolation - easy to debug +- No complex configuration or runtime environments +- Works reliably for single-machine multi-GPU setups +""" + +import argparse +import itertools +import os +import subprocess +import sys +import time +from concurrent.futures import ProcessPoolExecutor, as_completed +from dataclasses import dataclass +from pathlib import Path + +from omegaconf import OmegaConf + +# Delay between starting jobs to avoid initialization race conditions +JOB_START_DELAY_SECONDS = 5 + + +@dataclass(frozen=True) +class Job: + """Represents a single experiment job to be executed. 
+ + Attributes: + script: Path to the Python script to run. + config_name: Hydra config name. + overrides: Space-separated Hydra overrides. + gpu_id: GPU ID to assign via CUDA_VISIBLE_DEVICES, or None for CPU-only. + job_num: Job number for logging and identification. + """ + + script: str + config_name: str + overrides: str + gpu_id: int | None + job_num: int + + def to_cmd(self) -> list[str]: + """Render the full command list for this job. + + Returns: + List of command arguments suitable for subprocess execution. + """ + cmd = [ + "uv", + "run", + "python", + self.script, + f"--config-name={self.config_name}", + ] + + if self.overrides: + cmd.extend(self.overrides.split()) + + return cmd + + @property + def device_str(self) -> str: + """Human-readable device description.""" + return f"GPU {self.gpu_id}" if self.gpu_id is not None else "CPU" + + +def load_sweep_file(path: str) -> list[str]: + """Load sweep parameters from a YAML file. + + The file should contain parameter names as keys and lists of values: + + seed: [1, 2, 3, 4] + model.lr: [0.01, 0.001] + + Args: + path: Path to the sweep YAML file. + + Returns: + List of sweep strings like ['seed=1,2,3,4', 'model.lr=0.01,0.001'] + """ + cfg = OmegaConf.load(path) + sweeps = [] + for key, values in cfg.items(): + # Convert OmegaConf types to Python types + values = OmegaConf.to_object(values) if OmegaConf.is_config(values) else values + if isinstance(values, (list, tuple)): + values_str = ",".join(str(v) for v in values) + else: + values_str = str(values) + sweeps.append(f"{key}={values_str}") + return sweeps + + +def parse_sweep_param(sweep_str: str) -> tuple[str, list[str]]: + """Parse a sweep parameter like 'param=1,2,3' into (param, [1, 2, 3]).""" + key, values = sweep_str.split("=", 1) + return key, [v.strip() for v in values.split(",")] + + +def generate_override_combinations(sweeps: list[str]) -> list[str]: + """Generate all combinations of sweep parameters (cartesian product). 
+ + Args: + sweeps: List of sweep strings like ['a=1,2', 'b=x,y'] + + Returns: + List of override strings like ['a=1 b=x', 'a=1 b=y', 'a=2 b=x', 'a=2 b=y'] + """ + if not sweeps: + return [""] + + parsed = [parse_sweep_param(s) for s in sweeps] + keys = [p[0] for p in parsed] + value_lists = [p[1] for p in parsed] + + combinations = [] + for values in itertools.product(*value_lists): + override = " ".join(f"{k}={v}" for k, v in zip(keys, values, strict=True)) + combinations.append(override) + + return combinations + + +def generate_jobs( + script: str, + config_name: str, + sweeps: list[str], + overrides: list[str], + gpus: list[int] | None, +) -> list[Job]: + """Generate a list of jobs from sweep parameters and device configuration. + + This is a pure function with no side effects, making it trivially testable. + + Args: + script: Path to the Python script to run. + config_name: Hydra config name. + sweeps: List of sweep strings like ['a=1,2', 'b=x,y']. Should include + any sweeps loaded from sweep files. + overrides: Explicit override strings (alternative to sweeps). + gpus: List of GPU IDs for round-robin assignment, or None for CPU mode. + + Returns: + List of Job objects ready for dispatch. + """ + if overrides: + override_list = overrides + elif sweeps: + override_list = generate_override_combinations(sweeps) + else: + override_list = [""] + + jobs = [] + for i, override_str in enumerate(override_list): + gpu_id = gpus[i % len(gpus)] if gpus is not None else None + jobs.append( + Job( + script=script, + config_name=config_name, + overrides=override_str, + gpu_id=gpu_id, + job_num=i, + ) + ) + + return jobs + + +def _run_single_job(job: Job) -> dict: + """Run a single experiment job. + + This is an internal function called by dispatch_jobs via ProcessPoolExecutor. + + Args: + job: The Job to execute. + + Returns: + Dict with job results including status, stdout, stderr. 
+ """ + env = os.environ.copy() + if job.gpu_id is not None: + env["CUDA_VISIBLE_DEVICES"] = str(job.gpu_id) + else: + env["CUDA_VISIBLE_DEVICES"] = "" + + cmd = job.to_cmd() + + try: + result = subprocess.run( + cmd, + env=env, + cwd=Path.cwd(), + capture_output=True, + text=True, + check=False, + ) + + status = "success" if result.returncode == 0 else "failed" + return { + "job_num": job.job_num, + "gpu": job.gpu_id, + "status": status, + "returncode": result.returncode, + "overrides": job.overrides, + "stdout": result.stdout[-2000:] if result.stdout else "", + "stderr": result.stderr[-2000:] if result.stderr else "", + } + except Exception as e: # noqa: BLE001 # pylint: disable=broad-exception-caught + return { + "job_num": job.job_num, + "gpu": job.gpu_id, + "status": "error", + "error": str(e), + "overrides": job.overrides, + } + + +def dispatch_jobs(jobs: list[Job], max_parallel: int) -> list[dict]: + """Execute jobs in parallel with staggered starts. + + Args: + jobs: List of Job objects to execute. + max_parallel: Maximum number of jobs to run concurrently. + + Returns: + List of result dictionaries, one per job. 
+ """ + results = [] + + with ProcessPoolExecutor(max_workers=max_parallel) as executor: + futures = {} + + for i, job in enumerate(jobs): + if i > 0: + time.sleep(JOB_START_DELAY_SECONDS) + + print(f"[Job {job.job_num}] {job.device_str}: {' '.join(job.to_cmd())}") + future = executor.submit(_run_single_job, job) + futures[future] = job.job_num + + for future in as_completed(futures): + result = future.result() + results.append(result) + status_symbol = "\u2713" if result["status"] == "success" else "\u2717" + device_str = f"GPU {result['gpu']}" if result["gpu"] is not None else "CPU" + print(f"[Job {result['job_num']}] {status_symbol} {device_str}: {result['status']}") + + if result["status"] == "failed": + print(f" stderr: {result.get('stderr', '')[:500]}") + + return results + + +def main() -> None: + """Main entry point for the CLI.""" + parser = argparse.ArgumentParser( + description="Run multiple Hydra experiments in parallel across GPUs", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=__doc__, + ) + parser.add_argument( + "script", + help="Path to the run script (e.g., experiments/training/run.py)", + ) + parser.add_argument( + "--config-name", + "-c", + required=True, + help="Hydra config name (e.g., train_small)", + ) + parser.add_argument( + "--gpus", + "-g", + default=None, + help="Comma-separated GPU IDs to use (e.g., '0,1,2,3')", + ) + parser.add_argument( + "--cpu", + action="store_true", + help="Run on CPU only (disables GPU)", + ) + parser.add_argument( + "--workers", + "-w", + type=int, + default=None, + help="Number of parallel workers (required with --cpu, optional otherwise)", + ) + parser.add_argument( + "--sweep", + "-s", + action="append", + default=[], + help="Sweep parameter (e.g., 'seed=1,2,3'). 
Can specify multiple times for cartesian product.", + ) + parser.add_argument( + "--sweep-file", + "-f", + default=None, + help="Path to YAML file containing sweep parameters.", + ) + parser.add_argument( + "--overrides", + "-o", + nargs="*", + default=[], + help="Explicit override strings to run (alternative to --sweep)", + ) + parser.add_argument( + "--max-parallel", + "-p", + type=int, + default=None, + help="Max parallel jobs (default: number of GPUs)", + ) + parser.add_argument( + "--dry-run", + "-n", + action="store_true", + help="Print commands without executing", + ) + + args = parser.parse_args() + + # Determine devices (GPUs or CPU workers) + gpus: list[int] | None = None + n_workers: int = 0 + device_desc: str = "" + + if args.cpu: + if args.workers is None: + parser.error("--workers is required when using --cpu") + gpus = None + n_workers = args.workers + device_desc = f"{n_workers} CPU workers" + elif args.gpus: + gpus = [int(g.strip()) for g in args.gpus.split(",")] + n_workers = args.workers or len(gpus) + device_desc = f"GPUs {gpus}" + else: + parser.error( + "You must specify devices to use.\n\n" + "Options:\n" + " --gpus 0,1,2,3 Run on specific GPUs (comma-separated IDs)\n" + " --cpu --workers 4 Run on CPU only with N parallel workers\n\n" + "Examples:\n" + " simplexity-multirun run.py -c config --gpus 0,1 --sweep 'seed=1,2,3,4'\n" + " simplexity-multirun run.py -c config --cpu --workers 4 --sweep 'seed=1,2,3,4'" + ) + + # Phase 1: Generate jobs (pure, no I/O except sweep file loading) + all_sweeps = list(args.sweep) + if args.sweep_file: + all_sweeps.extend(load_sweep_file(args.sweep_file)) + + jobs = generate_jobs( + script=args.script, + config_name=args.config_name, + sweeps=all_sweeps, + overrides=args.overrides, + gpus=gpus, + ) + + n_jobs = len(jobs) + max_parallel = args.max_parallel or n_workers + + print(f"Running {n_jobs} experiments across {device_desc}") + print(f"Max parallel: {max_parallel}") + print() + + # Handle dry-run: print 
commands and exit before dispatch + if args.dry_run: + for job in jobs: + print(f"[Job {job.job_num}] {job.device_str}: {' '.join(job.to_cmd())}") + return + + # Phase 2: Dispatch jobs (handles all subprocess/parallelism complexity) + results = dispatch_jobs(jobs, max_parallel) + + # Summary + print() + print("=" * 60) + successes = sum(1 for r in results if r["status"] == "success") + failures = sum(1 for r in results if r["status"] == "failed") + print(f"Complete: {successes} succeeded, {failures} failed out of {n_jobs} jobs") + + if failures > 0: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/tests/cli/test_run_parallel.py b/tests/cli/test_run_parallel.py new file mode 100644 index 00000000..fc0ba934 --- /dev/null +++ b/tests/cli/test_run_parallel.py @@ -0,0 +1,191 @@ +"""Tests for the run_parallel CLI module.""" + +import pytest + +from simplexity.cli.run_parallel import Job, generate_jobs + + +class TestJob: + """Tests for the Job dataclass.""" + + def test_to_cmd_without_overrides(self) -> None: + """Verify to_cmd() produces correct command without overrides.""" + job = Job( + script="train.py", + config_name="config", + overrides="", + gpu_id=0, + job_num=0, + ) + assert job.to_cmd() == ["uv", "run", "python", "train.py", "--config-name=config"] + + def test_to_cmd_with_overrides(self) -> None: + """Verify to_cmd() appends overrides to the command.""" + job = Job( + script="train.py", + config_name="config", + overrides="seed=42 lr=0.01", + gpu_id=0, + job_num=0, + ) + assert job.to_cmd() == [ + "uv", + "run", + "python", + "train.py", + "--config-name=config", + "seed=42", + "lr=0.01", + ] + + def test_device_str_gpu(self) -> None: + """Verify device_str shows GPU ID when gpu_id is set.""" + job = Job(script="train.py", config_name="config", overrides="", gpu_id=2, job_num=0) + assert job.device_str == "GPU 2" + + def test_device_str_cpu(self) -> None: + """Verify device_str shows CPU when gpu_id is None.""" + job = 
Job(script="train.py", config_name="config", overrides="", gpu_id=None, job_num=0) + assert job.device_str == "CPU" + + +class TestGenerateJobs: + """Tests for the generate_jobs function.""" + + def test_gpu_round_robin_assignment(self) -> None: + """Verify GPUs are assigned round-robin across jobs.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["seed=1,2,3,4,5,6"], + overrides=[], + gpus=[0, 1], + ) + + assert len(jobs) == 6 + assert [job.gpu_id for job in jobs] == [0, 1, 0, 1, 0, 1] + + def test_gpu_round_robin_with_three_gpus(self) -> None: + """Verify round-robin with 3 GPUs and 5 jobs.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["seed=1,2,3,4,5"], + overrides=[], + gpus=[0, 2, 4], + ) + + assert len(jobs) == 5 + assert [job.gpu_id for job in jobs] == [0, 2, 4, 0, 2] + + def test_cpu_mode_assigns_none(self) -> None: + """Verify CPU mode assigns None for all gpu_ids.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["seed=1,2,3"], + overrides=[], + gpus=None, + ) + + assert len(jobs) == 3 + assert all(job.gpu_id is None for job in jobs) + + def test_sweep_cartesian_product(self) -> None: + """Verify sweeps produce cartesian product of overrides.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["a=1,2", "b=x,y"], + overrides=[], + gpus=[0], + ) + + assert len(jobs) == 4 + overrides = [job.overrides for job in jobs] + assert overrides == ["a=1 b=x", "a=1 b=y", "a=2 b=x", "a=2 b=y"] + + def test_sweep_single_param(self) -> None: + """Verify single sweep parameter generates correct jobs.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["seed=1,2,3"], + overrides=[], + gpus=[0, 1], + ) + + assert len(jobs) == 3 + assert [job.overrides for job in jobs] == ["seed=1", "seed=2", "seed=3"] + + def test_explicit_overrides_used_instead_of_sweeps(self) -> None: + """Verify explicit overrides take precedence 
over sweeps.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["seed=1,2,3"], + overrides=["custom=a", "custom=b"], + gpus=[0], + ) + + assert len(jobs) == 2 + assert [job.overrides for job in jobs] == ["custom=a", "custom=b"] + + def test_no_sweeps_or_overrides_creates_single_job(self) -> None: + """Verify empty sweeps and overrides creates one job with empty overrides.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=[], + overrides=[], + gpus=[0], + ) + + assert len(jobs) == 1 + assert jobs[0].overrides == "" + + def test_job_numbers_sequential(self) -> None: + """Verify job numbers are assigned sequentially starting from 0.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=["seed=1,2,3,4"], + overrides=[], + gpus=[0, 1], + ) + + assert [job.job_num for job in jobs] == [0, 1, 2, 3] + + def test_script_and_config_propagated(self) -> None: + """Verify script and config_name are correctly set on all jobs.""" + jobs = generate_jobs( + script="experiments/run.py", + config_name="my_config", + sweeps=["seed=1,2"], + overrides=[], + gpus=[0], + ) + + assert all(job.script == "experiments/run.py" for job in jobs) + assert all(job.config_name == "my_config" for job in jobs) + + @pytest.mark.parametrize( + ("sweeps", "expected_count"), + [ + (["a=1,2,3"], 3), + (["a=1,2", "b=1,2"], 4), + (["a=1,2", "b=1,2,3"], 6), + (["a=1,2", "b=1,2", "c=1,2"], 8), + ], + ) + def test_cartesian_product_counts(self, sweeps: list[str], expected_count: int) -> None: + """Verify correct number of jobs for various cartesian product sizes.""" + jobs = generate_jobs( + script="train.py", + config_name="config", + sweeps=sweeps, + overrides=[], + gpus=[0], + ) + + assert len(jobs) == expected_count From 67ff8b05802c62dabafe931d15def24db3db212a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Sat, 20 Dec 
2025 00:51:37 +0100 Subject: [PATCH 11/35] save more path-specific visualizations (#145) * save more path-specific visualizations * update test path --- .../activations/visualization_persistence.py | 23 ++++++++++++++----- .../test_visualization_persistence.py | 4 ++-- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/simplexity/activations/visualization_persistence.py b/simplexity/activations/visualization_persistence.py index 27518184..f7a8b0ef 100644 --- a/simplexity/activations/visualization_persistence.py +++ b/simplexity/activations/visualization_persistence.py @@ -25,20 +25,31 @@ def save_visualization_payloads( root: Path, step: int, ) -> Mapping[str, str]: - """Persist visualization payloads, accumulating history for slider controls.""" + """Persist visualization payloads, accumulating history for slider controls. + + Non-accumulated visualizations are saved to step-specific directories: + root/analysis/step_XXXXX/name.html + + Accumulated visualizations (with slider on step) are saved to: + root/analysis/accumulated/name.html + """ if not visualizations: return {} figure_names_to_paths = {} - step_dir = root / f"step_{step:05d}" - step_dir.mkdir(parents=True, exist_ok=True) for key, payload in visualizations.items(): safe_name = key.replace("/", "_") + accumulated = _should_accumulate_steps(payload) figure = _maybe_accumulate_history(payload, root, safe_name, step) - analysis_dir = step_dir / payload.analysis - analysis_dir.mkdir(parents=True, exist_ok=True) - output_path = analysis_dir / f"{payload.name}.html" + + if accumulated: + output_dir = root / payload.analysis / "accumulated" + else: + output_dir = root / payload.analysis / f"step_{step:05d}" + output_dir.mkdir(parents=True, exist_ok=True) + + output_path = output_dir / f"{payload.name}.html" if isinstance(figure, go.Figure): figure.write_html(str(output_path)) else: diff --git a/tests/activations/test_visualization_persistence.py b/tests/activations/test_visualization_persistence.py 
index 58f782f9..ae3881ee 100644 --- a/tests/activations/test_visualization_persistence.py +++ b/tests/activations/test_visualization_persistence.py @@ -75,7 +75,7 @@ def test_save_visualization_payloads_accumulates_step_history(tmp_path): assert len(history_df) == len(df_first) assert set(history_df["step"]) == {1} assert set(history_df["sequence_step"]) == {0} - assert (tmp_path / "step_00001" / "analysis" / "viz.html").exists() + assert (tmp_path / "analysis" / "accumulated" / "viz.html").exists() df_second = pd.DataFrame({"step": [1], "value": [0.5]}) payload_two = _payload(df_second) @@ -86,4 +86,4 @@ def test_save_visualization_payloads_accumulates_step_history(tmp_path): assert len(history_df) == len(df_first) + len(df_second) assert set(history_df["step"]) == {1, 2} assert set(history_df["sequence_step"]) == {0, 1} - assert (tmp_path / "step_00002" / "analysis" / "viz.html").exists() + assert (tmp_path / "analysis" / "accumulated" / "viz.html").exists() From 9549554535c6b757b390c785568c623e5f9a4df3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Wed, 7 Jan 2026 12:11:56 -0800 Subject: [PATCH 12/35] Casper/generic resolution (#161) * generic d_vocab resolution * rename and test * static analysis --- simplexity/run_management/run_management.py | 11 ++- .../structured_configs/predictive_model.py | 8 +-- .../test_predictive_model_config.py | 72 ++++++++++++------- 3 files changed, 54 insertions(+), 37 deletions(-) diff --git a/simplexity/run_management/run_management.py b/simplexity/run_management/run_management.py index fdadbd12..2478d978 100644 --- a/simplexity/run_management/run_management.py +++ b/simplexity/run_management/run_management.py @@ -85,9 +85,8 @@ validate_persistence_config, ) from simplexity.structured_configs.predictive_model import ( - is_hooked_transformer_config, is_predictive_model_target, - resolve_hooked_transformer_config, + 
resolve_nested_model_config, ) from simplexity.utils.config_utils import ( filter_instance_keys, @@ -465,12 +464,10 @@ def _setup_predictive_models( model_instance_keys = filter_instance_keys(cfg, instance_keys, is_predictive_model_target) for instance_key in model_instance_keys: instance_config: DictConfig | None = OmegaConf.select(cfg, instance_key, throw_on_missing=True) - if instance_config and is_hooked_transformer_config(instance_config): - instance_config_config: DictConfig | None = instance_config.get("cfg", None) - if instance_config_config is None: - raise RuntimeError("Error selecting predictive model config") + instance_config_config: DictConfig | None = instance_config.get("cfg", None) if instance_config else None + if instance_config_config is not None: vocab_size = _get_attribute_value(cfg, instance_keys, "vocab_size") - resolve_hooked_transformer_config(instance_config_config, vocab_size=vocab_size) + resolve_nested_model_config(instance_config_config, vocab_size=vocab_size) model = _instantiate_predictive_model(cfg, instance_key) step_key = instance_key.rsplit(".", 1)[0] + ".load_checkpoint_step" load_checkpoint_step: int | None = OmegaConf.select(cfg, step_key, throw_on_missing=True) diff --git a/simplexity/structured_configs/predictive_model.py b/simplexity/structured_configs/predictive_model.py index 1638143e..f33694e2 100644 --- a/simplexity/structured_configs/predictive_model.py +++ b/simplexity/structured_configs/predictive_model.py @@ -149,8 +149,8 @@ def validate_hooked_transformer_config(cfg: DictConfig) -> None: @dynamic_resolve -def resolve_hooked_transformer_config(cfg: DictConfig, *, vocab_size: int | None = None) -> None: - """Resolve the HookedTransformerConfig.""" +def resolve_nested_model_config(cfg: DictConfig, *, vocab_size: int | None = None) -> None: + """Resolve nested model config fields like d_vocab and device.""" # Resolve d_vocab if vocab_size is None: SIMPLEXITY_LOGGER.debug("[predictive model] no vocab_size set") @@ 
-159,9 +159,7 @@ def resolve_hooked_transformer_config(cfg: DictConfig, *, vocab_size: int | None cfg.d_vocab = vocab_size SIMPLEXITY_LOGGER.info("[predictive model] d_vocab resolved to: %s", vocab_size) elif cfg.get("d_vocab") != vocab_size: - raise ConfigValidationError( - f"HookedTransformerConfig.d_vocab ({cfg.get('d_vocab')}) must be equal to {vocab_size}" - ) + raise ConfigValidationError(f"d_vocab ({cfg.get('d_vocab')}) must be equal to {vocab_size}") else: SIMPLEXITY_LOGGER.debug("[predictive model] d_vocab defined as: %s", cfg.get("d_vocab")) diff --git a/tests/structured_configs/test_predictive_model_config.py b/tests/structured_configs/test_predictive_model_config.py index ffa6c5e4..8d5daf4d 100644 --- a/tests/structured_configs/test_predictive_model_config.py +++ b/tests/structured_configs/test_predictive_model_config.py @@ -23,7 +23,7 @@ is_hooked_transformer_config, is_predictive_model_config, is_predictive_model_target, - resolve_hooked_transformer_config, + resolve_nested_model_config, validate_hooked_transformer_config, validate_hooked_transformer_config_config, validate_predictive_model_config, @@ -266,8 +266,8 @@ def test_validate_hooked_transformer_config_missing_cfg(self) -> None: with pytest.raises(ConfigValidationError, match="HookedTransformerConfig.cfg is required"): validate_hooked_transformer_config(cfg) - def test_resolve_hooked_transformer_config_without_kwargs(self) -> None: - """Test resolve_hooked_transformer_config with valid configs.""" + def test_resolve_nested_model_config_without_kwargs(self) -> None: + """Test resolve_nested_model_config with valid configs.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -284,14 +284,14 @@ def test_resolve_hooked_transformer_config_without_kwargs(self) -> None: patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.info") as mock_info, patch("simplexity.structured_configs.predictive_model.resolve_device", return_value="cpu"), ): - 
resolve_hooked_transformer_config(cfg) + resolve_nested_model_config(cfg) mock_debug.assert_called_once_with("[predictive model] no vocab_size set") mock_info.assert_called_once_with("[predictive model] device resolved to: %s", "cpu") assert OmegaConf.is_missing(cfg, "d_vocab") assert cfg.get("device") == "cpu" - def test_resolve_hooked_transformer_config_with_complete_values(self) -> None: - """Test resolve_hooked_transformer_config with complete values.""" + def test_resolve_nested_model_config_with_complete_values(self) -> None: + """Test resolve_nested_model_config with complete values.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -308,7 +308,7 @@ def test_resolve_hooked_transformer_config_with_complete_values(self) -> None: patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.debug") as mock_debug, patch("simplexity.structured_configs.predictive_model.resolve_device", return_value="cuda"), ): - resolve_hooked_transformer_config(cfg, vocab_size=4) + resolve_nested_model_config(cfg, vocab_size=4) mock_debug.assert_has_calls( [ call("[predictive model] d_vocab defined as: %s", 4), @@ -318,8 +318,8 @@ def test_resolve_hooked_transformer_config_with_complete_values(self) -> None: assert cfg.get("d_vocab") == 4 assert cfg.get("device") == "cuda" - def test_resolve_hooked_transformer_config_with_missing_values(self) -> None: - """Test resolve_hooked_transformer_config with missing values.""" + def test_resolve_nested_model_config_with_missing_values(self) -> None: + """Test resolve_nested_model_config with missing values.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -336,7 +336,7 @@ def test_resolve_hooked_transformer_config_with_missing_values(self) -> None: patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.info") as mock_info, patch("simplexity.structured_configs.predictive_model.resolve_device", return_value="cuda"), ): - 
resolve_hooked_transformer_config(cfg, vocab_size=4) + resolve_nested_model_config(cfg, vocab_size=4) mock_info.assert_has_calls( [ call("[predictive model] d_vocab resolved to: %s", 4), @@ -346,8 +346,8 @@ def test_resolve_hooked_transformer_config_with_missing_values(self) -> None: assert cfg.get("d_vocab") == 4 assert cfg.get("device") == "cuda" - def test_resolve_hooked_transformer_config_with_invalid_values(self) -> None: - """Test resolve_hooked_transformer_config with invalid values.""" + def test_resolve_nested_model_config_with_invalid_values(self) -> None: + """Test resolve_nested_model_config with invalid values.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -359,13 +359,11 @@ def test_resolve_hooked_transformer_config_with_invalid_values(self) -> None: "d_vocab": 3, } ) - with pytest.raises( - ConfigValidationError, match=re.escape("HookedTransformerConfig.d_vocab (3) must be equal to 4") - ): - resolve_hooked_transformer_config(cfg, vocab_size=4) + with pytest.raises(ConfigValidationError, match=re.escape("d_vocab (3) must be equal to 4")): + resolve_nested_model_config(cfg, vocab_size=4) - def test_resolve_hooked_transformer_config_with_conflicting_device(self) -> None: - """Test resolve_hooked_transformer_config with conflicting device.""" + def test_resolve_nested_model_config_with_conflicting_device(self) -> None: + """Test resolve_nested_model_config with conflicting device.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -383,7 +381,7 @@ def test_resolve_hooked_transformer_config_with_conflicting_device(self) -> None patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.warning") as mock_warning, patch("simplexity.structured_configs.predictive_model.resolve_device", side_effect=error), ): - resolve_hooked_transformer_config(cfg) + resolve_nested_model_config(cfg) mock_warning.assert_has_calls( [ call( @@ -396,8 +394,8 @@ def 
test_resolve_hooked_transformer_config_with_conflicting_device(self) -> None ) assert cfg.get("device") == "cpu" - def test_resolve_hooked_transformer_config_device_mismatch_updates_cfg(self) -> None: - """Test resolve_hooked_transformer_config device mismatch updates config.""" + def test_resolve_nested_model_config_device_mismatch_updates_cfg(self) -> None: + """Test resolve_nested_model_config device mismatch updates config.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -414,12 +412,12 @@ def test_resolve_hooked_transformer_config_device_mismatch_updates_cfg(self) -> patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.warning") as mock_warning, patch("simplexity.structured_configs.predictive_model.resolve_device", return_value="cpu"), ): - resolve_hooked_transformer_config(cfg) + resolve_nested_model_config(cfg) mock_warning.assert_called_once_with("[predictive model] specified device %s resolved to %s", "cuda", "cpu") assert cfg.get("device") == "cpu" - def test_resolve_hooked_transformer_config_device_auto(self) -> None: - """Test resolve_hooked_transformer_config with auto device.""" + def test_resolve_nested_model_config_device_auto(self) -> None: + """Test resolve_nested_model_config with auto device.""" cfg = DictConfig( { "_target_": "transformer_lens.HookedTransformerConfig", @@ -436,10 +434,34 @@ def test_resolve_hooked_transformer_config_device_auto(self) -> None: patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.info") as mock_info, patch("simplexity.structured_configs.predictive_model.resolve_device", return_value="mps"), ): - resolve_hooked_transformer_config(cfg) + resolve_nested_model_config(cfg) mock_info.assert_any_call("[predictive model] device resolved to: %s", "mps") assert cfg.get("device") == "mps" + def test_resolve_nested_model_config_generic_model(self) -> None: + """Test resolve_nested_model_config with a generic (non-HookedTransformer) config.""" + cfg = 
DictConfig( + { + "_target_": "some_library.CustomModelConfig", + "hidden_size": 256, + "d_vocab": MISSING, + "device": None, + } + ) + with ( + patch("simplexity.structured_configs.predictive_model.SIMPLEXITY_LOGGER.info") as mock_info, + patch("simplexity.structured_configs.predictive_model.resolve_device", return_value="cuda"), + ): + resolve_nested_model_config(cfg, vocab_size=100) + mock_info.assert_has_calls( + [ + call("[predictive model] d_vocab resolved to: %s", 100), + call("[predictive model] device resolved to: %s", "cuda"), + ] + ) + assert cfg.get("d_vocab") == 100 + assert cfg.get("device") == "cuda" + def test_is_predictive_model_target_valid(self) -> None: """Test is_predictive_model_target with valid model targets.""" assert is_predictive_model_target("transformer_lens.HookedTransformer") From 861bfbbd27e3c2d8b65723c1255c29ecc13352ed Mon Sep 17 00:00:00 2001 From: Loren AC Date: Wed, 7 Jan 2026 15:17:42 -0500 Subject: [PATCH 13/35] Improve metric naming for length and readability (#153) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Abbreviate linear regression scalar metric names * Add method to make layer names more compact and to construct layer-specific metric key names * Integrate layer formatting methods into LayerwiseAnalysis * Update visualization key lookup to use new {analysis}/{layer} format Change projection and scalar key resolution to use the new naming convention where keys follow {analysis}/{layer_spec} format (e.g., "pca/L0.resid.pre") instead of the old {layer}_{analysis} format (e.g., "layer_0_pca"). 
Key changes: - Update _lookup_projection_array and _lookup_scalar_value to match keys by prefix (analysis/) rather than suffix (_analysis) - Add _key_matches_layer helper to handle factor-suffixed keys like "projected/layer_0-F0" when given pattern "projected/F0" - Update _expand_projection_key_pattern to extract factor suffixes from new format and reconstruct pattern-matchable keys - Update _expand_scalar_pattern_keys to properly handle analysis prefix for patterns with internal slashes Update all test files to use new key format in mock data. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Format layer names in visualization lookups and update test key assertions - Add format_layer_spec to field_resolution.py for converting layer names (e.g., blocks.0.hook_resid_pre → L0.resid.pre) before key matching - Update dataframe_builders.py to format layer names in scalar series inference and DataFrame construction - Update test_linear_regression.py assertions to new key format: - factor_X/metric → metric/FX - orthogonality_X_Y/metric → orth/metric_short/FX,Y - concat/metric → metric/Fcat - Update test_layerwise_analysis.py assertions to new key format: - layer_metric → metric/layer - Update with_visuals.yaml config templates to match new format - Update test_activation_tracker_config.py key assertion 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Abbreviate PCA metric names for consistency - variance_explained → var_exp - n_components_{pct}pct → nc_{pct} - cumvar_{idx} unchanged 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Fix formatting and linting * Format layer names in pattern expansion for projection key matching The pattern expansion logic was using unformatted layer names (e.g., 'blocks.0.hook_resid_pre') to match against projection keys that have formatted layer names (e.g., 'projected/L0.resid.pre'). 
This caused pattern matching to fail when expanding projection key patterns like 'projected/F*' for non-concatenated layers. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 * Eric/improve-metric-naming-for-length-and-readability (#156) * simplify metric_keys.py * Update field resolution * Remove test * Simplify pattern expansion --------- Co-authored-by: Claude Opus 4.5 Co-authored-by: ealt --- .../visualization/dataframe_builders.py | 7 +- .../visualization/field_resolution.py | 74 ++++- .../visualization/pattern_expansion.py | 56 ++-- simplexity/analysis/layerwise_analysis.py | 8 +- simplexity/analysis/linear_regression.py | 42 ++- simplexity/analysis/metric_keys.py | 62 ++++ simplexity/analysis/pca.py | 4 +- tests/activations/test_activation_analysis.py | 214 ++++++------ .../test_activation_visualizations.py | 4 +- .../activations/test_dataframe_integration.py | 34 +- tests/activations/test_field_expansion.py | 112 +++---- .../test_scalar_wildcard_expansion.py | 26 +- .../activations/test_visualization_modules.py | 109 +++---- tests/analysis/test_layerwise_analysis.py | 20 +- tests/analysis/test_linear_regression.py | 304 +++++++++--------- tests/analysis/test_metric_keys.py | 61 ++++ tests/analysis/test_pca.py | 4 +- .../activation_tracker/with_visuals.yaml | 4 +- .../test_activation_tracker_config.py | 2 +- 19 files changed, 673 insertions(+), 474 deletions(-) create mode 100644 simplexity/analysis/metric_keys.py create mode 100644 tests/analysis/test_metric_keys.py diff --git a/simplexity/activations/visualization/dataframe_builders.py b/simplexity/activations/visualization/dataframe_builders.py index 20bf93e0..a689028c 100644 --- a/simplexity/activations/visualization/dataframe_builders.py +++ b/simplexity/activations/visualization/dataframe_builders.py @@ -26,6 +26,7 @@ SamplingConfig, ScalarSeriesMapping, ) +from simplexity.analysis.metric_keys import format_layer_spec from simplexity.exceptions import 
ConfigValidationError @@ -155,9 +156,10 @@ def _build_scalar_series_dataframe( base_metadata = _scalar_series_metadata(metadata_columns) rows: list[dict[str, Any]] = [] for layer_name in layer_names: + formatted_layer = format_layer_spec(layer_name) index_values = mapping.index_values or _infer_scalar_series_indices(mapping, scalars, layer_name, analysis_name) for index_value in index_values: - raw_key = mapping.key_template.format(layer=layer_name, index=index_value) + raw_key = mapping.key_template.format(layer=formatted_layer, index=index_value) scalar_key = f"{analysis_name}/{raw_key}" scalar_value = scalars.get(scalar_key) if scalar_value is None: @@ -183,7 +185,8 @@ def _infer_scalar_series_indices( analysis_name: str, ) -> list[int]: """Infer available indices for scalar series from available scalar keys.""" - raw_template = mapping.key_template.format(layer=layer_name, index=_SCALAR_INDEX_SENTINEL) + formatted_layer = format_layer_spec(layer_name) + raw_template = mapping.key_template.format(layer=formatted_layer, index=_SCALAR_INDEX_SENTINEL) template = f"{analysis_name}/{raw_template}" if _SCALAR_INDEX_SENTINEL not in template: raise ConfigValidationError( diff --git a/simplexity/activations/visualization/field_resolution.py b/simplexity/activations/visualization/field_resolution.py index 2a423db3..913a92cc 100644 --- a/simplexity/activations/visualization/field_resolution.py +++ b/simplexity/activations/visualization/field_resolution.py @@ -7,38 +7,78 @@ import numpy as np from simplexity.activations.visualization_configs import ActivationVisualizationFieldRef +from simplexity.analysis.metric_keys import construct_layer_specific_key, format_layer_spec from simplexity.exceptions import ConfigValidationError def _lookup_projection_array( - projections: Mapping[str, np.ndarray], layer_name: str, key: str | None, concat_layers: bool + projections: Mapping[str, np.ndarray], layer_name: str, key: str, concat_layers: bool ) -> np.ndarray: - """Look up a 
projection array by key, handling layer naming conventions.""" - if key is None: - raise ConfigValidationError("Projection references must supply a `key` value.") - suffix = f"_{key}" + """Look up a projection array by key, handling layer naming conventions. + + Supports keys in the format "{analysis}/{layer_spec}" (e.g., "pca/L0.resid.pre") + or "{analysis}/{layer_spec}-{factor_spec}" (e.g., "reg/L0.resid.pre-F0"). + + When key contains a factor suffix (e.g., "projected/F0"), looks for the full key + "{analysis}/{layer_spec}-{factor_spec}" (e.g., "projected/L0.resid.pre-F0"). + """ for full_key, value in projections.items(): if concat_layers: - if full_key.endswith(suffix) or full_key == key: - return np.asarray(value) + if full_key == key or full_key.startswith(f"{key}/"): + return value else: - if not full_key.endswith(suffix): - continue - candidate_layer = full_key[: -len(suffix)] - if candidate_layer == layer_name: - return np.asarray(value) + if _key_matches_layer(full_key, key, layer_name): + return value raise ConfigValidationError(f"Projection '{key}' not available for layer '{layer_name}'.") +def _key_matches_layer(full_key: str, key: str, layer_name: str) -> bool: + """Check if a full key matches the given key pattern and layer name. + + Handles two formats: + - Simple: key="pca" matches full_key="pca/L0.resid.pre" + - Factor: key="projected/F0" matches full_key="projected/L0.resid.pre-F0" + + The layer_name is formatted using format_layer_spec before matching. 
+ """ + if "/" not in full_key: + return False + + formatted_layer = format_layer_spec(layer_name) + + # Check if key has a factor suffix (e.g., "projected/F0") + if "/" in key: + return full_key == construct_layer_specific_key(key, formatted_layer) + + # Simple key format: key="pca" matches "pca/L0.resid.pre" + prefix = f"{key}/" + if not full_key.startswith(prefix): + return False + layer_part = full_key[len(prefix) :] + candidate_layer = layer_part.split("-")[0] + return candidate_layer == formatted_layer + + def _lookup_scalar_value(scalars: Mapping[str, float], layer_name: str, key: str, concat_layers: bool) -> float: - """Look up a scalar value by key, handling layer naming conventions.""" - suffix = f"_{key}" + """Look up a scalar value by key, handling layer naming conventions. + + Supports keys in the format "{metric}/{layer_spec}" (e.g., "r2/L0.resid.pre") + or "{metric}/{layer_spec}-{factor_spec}" (e.g., "r2/L0.resid.pre-F0"). + + The layer_name is formatted using format_layer_spec before matching. 
+ """ + formatted_layer = format_layer_spec(layer_name) + prefix = f"{key}/" for full_key, value in scalars.items(): if concat_layers: - if full_key.endswith(suffix) or full_key == key: + if full_key.startswith(prefix) or full_key == key: return float(value) else: - if full_key.endswith(suffix) and full_key[: -len(suffix)] == layer_name: + if not full_key.startswith(prefix): + continue + layer_part = full_key[len(prefix) :] + candidate_layer = layer_part.split("-")[0] + if candidate_layer == formatted_layer: return float(value) raise ConfigValidationError(f"Scalar '{key}' not available for layer '{layer_name}'.") @@ -130,6 +170,8 @@ def _resolve_field( return np.asarray(metadata_columns["weight"]) if ref.source == "projections": + if ref.key is None: + raise ConfigValidationError("Projection references must supply a `key` value.") array = _lookup_projection_array(projections, layer_name, ref.key, analysis_concat_layers) if isinstance(ref.component, str): raise ConfigValidationError("Component indices should be expanded before resolution") diff --git a/simplexity/activations/visualization/pattern_expansion.py b/simplexity/activations/visualization/pattern_expansion.py index 000f43d9..efe9c206 100644 --- a/simplexity/activations/visualization/pattern_expansion.py +++ b/simplexity/activations/visualization/pattern_expansion.py @@ -17,6 +17,7 @@ validate_single_pattern, ) from simplexity.activations.visualization_configs import ActivationVisualizationFieldRef +from simplexity.analysis.metric_keys import format_layer_spec from simplexity.exceptions import ConfigValidationError @@ -119,7 +120,7 @@ def _get_component_count( raise ConfigValidationError(f"Projection must be 1D or 2D, got {np_array.ndim}D") return np_array.shape[1] - elif ref.source == "belief_states": + if ref.source == "belief_states": if belief_states is None: raise ConfigValidationError("Belief states not available") np_array = np.asarray(belief_states) @@ -127,8 +128,7 @@ def _get_component_count( raise 
ConfigValidationError(f"Belief states must be 2D, got {np_array.ndim}D") return np_array.shape[1] - else: - raise ConfigValidationError(f"Component expansion not supported for source: {ref.source}") + raise ConfigValidationError(f"Component expansion not supported for source: {ref.source}") def _expand_projection_key_pattern( @@ -149,6 +149,9 @@ def _expand_projection_key_pattern( Dict mapping extracted index (as string) to the concrete key suffix. E.g., {"0": "factor_0/projected", "1": "factor_1/projected"} """ + # Format layer name to match against projection keys which use formatted names + formatted_layer = format_layer_spec(layer_name) + # Build regex from pattern if "*" in key_pattern: regex_pattern = build_wildcard_regex(key_pattern) @@ -170,16 +173,33 @@ def _expand_projection_key_pattern( # Match against available projection keys result: dict[str, str] = {} for full_key in projections: - # Extract the key suffix (part after layer name) + # Extract the key suffix for pattern matching if analysis_concat_layers: - # Keys are like "factor_0/projected" directly + # Keys are like "analysis/Lcat" or "analysis/Lcat-F0" directly key_suffix = full_key else: - # Keys are like "layer_name_factor_0/projected" - prefix = f"{layer_name}_" - if not full_key.startswith(prefix): + # New format: keys are like "analysis/layer_name" or "analysis/layer_name-F0" + # Extract the analysis prefix and factor suffix for matching + if "/" not in full_key: + continue + parts = full_key.rsplit("/", 1) + if len(parts) != 2: continue - key_suffix = full_key[len(prefix) :] + analysis_prefix, layer_part = parts + + # Check if this key is for the current layer + if not layer_part.startswith(formatted_layer): + continue + + # Extract factor suffix if present (e.g., "L0.resid.pre-F0" -> "-F0") + factor_suffix = layer_part[len(formatted_layer) :] + + # Reconstruct a pattern-matchable key suffix + # Convert "projected/layer_0-F0" to "projected/F0" for pattern matching + if 
factor_suffix.startswith("-"): + key_suffix = f"{analysis_prefix}/{factor_suffix[1:]}" + else: + key_suffix = analysis_prefix match = regex_pattern.match(key_suffix) if match: @@ -201,7 +221,6 @@ def _expand_projection_key_mapping( ref: ActivationVisualizationFieldRef, layer_name: str, projections: Mapping[str, np.ndarray], - belief_states: np.ndarray | None, analysis_concat_layers: bool, ) -> dict[str, ActivationVisualizationFieldRef]: """Expand projection key patterns, optionally combined with component patterns. @@ -388,7 +407,6 @@ def _expand_belief_factor_mapping( def _expand_scalar_keys( field_pattern: str, key_pattern: str | None, - layer_name: str, scalars: Mapping[str, float], ) -> dict[str, str]: """Expand scalar field patterns by matching available scalar keys. @@ -419,14 +437,14 @@ def _expand_scalar_pattern_keys( ) -> list[str]: """Expand wildcard/range pattern against available scalar keys.""" keys = list(available_keys) - has_prefixed_keys = any("/" in key for key in keys) prefix = f"{analysis_name}/" + keys_have_prefix = any(key.startswith(prefix) for key in keys) normalized_pattern = pattern - if "/" not in normalized_pattern and has_prefixed_keys: - normalized_pattern = f"{analysis_name}/{normalized_pattern}" - elif "/" in normalized_pattern and not has_prefixed_keys and normalized_pattern.startswith(prefix): - normalized_pattern = normalized_pattern[len(prefix) :] + if keys_have_prefix and not pattern.startswith(prefix): + normalized_pattern = f"{prefix}{pattern}" + elif not keys_have_prefix and pattern.startswith(prefix): + normalized_pattern = pattern[len(prefix) :] pattern_variants = _expand_scalar_pattern_ranges(normalized_pattern) matched: list[str] = [] @@ -505,9 +523,7 @@ def _expand_field_mapping( f"Field name '{field_name}' has too many patterns (max 2 for key+component expansion)" ) - return _expand_projection_key_mapping( - field_name, ref, layer_name, projections, belief_states, analysis_concat_layers - ) + return 
_expand_projection_key_mapping(field_name, ref, layer_name, projections, analysis_concat_layers) # Check for belief state factor patterns if ref.source == "belief_states" and ref.factor is not None and isinstance(ref.factor, str): @@ -539,7 +555,7 @@ def _expand_field_mapping( if not field_has_pattern: return {field_name: ref} - scalar_expansions = _expand_scalar_keys(field_name, ref.key, layer_name, scalars) + scalar_expansions = _expand_scalar_keys(field_name, ref.key, scalars) return { field: ActivationVisualizationFieldRef(source="scalars", key=key, component=None, reducer=None) for field, key in scalar_expansions.items() diff --git a/simplexity/analysis/layerwise_analysis.py b/simplexity/analysis/layerwise_analysis.py index 60aa5cb7..fe47b9cd 100644 --- a/simplexity/analysis/layerwise_analysis.py +++ b/simplexity/analysis/layerwise_analysis.py @@ -17,6 +17,7 @@ import jax from simplexity.analysis.linear_regression import layer_linear_regression +from simplexity.analysis.metric_keys import construct_layer_specific_key, format_layer_spec from simplexity.analysis.pca import ( DEFAULT_VARIANCE_THRESHOLDS, layer_pca_analysis, @@ -195,10 +196,13 @@ def analyze( belief_states, **self._analysis_kwargs, ) + formatted_layer_name = format_layer_spec(layer_name) for key, value in layer_scalars.items(): - scalars[f"{layer_name}_{key}"] = value + constructed_key = construct_layer_specific_key(key, formatted_layer_name) + scalars[constructed_key] = value for key, value in layer_projections.items(): - projections[f"{layer_name}_{key}"] = value + constructed_key = construct_layer_specific_key(key, formatted_layer_name) + projections[constructed_key] = value return scalars, projections diff --git a/simplexity/analysis/linear_regression.py b/simplexity/analysis/linear_regression.py index a0ef6eee..9a37658b 100644 --- a/simplexity/analysis/linear_regression.py +++ b/simplexity/analysis/linear_regression.py @@ -228,6 +228,17 @@ def _merge_results_with_prefix( 
arrays.update({f"{prefix}/{key}": value for key, value in results_arrays.items()}) +def _merge_results_with_suffix( + scalars: dict[str, float], + arrays: dict[str, jax.Array], + results: tuple[Mapping[str, float], Mapping[str, jax.Array]], + suffix: str, +) -> None: + results_scalars, results_arrays = results + scalars.update({f"{key}/{suffix}": value for key, value in results_scalars.items()}) + arrays.update({f"{key}/{suffix}": value for key, value in results_arrays.items()}) + + def _split_concat_results( layer_activations: jax.Array, weights: jax.Array, @@ -389,12 +400,12 @@ def log_some_zeros(num_zeros_array: jax.Array) -> None: effective_rank = jnp.exp(entropy) scalars = { - "subspace_overlap": float(subspace_overlap_score), - "max_singular_value": float(jnp.max(singular_values)), - "min_singular_value": float(jnp.min(singular_values)), - "participation_ratio": float(participation_ratio), + "overlap": float(subspace_overlap_score), + "sv_max": float(jnp.max(singular_values)), + "sv_min": float(jnp.min(singular_values)), + "p_ratio": float(participation_ratio), "entropy": float(entropy), - "effective_rank": float(effective_rank), + "eff_rank": float(effective_rank), } arrays = { @@ -426,8 +437,8 @@ def _compute_all_pairwise_orthogonality( for i, j in factor_pairs: basis_pair = [basis_list[i], basis_list[j]] orthogonality_scalars, orthogonality_arrays = _compute_subspace_orthogonality(basis_pair) - scalars.update({f"orthogonality_{i}_{j}/{key}": value for key, value in orthogonality_scalars.items()}) - arrays.update({f"orthogonality_{i}_{j}/{key}": value for key, value in orthogonality_arrays.items()}) + scalars.update({f"{i},{j}/{key}": value for key, value in orthogonality_scalars.items()}) + arrays.update({f"{i},{j}/{key}": value for key, value in orthogonality_arrays.items()}) return scalars, arrays @@ -453,7 +464,7 @@ def _handle_factored_regression( if concat_belief_states: belief_states_concat = jnp.concatenate(belief_states, axis=-1) concat_results = 
regression_fn(layer_activations, belief_states_concat, weights, **kwargs) - _merge_results_with_prefix(scalars, arrays, concat_results, "concat") + _merge_results_with_suffix(scalars, arrays, concat_results, "Fcat") # Split the concatenated parameters and projections into the individual factors factor_results = _split_concat_results( @@ -467,15 +478,20 @@ def _handle_factored_regression( factor_results = _process_individual_factors(layer_activations, belief_states, weights, use_svd, **kwargs) for factor_idx, factor_result in enumerate(factor_results): - _merge_results_with_prefix(scalars, arrays, factor_result, f"factor_{factor_idx}") + _merge_results_with_suffix(scalars, arrays, factor_result, f"F{factor_idx}") if compute_subspace_orthogonality: # Extract coefficients (excludes intercept) for orthogonality computation coeffs_list = [factor_arrays["coeffs"] for _, factor_arrays in factor_results] - orthogonality_scalars, orthogonality_singular_values = _compute_all_pairwise_orthogonality(coeffs_list) - scalars.update(orthogonality_scalars) - arrays.update(orthogonality_singular_values) - + orthogonality_scalars, orthogonality_arrays = _compute_all_pairwise_orthogonality(coeffs_list) + for key, value in orthogonality_scalars.items(): + factors, metric = key.split("/") + new_key = f"orth/{metric}/F{factors}" + scalars.update({new_key: value}) + for key, value in orthogonality_arrays.items(): + factors, metric = key.split("/") + new_key = f"orth/{metric}/F{factors}" + arrays.update({new_key: value}) return scalars, arrays diff --git a/simplexity/analysis/metric_keys.py b/simplexity/analysis/metric_keys.py new file mode 100644 index 00000000..f66e6b41 --- /dev/null +++ b/simplexity/analysis/metric_keys.py @@ -0,0 +1,62 @@ +"""Utility functions for constructing layer-specific analysis keys for scalar and array metrics.""" + +from __future__ import annotations + +import re + + +def construct_layer_specific_key(key: str, layer_name: str) -> str: + """Construct a 
layer-specific namespaced metric key.""" + if "/" not in key: + return f"{key}/{layer_name}" + + # If the key is factor-specific (e.g. "rmse/F0") + # prepend the layer name to the factor (e.g. "rmse/L0.resid.post-F0") + analysis, factor = key.rsplit("/", 1) + if factor.startswith("F"): + return f"{analysis}/{layer_name}-{factor}" + + return f"{key}/{layer_name}" + + +def format_layer_spec(layer_name: str) -> str: + """Format layer name into compact layer specification. + + Converts verbose layer names to compact specs: + - Block layers: "blocks.N.hook_X_Y" → "LN.X.Y" + - Special layers: "embed", "pos_embed", "ln_final" → unchanged + - Concatenated: "concatenated" → "Lcat" + + Args: + layer_name: Original layer name from activations dict + + Returns: + Formatted layer spec + + Examples: + >>> format_layer_spec("blocks.2.hook_resid_post") + "L2.resid.post" + >>> format_layer_spec("blocks.0.hook_resid_pre") + "L0.resid.pre" + >>> format_layer_spec("blocks.10.hook_mlp_out") + "L10.mlp.out" + >>> format_layer_spec("embed") + "embed" + >>> format_layer_spec("concatenated") + "Lcat" + """ + if layer_name == "concatenated": + return "Lcat" + + if not layer_name.startswith("blocks."): + return layer_name + + block_pattern = r"^blocks\.(?P<block_num>\d+)\.hook_(?P<hook_name>.+)$" + match = re.match(block_pattern, layer_name) + if match: + block_num = match.group("block_num") + hook_name = match.group("hook_name") + simplified_hook_name = hook_name.replace("_", ".") + return f"L{block_num}.{simplified_hook_name}" + + return layer_name diff --git a/simplexity/analysis/pca.py b/simplexity/analysis/pca.py index bdab8acb..625260f7 100644 --- a/simplexity/analysis/pca.py +++ b/simplexity/analysis/pca.py @@ -117,7 +117,7 @@ def layer_pca_analysis( scalars: dict[str, float] = {} for idx, value in enumerate(cumulative_variance, start=1): scalars[f"cumvar_{idx}"] = float(value) - scalars["variance_explained"] = float(cumulative_variance[-1]) + scalars["var_exp"] = float(cumulative_variance[-1])
threshold_counts = variance_threshold_counts( result["all_explained_variance_ratio"], @@ -125,7 +125,7 @@ def layer_pca_analysis( ) for threshold, count in threshold_counts.items(): percentage = int(threshold * 100) - scalars[f"n_components_{percentage}pct"] = float(count) + scalars[f"nc_{percentage}"] = float(count) projections = {"pca": result["X_proj"]} return scalars, projections diff --git a/tests/activations/test_activation_analysis.py b/tests/activations/test_activation_analysis.py index 305f3b37..256221f7 100644 --- a/tests/activations/test_activation_analysis.py +++ b/tests/activations/test_activation_analysis.py @@ -237,19 +237,19 @@ def test_basic_regression(self, synthetic_data): weights=prepared.weights, ) - assert "layer_0_r2" in scalars - assert "layer_0_rmse" in scalars - assert "layer_0_mae" in scalars - assert "layer_0_dist" in scalars - assert "layer_1_r2" in scalars + assert "r2/layer_0" in scalars + assert "rmse/layer_0" in scalars + assert "mae/layer_0" in scalars + assert "dist/layer_0" in scalars + assert "r2/layer_1" in scalars - assert "layer_0_projected" in projections - assert "layer_1_projected" in projections + assert "projected/layer_0" in projections + assert "projected/layer_1" in projections assert prepared.belief_states is not None assert isinstance(prepared.belief_states, jax.Array) - assert projections["layer_0_projected"].shape == prepared.belief_states.shape - assert projections["layer_1_projected"].shape == prepared.belief_states.shape + assert projections["projected/layer_0"].shape == prepared.belief_states.shape + assert projections["projected/layer_1"].shape == prepared.belief_states.shape def test_requires_belief_states(self, synthetic_data): """Test that analysis raises error without belief_states.""" @@ -298,8 +298,8 @@ def test_uniform_weights(self, synthetic_data): weights=prepared.weights, ) - assert "layer_0_r2" in scalars - assert "layer_0_projected" in projections + assert "r2/layer_0" in scalars + assert 
"projected/layer_0" in projections class TestLinearRegressionSVDAnalysis: @@ -327,24 +327,24 @@ def test_basic_regression_svd(self, synthetic_data): weights=prepared.weights, ) - assert "layer_0_r2" in scalars - assert "layer_0_rmse" in scalars - assert "layer_0_mae" in scalars - assert "layer_0_dist" in scalars - assert "layer_0_best_rcond" in scalars - assert "layer_1_r2" in scalars - assert "layer_1_best_rcond" in scalars + assert "r2/layer_0" in scalars + assert "rmse/layer_0" in scalars + assert "mae/layer_0" in scalars + assert "dist/layer_0" in scalars + assert "best_rcond/layer_0" in scalars + assert "r2/layer_1" in scalars + assert "best_rcond/layer_1" in scalars - assert "layer_0_projected" in projections - assert "layer_1_projected" in projections + assert "projected/layer_0" in projections + assert "projected/layer_1" in projections assert prepared.belief_states is not None assert isinstance(prepared.belief_states, jax.Array) - assert projections["layer_0_projected"].shape == prepared.belief_states.shape - assert projections["layer_1_projected"].shape == prepared.belief_states.shape + assert projections["projected/layer_0"].shape == prepared.belief_states.shape + assert projections["projected/layer_1"].shape == prepared.belief_states.shape # Check that best_rcond is one of the provided values - assert scalars["layer_0_best_rcond"] in [1e-15, 1e-10, 1e-8] + assert scalars["best_rcond/layer_0"] in [1e-15, 1e-10, 1e-8] def test_requires_belief_states(self, synthetic_data): """Test that SVD analysis raises error without belief_states.""" @@ -397,21 +397,21 @@ def test_basic_pca(self, synthetic_data): weights=prepared.weights, ) - assert "layer_0_cumvar_1" in scalars - assert "layer_0_cumvar_2" in scalars - assert "layer_0_cumvar_3" in scalars - assert scalars["layer_0_cumvar_1"] <= scalars["layer_0_cumvar_2"] - assert scalars["layer_0_cumvar_2"] <= scalars["layer_0_cumvar_3"] - assert "layer_0_n_components_80pct" in scalars - assert 
"layer_0_n_components_90pct" in scalars - assert "layer_1_cumvar_1" in scalars + assert "cumvar_1/layer_0" in scalars + assert "cumvar_2/layer_0" in scalars + assert "cumvar_3/layer_0" in scalars + assert scalars["cumvar_1/layer_0"] <= scalars["cumvar_2/layer_0"] + assert scalars["cumvar_2/layer_0"] <= scalars["cumvar_3/layer_0"] + assert "nc_80/layer_0" in scalars + assert "nc_90/layer_0" in scalars + assert "cumvar_1/layer_1" in scalars - assert "layer_0_pca" in projections - assert "layer_1_pca" in projections + assert "pca/layer_0" in projections + assert "pca/layer_1" in projections batch_size = prepared.activations["layer_0"].shape[0] - assert projections["layer_0_pca"].shape == (batch_size, 3) - assert projections["layer_1_pca"].shape == (batch_size, 3) + assert projections["pca/layer_0"].shape == (batch_size, 3) + assert projections["pca/layer_1"].shape == (batch_size, 3) def test_pca_without_belief_states(self, synthetic_data): """Test PCA works without belief_states.""" @@ -437,9 +437,9 @@ def test_pca_without_belief_states(self, synthetic_data): weights=prepared.weights, ) - assert "layer_0_cumvar_1" in scalars - assert "layer_0_cumvar_2" in scalars - assert "layer_0_pca" in projections + assert "cumvar_1/layer_0" in scalars + assert "cumvar_2/layer_0" in scalars + assert "pca/layer_0" in projections def test_pca_all_components(self, synthetic_data): """Test PCA with n_components=None computes all components.""" @@ -465,7 +465,7 @@ def test_pca_all_components(self, synthetic_data): batch_size = prepared.activations["layer_0"].shape[0] d_layer0 = synthetic_data["d_layer0"] - assert projections["layer_0_pca"].shape == (batch_size, min(batch_size, d_layer0)) + assert projections["pca/layer_0"].shape == (batch_size, min(batch_size, d_layer0)) class TestActivationTracker: @@ -494,11 +494,11 @@ def test_basic_tracking(self, synthetic_data): activations=synthetic_data["activations"], ) - assert "regression/layer_0_r2" in scalars - assert 
"pca/layer_0_variance_explained" in scalars + assert "regression/r2/layer_0" in scalars + assert "pca/var_exp/layer_0" in scalars - assert "regression/layer_0_projected" in projections - assert "pca/layer_0_pca" in projections + assert "regression/projected/layer_0" in projections + assert "pca/pca/layer_0" in projections assert visualizations == {} def test_all_tokens_mode(self, synthetic_data): @@ -519,8 +519,8 @@ def test_all_tokens_mode(self, synthetic_data): activations=synthetic_data["activations"], ) - assert "regression/layer_0_r2" in scalars - assert "regression/layer_0_projected" in projections + assert "regression/r2/layer_0" in scalars + assert "regression/projected/layer_0" in projections assert visualizations == {} def test_mixed_requirements(self, synthetic_data): @@ -546,8 +546,8 @@ def test_mixed_requirements(self, synthetic_data): activations=synthetic_data["activations"], ) - assert "regression/layer_0_r2" in scalars - assert "pca/layer_0_variance_explained" in scalars + assert "regression/r2/layer_0" in scalars + assert "pca/var_exp/layer_0" in scalars assert visualizations == {} def test_concatenated_layers(self, synthetic_data): @@ -573,11 +573,11 @@ def test_concatenated_layers(self, synthetic_data): activations=synthetic_data["activations"], ) - assert "regression/concatenated_r2" in scalars - assert "pca/concatenated_variance_explained" in scalars + assert "regression/r2/Lcat" in scalars + assert "pca/var_exp/Lcat" in scalars - assert "regression/concatenated_projected" in projections - assert "pca/concatenated_pca" in projections + assert "regression/projected/Lcat" in projections + assert "pca/pca/Lcat" in projections assert visualizations == {} def test_uniform_weights(self, synthetic_data): @@ -599,7 +599,7 @@ def test_uniform_weights(self, synthetic_data): activations=synthetic_data["activations"], ) - assert "regression/layer_0_r2" in scalars + assert "regression/r2/layer_0" in scalars assert visualizations == {} def 
test_multiple_configs_efficiency(self, synthetic_data): @@ -630,13 +630,13 @@ def test_multiple_configs_efficiency(self, synthetic_data): activations=synthetic_data["activations"], ) - assert "pca_all_tokens/layer_0_variance_explained" in scalars - assert "pca_last_token/layer_0_variance_explained" in scalars - assert "regression_concat/concatenated_r2" in scalars + assert "pca_all_tokens/var_exp/layer_0" in scalars + assert "pca_last_token/var_exp/layer_0" in scalars + assert "regression_concat/r2/Lcat" in scalars - assert "pca_all_tokens/layer_0_pca" in projections - assert "pca_last_token/layer_0_pca" in projections - assert "regression_concat/concatenated_projected" in projections + assert "pca_all_tokens/pca/layer_0" in projections + assert "pca_last_token/pca/layer_0" in projections + assert "regression_concat/projected/Lcat" in projections assert visualizations == {} def test_tracker_accepts_torch_inputs(self, synthetic_data): @@ -670,8 +670,8 @@ def test_tracker_accepts_torch_inputs(self, synthetic_data): activations=torch_activations, ) - assert "regression/layer_0_r2" in scalars - assert "pca/layer_0_pca" in projections + assert "regression/r2/layer_0" in scalars + assert "pca/pca/layer_0" in projections assert visualizations == {} def test_tracker_builds_visualizations(self, synthetic_data, monkeypatch): @@ -907,28 +907,28 @@ def test_linear_regression_with_multiple_factors(self, factored_belief_data): # Should have separate metrics for each factor # Format is: layer_name_factor_idx/metric_name - assert "layer_0_factor_0/r2" in scalars - assert "layer_0_factor_1/r2" in scalars - assert "layer_0_factor_0/rmse" in scalars - assert "layer_0_factor_1/rmse" in scalars - assert "layer_0_factor_0/mae" in scalars - assert "layer_0_factor_1/mae" in scalars - assert "layer_0_factor_0/dist" in scalars - assert "layer_0_factor_1/dist" in scalars - - assert "layer_1_factor_0/r2" in scalars - assert "layer_1_factor_1/r2" in scalars + assert "r2/layer_0-F0" in scalars 
+ assert "r2/layer_0-F1" in scalars + assert "rmse/layer_0-F0" in scalars + assert "rmse/layer_0-F1" in scalars + assert "mae/layer_0-F0" in scalars + assert "mae/layer_0-F1" in scalars + assert "dist/layer_0-F0" in scalars + assert "dist/layer_0-F1" in scalars + + assert "r2/layer_1-F0" in scalars + assert "r2/layer_1-F1" in scalars # Should have separate projections for each factor - assert "layer_0_factor_0/projected" in projections - assert "layer_0_factor_1/projected" in projections - assert "layer_1_factor_0/projected" in projections - assert "layer_1_factor_1/projected" in projections + assert "projected/layer_0-F0" in projections + assert "projected/layer_0-F1" in projections + assert "projected/layer_1-F0" in projections + assert "projected/layer_1-F1" in projections # Check projection shapes batch_size = factored_belief_data["batch_size"] - assert projections["layer_0_factor_0/projected"].shape == (batch_size, factored_belief_data["factor_0_dim"]) - assert projections["layer_0_factor_1/projected"].shape == (batch_size, factored_belief_data["factor_1_dim"]) + assert projections["projected/layer_0-F0"].shape == (batch_size, factored_belief_data["factor_0_dim"]) + assert projections["projected/layer_0-F1"].shape == (batch_size, factored_belief_data["factor_1_dim"]) def test_linear_regression_svd_with_multiple_factors(self, factored_belief_data): """LinearRegressionSVDAnalysis with multi-factor tuple should regress to each factor separately.""" @@ -953,14 +953,14 @@ def test_linear_regression_svd_with_multiple_factors(self, factored_belief_data) ) # Should have separate metrics for each factor including best_rcond - assert "layer_0_factor_0/r2" in scalars - assert "layer_0_factor_1/r2" in scalars - assert "layer_0_factor_0/best_rcond" in scalars - assert "layer_0_factor_1/best_rcond" in scalars + assert "r2/layer_0-F0" in scalars + assert "r2/layer_0-F1" in scalars + assert "best_rcond/layer_0-F0" in scalars + assert "best_rcond/layer_0-F1" in scalars # 
Should have separate projections for each factor - assert "layer_0_factor_0/projected" in projections - assert "layer_0_factor_1/projected" in projections + assert "projected/layer_0-F0" in projections + assert "projected/layer_0-F1" in projections def test_tracker_with_factored_beliefs(self, factored_belief_data): """ActivationTracker should work with tuple belief states.""" @@ -986,16 +986,16 @@ def test_tracker_with_factored_beliefs(self, factored_belief_data): ) # Regression should have per-factor metrics - assert "regression/layer_0_factor_0/r2" in scalars - assert "regression/layer_0_factor_1/r2" in scalars + assert "regression/r2/layer_0-F0" in scalars + assert "regression/r2/layer_0-F1" in scalars # PCA should still work (doesn't use belief states) - assert "pca/layer_0_variance_explained" in scalars + assert "pca/var_exp/layer_0" in scalars # Projections should be present - assert "regression/layer_0_factor_0/projected" in projections - assert "regression/layer_0_factor_1/projected" in projections - assert "pca/layer_0_pca" in projections + assert "regression/projected/layer_0-F0" in projections + assert "regression/projected/layer_0-F1" in projections + assert "pca/pca/layer_0" in projections def test_single_factor_tuple(self, synthetic_data): """Test with a single-factor tuple (edge case).""" @@ -1043,13 +1043,13 @@ def test_linear_regression_single_factor_tuple_behaves_like_non_tuple(self, synt ) # Should have simple keys without "factor_" prefix - assert "layer_0_r2" in scalars - assert "layer_0_rmse" in scalars - assert "layer_0_projected" in projections + assert "r2/layer_0" in scalars + assert "rmse/layer_0" in scalars + assert "projected/layer_0" in projections # Should NOT have factor keys - assert "layer_0_factor_0/r2" not in scalars - assert "layer_0_factor_0/projected" not in projections + assert "r2/layer_0-F0" not in scalars + assert "projected/layer_0-F0" not in projections def test_linear_regression_concat_belief_states(self, 
factored_belief_data): """LinearRegressionAnalysis with concat_belief_states=True should return both factor and concat results.""" @@ -1074,20 +1074,20 @@ def test_linear_regression_concat_belief_states(self, factored_belief_data): ) # Should have per-factor results - assert "layer_0_factor_0/r2" in scalars - assert "layer_0_factor_1/r2" in scalars - assert "layer_0_factor_0/projected" in projections - assert "layer_0_factor_1/projected" in projections + assert "r2/layer_0-F0" in scalars + assert "r2/layer_0-F1" in scalars + assert "projected/layer_0-F0" in projections + assert "projected/layer_0-F1" in projections # Should ALSO have concatenated results - assert "layer_0_concat/r2" in scalars - assert "layer_0_concat/rmse" in scalars - assert "layer_0_concat/projected" in projections + assert "r2/layer_0-Fcat" in scalars + assert "rmse/layer_0-Fcat" in scalars + assert "projected/layer_0-Fcat" in projections # Check concatenated projection shape (should be sum of factor dimensions) batch_size = factored_belief_data["batch_size"] total_dim = factored_belief_data["factor_0_dim"] + factored_belief_data["factor_1_dim"] - assert projections["layer_0_concat/projected"].shape == (batch_size, total_dim) + assert projections["projected/layer_0-Fcat"].shape == (batch_size, total_dim) def test_three_factor_tuple(self, factored_belief_data): """Test with three factors to ensure generalization.""" @@ -1145,10 +1145,10 @@ def test_compute_subspace_orthogonality(self, factored_belief_data): weights=prepared.weights, ) - assert "layer_0_orthogonality_0_1/subspace_overlap" in scalars - assert "layer_0_orthogonality_0_1/max_singular_value" in scalars - assert "layer_0_orthogonality_0_1/participation_ratio" in scalars - assert "layer_0_orthogonality_0_1/effective_rank" in scalars + assert "orth/overlap/layer_0-F0,1" in scalars + assert "orth/sv_max/layer_0-F0,1" in scalars + assert "orth/p_ratio/layer_0-F0,1" in scalars + assert "orth/eff_rank/layer_0-F0,1" in scalars # SVD Linear 
Regression analysis_svd = LinearRegressionSVDAnalysis( @@ -1162,7 +1162,7 @@ def test_compute_subspace_orthogonality(self, factored_belief_data): weights=prepared.weights, ) - assert "layer_0_orthogonality_0_1/subspace_overlap" in scalars_svd + assert "orth/overlap/layer_0-F0,1" in scalars_svd class TestScalarSeriesMapping: diff --git a/tests/activations/test_activation_visualizations.py b/tests/activations/test_activation_visualizations.py index 8a23619f..b0b2c17d 100644 --- a/tests/activations/test_activation_visualizations.py +++ b/tests/activations/test_activation_visualizations.py @@ -198,7 +198,7 @@ def basic_viz_config(self): def test_builds_payload_with_projections(self, basic_metadata, basic_viz_config): """Test building a payload with projection data.""" - projections = {"layer_0_pca": np.array([[1.0, 2.0], [3.0, 4.0]])} + projections = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} payloads = build_visualization_payloads( analysis_name="test", viz_cfgs=[basic_viz_config], @@ -278,7 +278,7 @@ def test_handles_multiple_configs(self, basic_metadata): } ), ] - projections = {"layer_0_pca": np.array([[1.0, 2.0], [3.0, 4.0]])} + projections = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} payloads = build_visualization_payloads( analysis_name="test", viz_cfgs=configs, diff --git a/tests/activations/test_dataframe_integration.py b/tests/activations/test_dataframe_integration.py index 1fa8727d..c0e9baec 100644 --- a/tests/activations/test_dataframe_integration.py +++ b/tests/activations/test_dataframe_integration.py @@ -34,8 +34,8 @@ def test_factored_projection_dataframe_values_match(self): factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6], [0.3, 0.7]]) projections = { - "layer_0_factor_0/projected": factor_0_data, - "layer_0_factor_1/projected": factor_1_data, + "projected/layer_0-F0": factor_0_data, + "projected/layer_0-F1": factor_1_data, } # Metadata columns with 3 samples @@ -49,13 +49,13 @@ def 
test_factored_projection_dataframe_values_match(self): mappings = { "factor_*_prob_0": ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=0, group_as="factor", ), "factor_*_prob_1": ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=1, group_as="factor", ), @@ -124,8 +124,8 @@ def test_factored_projection_different_component_counts(self): factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6]]) # 2 components projections = { - "layer_0_factor_0/projected": factor_0_data, - "layer_0_factor_1/projected": factor_1_data, + "projected/layer_0-F0": factor_0_data, + "projected/layer_0-F1": factor_1_data, } metadata_columns = { @@ -137,7 +137,7 @@ def test_factored_projection_different_component_counts(self): mappings = { "factor_*_prob_2": ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=2, group_as="factor", ), @@ -174,8 +174,8 @@ def test_combined_projections_and_beliefs_data_integrity(self): projected_values = belief_states + noise projections = { - "layer_0_factor_0/projected": projected_values[:, 0, :], - "layer_0_factor_1/projected": projected_values[:, 1, :], + "projected/layer_0-F0": projected_values[:, 0, :], + "projected/layer_0-F1": projected_values[:, 1, :], } metadata_columns = { @@ -194,7 +194,7 @@ def test_combined_projections_and_beliefs_data_integrity(self): label="prediction", mappings={ f"factor_*_prob_{i}": ActivationVisualizationFieldRef( - source="projections", key="factor_*/projected", component=i, group_as="factor" + source="projections", key="projected/F*", component=i, group_as="factor" ) for i in range(n_states) }, @@ -238,7 +238,7 @@ def test_combined_mode_multiple_layers(self): belief_states = np.random.rand(n_samples, n_factors, n_states) projections = { - f"layer_{layer_idx}_factor_{factor_idx}/projected": np.random.rand(n_samples, n_states) + 
f"projected/layer_{layer_idx}-F{factor_idx}": np.random.rand(n_samples, n_states) for layer_idx in range(n_layers) for factor_idx in range(n_factors) } @@ -259,7 +259,7 @@ def test_combined_mode_multiple_layers(self): label="prediction", mappings={ "factor_*_prob_0": ActivationVisualizationFieldRef( - source="projections", key="factor_*/projected", component=0, group_as="factor" + source="projections", key="projected/F*", component=0, group_as="factor" ), }, ), @@ -301,7 +301,7 @@ def test_full_visualization_pipeline_factored_vs_nonfactored(self): nf_df = _build_dataframe_for_mappings( mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="projected", component=0)}, metadata_columns=metadata, - projections={"layer_0_projected": projection_data}, + projections={"projected/layer_0": projection_data}, scalars={}, belief_states=None, analysis_concat_layers=False, @@ -310,11 +310,11 @@ def test_full_visualization_pipeline_factored_vs_nonfactored(self): f_df = _build_dataframe_for_mappings( mappings={ "factor_*_prob_0": ActivationVisualizationFieldRef( - source="projections", key="factor_*/projected", component=0, group_as="factor" + source="projections", key="projected/F*", component=0, group_as="factor" ) }, metadata_columns=metadata, - projections={"layer_0_factor_0/projected": projection_data}, + projections={"projected/layer_0-F0": projection_data}, scalars={}, belief_states=None, analysis_concat_layers=False, @@ -344,6 +344,6 @@ def test_linear_regression_projections_match_beliefs(self): ) for f in range(n_factors): - assert scalars[f"factor_{f}/r2"] > 0.8, f"Factor {f} R² too low" - diff = np.abs(np.asarray(projections[f"factor_{f}/projected"]) - np.asarray(belief_states[f])) + assert scalars[f"r2/F{f}"] > 0.8, f"Factor {f} R² too low" + diff = np.abs(np.asarray(projections[f"projected/F{f}"]) - np.asarray(belief_states[f])) assert diff.max() < 0.2, f"Factor {f} projections differ too much from beliefs" diff --git 
a/tests/activations/test_field_expansion.py b/tests/activations/test_field_expansion.py index 7eabf844..ec3340b9 100644 --- a/tests/activations/test_field_expansion.py +++ b/tests/activations/test_field_expansion.py @@ -129,14 +129,14 @@ class TestComponentCount: def test_get_component_count_projections_2d(self): """Test getting component count from 2D projections.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"layer_0_pca": np.random.randn(100, 10)} + projections = {"pca/layer_0": np.random.randn(100, 10)} count = _get_component_count(ref, "layer_0", projections, None, False) assert count == 10 def test_get_component_count_projections_different_sizes(self): """Test getting component count from 2D projections with different sizes.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"layer_0_pca": np.random.randn(50, 15)} + projections = {"pca/layer_0": np.random.randn(50, 15)} count = _get_component_count(ref, "layer_0", projections, None, False) assert count == 15 @@ -150,14 +150,14 @@ def test_get_component_count_projections_concat_layers(self): def test_get_component_count_projections_1d_raises(self): """Test that 1D projections raise an error when getting component count.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"layer_0_pca": np.random.randn(100)} + projections = {"pca/layer_0": np.random.randn(100)} with pytest.raises(ConfigValidationError, match="1D projection"): _get_component_count(ref, "layer_0", projections, None, False) def test_get_component_count_projections_3d_raises(self): """Test that 3D projections raise an error when getting component count.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"layer_0_pca": np.random.randn(10, 10, 10)} + projections = {"pca/layer_0": np.random.randn(10, 10, 10)} with pytest.raises(ConfigValidationError, match="1D or 2D"): 
_get_component_count(ref, "layer_0", projections, None, False) @@ -201,7 +201,7 @@ class TestFieldExpansion: def test_wildcard_expansion_projections(self): """Test detection of wildcard expansion patterns.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"layer_0_pca": np.random.randn(50, 3)} + projections = {"pca/layer_0": np.random.randn(50, 3)} expanded = _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, False) @@ -232,7 +232,7 @@ def test_wildcard_expansion_belief_states(self): def test_range_expansion(self): """Test detection of range expansion patterns.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...5") - projections = {"layer_0_pca": np.random.randn(50, 10)} + projections = {"pca/layer_0": np.random.randn(50, 10)} expanded = _expand_field_mapping("pc_0...5", ref, "layer_0", projections, {}, None, False) @@ -246,7 +246,7 @@ def test_range_expansion(self): def test_range_expansion_with_offset(self): """Test detection of range expansion patterns with offset.""" ref = ActivationVisualizationFieldRef(source="projections", key="projected", component="2...5") - projections = {"layer_0_projected": np.random.randn(50, 10)} + projections = {"projected/layer_0": np.random.randn(50, 10)} expanded = _expand_field_mapping("prob_2...5", ref, "layer_0", projections, {}, None, False) @@ -261,7 +261,7 @@ def test_range_expansion_with_offset(self): def test_wildcard_in_middle_of_name(self): """Test detection of wildcard expansion patterns.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"layer_0_pca": np.random.randn(50, 3)} + projections = {"pca/layer_0": np.random.randn(50, 3)} expanded = _expand_field_mapping("component_*_normalized", ref, "layer_0", projections, {}, None, False) @@ -273,7 +273,7 @@ def test_wildcard_in_middle_of_name(self): def test_no_expansion_needed(self): """Test that no 
expansion occurs when component is a specific integer.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component=0) - projections = {"layer_0_pca": np.random.randn(50, 5)} + projections = {"pca/layer_0": np.random.randn(50, 5)} expanded = _expand_field_mapping("pc_0", ref, "layer_0", projections, {}, None, False) @@ -295,7 +295,7 @@ def test_no_expansion_none_component(self): def test_field_pattern_without_component_pattern_raises(self): """Test that a field pattern without a component pattern raises an error.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component=0) - projections = {"layer_0_pca": np.random.randn(50, 5)} + projections = {"pca/layer_0": np.random.randn(50, 5)} with pytest.raises(ConfigValidationError, match="has pattern but component is not"): _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, False) @@ -303,7 +303,7 @@ def test_field_pattern_without_component_pattern_raises(self): def test_component_pattern_without_field_pattern_raises(self): """Test that a component pattern without a field pattern raises an error.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"layer_0_pca": np.random.randn(50, 5)} + projections = {"pca/layer_0": np.random.randn(50, 5)} with pytest.raises(ConfigValidationError, match="requires field name pattern"): _expand_field_mapping("pc_0", ref, "layer_0", projections, {}, None, False) @@ -311,7 +311,7 @@ def test_component_pattern_without_field_pattern_raises(self): def test_range_exceeds_available_components(self): """Test that a range exceeding available components raises an error.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...20") - projections = {"layer_0_pca": np.random.randn(50, 10)} + projections = {"pca/layer_0": np.random.randn(50, 10)} with pytest.raises(ConfigValidationError, match="exceeds available components"): 
_expand_field_mapping("pc_0...20", ref, "layer_0", projections, {}, None, False) @@ -319,7 +319,7 @@ def test_range_exceeds_available_components(self): def test_range_partially_exceeds_available_components(self): """Test that a range partially exceeding available components raises an error.""" ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="5...15") - projections = {"layer_0_pca": np.random.randn(50, 10)} + projections = {"pca/layer_0": np.random.randn(50, 10)} with pytest.raises(ConfigValidationError, match="exceeds available components"): _expand_field_mapping("pc_5...15", ref, "layer_0", projections, {}, None, False) @@ -500,90 +500,90 @@ def test_range_pattern_in_middle(self): class TestKeyPatternExpansion: - """Test projection key pattern expansion (e.g., factor_*/projected).""" + """Test projection key pattern expansion (e.g., projected/F*).""" def test_has_key_pattern_wildcard(self): """Test that _has_key_pattern detects wildcard patterns correctly.""" - assert _has_key_pattern("factor_*/projected") - assert _has_key_pattern("*/projected") + assert _has_key_pattern("projected/F*") + assert _has_key_pattern("projected/*") assert _has_key_pattern("factor_*") def test_has_key_pattern_range(self): """Test that _has_key_pattern detects range patterns correctly.""" - assert _has_key_pattern("factor_0...3/projected") - assert _has_key_pattern("0...5/projected") + assert _has_key_pattern("projected/F0...3") + assert _has_key_pattern("projected/F0...5") def test_has_key_pattern_none(self): """Test that _has_key_pattern returns False for non-pattern keys.""" assert not _has_key_pattern(None) assert not _has_key_pattern("projected") - assert not _has_key_pattern("factor_0/projected") + assert not _has_key_pattern("projected/F0") def test_has_key_pattern_multiple_raises(self): """Test that _has_key_pattern raises an error for multiple patterns.""" with pytest.raises(ConfigValidationError, match="multiple patterns"): - 
_has_key_pattern("factor_*/layer_*/projected") + _has_key_pattern("projected/L*/F*") def test_expand_projection_key_pattern_wildcard(self): """Test that _expand_projection_key_pattern expands wildcard patterns correctly.""" projections = { - "layer_0_factor_0/projected": np.random.randn(10, 3), - "layer_0_factor_1/projected": np.random.randn(10, 3), - "layer_0_factor_2/projected": np.random.randn(10, 3), + "projected/layer_0-F0": np.random.randn(10, 3), + "projected/layer_0-F1": np.random.randn(10, 3), + "projected/layer_0-F2": np.random.randn(10, 3), } - result = _expand_projection_key_pattern("factor_*/projected", "layer_0", projections, False) + result = _expand_projection_key_pattern("projected/F*", "layer_0", projections, False) assert len(result) == 3 - assert result["0"] == "factor_0/projected" - assert result["1"] == "factor_1/projected" - assert result["2"] == "factor_2/projected" + assert result["0"] == "projected/F0" + assert result["1"] == "projected/F1" + assert result["2"] == "projected/F2" def test_expand_projection_key_pattern_range(self): """Test that _expand_projection_key_pattern expands range patterns correctly.""" projections = { - "layer_0_factor_0/projected": np.random.randn(10, 3), - "layer_0_factor_1/projected": np.random.randn(10, 3), - "layer_0_factor_2/projected": np.random.randn(10, 3), + "projected/layer_0-F0": np.random.randn(10, 3), + "projected/layer_0-F1": np.random.randn(10, 3), + "projected/layer_0-F2": np.random.randn(10, 3), } - result = _expand_projection_key_pattern("factor_0...2/projected", "layer_0", projections, False) + result = _expand_projection_key_pattern("projected/F0...2", "layer_0", projections, False) assert len(result) == 2 - assert result["0"] == "factor_0/projected" - assert result["1"] == "factor_1/projected" + assert result["0"] == "projected/F0" + assert result["1"] == "projected/F1" def test_expand_projection_key_pattern_concat_layers(self): """Test that _expand_projection_key_pattern works with 
concatenated layers.""" projections = { - "factor_0/projected": np.random.randn(10, 3), - "factor_1/projected": np.random.randn(10, 3), + "projected/F0": np.random.randn(10, 3), + "projected/F1": np.random.randn(10, 3), } - result = _expand_projection_key_pattern("factor_*/projected", "any_layer", projections, True) + result = _expand_projection_key_pattern("projected/F*", "any_layer", projections, True) assert len(result) == 2 - assert result["0"] == "factor_0/projected" - assert result["1"] == "factor_1/projected" + assert result["0"] == "projected/F0" + assert result["1"] == "projected/F1" def test_expand_projection_key_pattern_no_matches_raises(self): """Test that _expand_projection_key_pattern raises an error when no keys match.""" - projections = {"layer_0_pca": np.random.randn(10, 3)} + projections = {"pca/layer_0": np.random.randn(10, 3)} with pytest.raises(ConfigValidationError, match="No projection keys found"): - _expand_projection_key_pattern("factor_*/projected", "layer_0", projections, False) + _expand_projection_key_pattern("projected/F*", "layer_0", projections, False) def test_field_mapping_with_key_pattern(self): """Test that field mappings with key patterns are expanded correctly.""" ref = ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=0, group_as="factor", ) projections = { - "layer_0_factor_0/projected": np.random.randn(10, 3), - "layer_0_factor_1/projected": np.random.randn(10, 3), + "projected/layer_0-F0": np.random.randn(10, 3), + "projected/layer_0-F1": np.random.randn(10, 3), } expanded = _expand_field_mapping("factor_*_prob", ref, "layer_0", projections, {}, None, False) @@ -591,8 +591,8 @@ def test_field_mapping_with_key_pattern(self): assert len(expanded) == 2 assert "factor_0_prob" in expanded assert "factor_1_prob" in expanded - assert expanded["factor_0_prob"].key == "factor_0/projected" - assert expanded["factor_1_prob"].key == "factor_1/projected" + assert 
expanded["factor_0_prob"].key == "projected/F0" + assert expanded["factor_1_prob"].key == "projected/F1" assert expanded["factor_0_prob"]._group_value == "0" # pylint: disable=protected-access assert expanded["factor_1_prob"]._group_value == "1" # pylint: disable=protected-access assert expanded["factor_0_prob"].group_as == "factor" @@ -601,13 +601,13 @@ def test_field_mapping_with_key_and_component_patterns(self): """Test that field mappings with key and component patterns are expanded correctly.""" ref = ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component="*", group_as="factor", ) projections = { - "layer_0_factor_0/projected": np.random.randn(10, 3), - "layer_0_factor_1/projected": np.random.randn(10, 3), + "projected/layer_0-F0": np.random.randn(10, 3), + "projected/layer_0-F1": np.random.randn(10, 3), } expanded = _expand_field_mapping("factor_*_prob_*", ref, "layer_0", projections, {}, None, False) @@ -627,8 +627,8 @@ def test_field_mapping_with_key_and_component_patterns(self): assert expanded["factor_1_prob_2"].component == 2 # Check that keys and group values are correct - assert expanded["factor_0_prob_0"].key == "factor_0/projected" - assert expanded["factor_1_prob_0"].key == "factor_1/projected" + assert expanded["factor_0_prob_0"].key == "projected/F0" + assert expanded["factor_1_prob_0"].key == "projected/F1" assert expanded["factor_0_prob_0"]._group_value == "0" # pylint: disable=protected-access assert expanded["factor_1_prob_0"]._group_value == "1" # pylint: disable=protected-access @@ -636,11 +636,11 @@ def test_key_pattern_without_field_pattern_raises(self): """Test that a key pattern without a field pattern raises an error.""" ref = ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=0, group_as="factor", ) - projections = {"layer_0_factor_0/projected": np.random.randn(10, 3)} + projections = {"projected/layer_0-F0": 
np.random.randn(10, 3)} with pytest.raises(ConfigValidationError, match="requires field name pattern"): _expand_field_mapping("prob_0", ref, "layer_0", projections, {}, None, False) @@ -654,7 +654,7 @@ def test_key_pattern_requires_group_as(self): with pytest.raises(ConfigValidationError, match="requires `group_as`"): ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=0, ) @@ -671,18 +671,18 @@ def test_valid_key_pattern_with_group_as(self): """Test that a valid key pattern with group_as is accepted.""" ref = ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=0, group_as="factor", ) assert ref.group_as == "factor" - assert ref.key == "factor_*/projected" + assert ref.key == "projected/F*" def test_valid_key_pattern_with_list_group_as(self): """Test that a valid key pattern with list group_as is accepted.""" ref = ActivationVisualizationFieldRef( source="projections", - key="factor_*/projected", + key="projected/F*", component=0, group_as=["factor", "layer"], ) diff --git a/tests/activations/test_scalar_wildcard_expansion.py b/tests/activations/test_scalar_wildcard_expansion.py index b01a6156..8a68450d 100644 --- a/tests/activations/test_scalar_wildcard_expansion.py +++ b/tests/activations/test_scalar_wildcard_expansion.py @@ -12,7 +12,7 @@ class TestScalarWildcardExpansion: def test_scalar_no_pattern_returns_identity(self): """Scalars without patterns should return as-is.""" scalars = {"layer_0_rmse": 0.5} - result = _expand_scalar_keys("rmse", "layer_0_rmse", "layer_0", scalars) + result = _expand_scalar_keys("rmse", "layer_0_rmse", scalars) assert result == {"rmse": "layer_0_rmse"} @@ -25,7 +25,7 @@ def test_scalar_wildcard_expansion(self): "cumvar_3": 0.99, "other_metric": 1.0, } - result = _expand_scalar_keys("cumvar_*", "cumvar_*", "layer_0", scalars) + result = _expand_scalar_keys("cumvar_*", "cumvar_*", scalars) assert len(result) 
== 4 assert result == { @@ -44,7 +44,7 @@ def test_scalar_wildcard_with_prefix_suffix(self): "layer_1_cumvar_0": 0.7, "other": 1.0, } - result = _expand_scalar_keys("cv_*", "layer_0_cumvar_*", "layer_0", scalars) + result = _expand_scalar_keys("cv_*", "layer_0_cumvar_*", scalars) assert len(result) == 3 assert result == { @@ -62,7 +62,7 @@ def test_scalar_range_expansion(self): "cumvar_3": 0.99, "cumvar_4": 0.995, } - result = _expand_scalar_keys("cumvar_1...4", "cumvar_1...4", "layer_0", scalars) + result = _expand_scalar_keys("cumvar_1...4", "cumvar_1...4", scalars) assert len(result) == 3 assert result == { @@ -76,14 +76,14 @@ def test_scalar_wildcard_no_matches_raises_error(self): scalars = {"other_metric": 1.0} with pytest.raises(ConfigValidationError, match="No keys found matching pattern"): - _expand_scalar_keys("cumvar_*", "cumvar_*", "layer_0", scalars) + _expand_scalar_keys("cumvar_*", "cumvar_*", scalars) def test_scalar_wildcard_requires_key_pattern(self): """Wildcard expansion without a key should raise an error.""" scalars = {"metric": 1.0} with pytest.raises(ConfigValidationError, match="Scalar wildcard expansion requires a key pattern"): - _expand_scalar_keys("field_*", None, "layer_0", scalars) + _expand_scalar_keys("field_*", None, scalars) def test_scalar_expansion_sorts_indices(self): """Expanded scalar keys should be sorted by index.""" @@ -93,7 +93,7 @@ def test_scalar_expansion_sorts_indices(self): "var_3": 0.3, "var_2": 0.2, } - result = _expand_scalar_keys("v_*", "var_*", "layer_0", scalars) + result = _expand_scalar_keys("v_*", "var_*", scalars) # Check that keys are in sorted order keys = list(result.keys()) @@ -106,7 +106,7 @@ def test_scalar_wildcard_field_name_pattern_mismatch(self): scalars = {"metric": 1.0} # _expand_scalar_keys just returns identity if no pattern in key - result = _expand_scalar_keys("field_*", "metric", "layer_0", scalars) + result = _expand_scalar_keys("field_*", "metric", scalars) assert result == {"field_*": 
"metric"} def test_scalar_range_invalid_format_returns_identity(self): @@ -114,7 +114,7 @@ def test_scalar_range_invalid_format_returns_identity(self): scalars = {"metric_1..4": 1.0} # Two dots instead of three - not a valid range pattern, returns identity - result = _expand_scalar_keys("field_1..4", "metric_1..4", "layer_0", scalars) + result = _expand_scalar_keys("field_1..4", "metric_1..4", scalars) assert result == {"field_1..4": "metric_1..4"} def test_scalar_wildcard_with_non_numeric_ignored(self): @@ -125,7 +125,7 @@ def test_scalar_wildcard_with_non_numeric_ignored(self): "metric_abc": 0.2, "metric_xyz": 0.3, } - result = _expand_scalar_keys("m_*", "metric_*", "layer_0", scalars) + result = _expand_scalar_keys("m_*", "metric_*", scalars) # Only numeric indices should be included assert len(result) == 2 @@ -142,7 +142,7 @@ def test_scalar_expansion_deduplicates_indices(self): "var_01": 0.1, # This would match as index 1 if not carefully handled } # This test verifies basic behavior - exact matching prevents this issue - result = _expand_scalar_keys("v_*", "var_*", "layer_0", scalars) + result = _expand_scalar_keys("v_*", "var_*", scalars) # Should only match exact numeric patterns assert "v_1" in result @@ -155,7 +155,7 @@ def test_scalar_range_expansion_with_field_pattern(self): "metric_2": 0.2, "metric_3": 0.3, } - result = _expand_scalar_keys("m_0...3", "metric_0...3", "layer_0", scalars) + result = _expand_scalar_keys("m_0...3", "metric_0...3", scalars) assert len(result) == 3 assert result == { @@ -172,7 +172,7 @@ def test_scalar_wildcard_complex_key_pattern(self): "layer_0_pca_cumvar_2": 0.95, "layer_1_pca_cumvar_0": 0.7, } - result = _expand_scalar_keys("pc_cv_*", "layer_0_pca_cumvar_*", "layer_0", scalars) + result = _expand_scalar_keys("pc_cv_*", "layer_0_pca_cumvar_*", scalars) assert len(result) == 3 assert result == { diff --git a/tests/activations/test_visualization_modules.py b/tests/activations/test_visualization_modules.py index 
9fd566f9..74c67a7e 100644 --- a/tests/activations/test_visualization_modules.py +++ b/tests/activations/test_visualization_modules.py @@ -56,14 +56,9 @@ class TestFieldResolution: """Tests for field_resolution.py functions.""" - def test_lookup_projection_array_none_key(self): - """Test that None key raises error.""" - with pytest.raises(ConfigValidationError, match="must supply a `key` value"): - _lookup_projection_array({}, "layer_0", None, False) - def test_lookup_projection_array_not_found(self): """Test that missing projection raises error.""" - projections = {"layer_0_other": np.array([1, 2, 3])} + projections = {"other/layer_0": np.array([1, 2, 3])} with pytest.raises(ConfigValidationError, match="not available for layer"): _lookup_projection_array(projections, "layer_0", "missing", False) @@ -73,9 +68,9 @@ def test_lookup_projection_array_concat_layers_exact_match(self): result = _lookup_projection_array(projections, "layer_0", "my_key", True) np.testing.assert_array_equal(result, [1, 2, 3]) - def test_lookup_projection_array_concat_layers_suffix_match(self): - """Test suffix match with concat_layers.""" - projections = {"prefix_my_key": np.array([4, 5, 6])} + def test_lookup_projection_array_concat_layers_prefix_match(self): + """Test prefix match with concat_layers.""" + projections = {"my_key/Lcat": np.array([4, 5, 6])} result = _lookup_projection_array(projections, "layer_0", "my_key", True) np.testing.assert_array_equal(result, [4, 5, 6]) @@ -85,16 +80,16 @@ def test_lookup_scalar_value_concat_layers_exact(self): result = _lookup_scalar_value(scalars, "layer_0", "my_scalar", True) assert result == 0.5 - def test_lookup_scalar_value_concat_layers_suffix(self): - """Test scalar lookup with concat_layers suffix match.""" - scalars = {"prefix_my_scalar": 0.7} + def test_lookup_scalar_value_concat_layers_prefix(self): + """Test scalar lookup with concat_layers prefix match.""" + scalars = {"my_scalar/Lcat": 0.7} result = _lookup_scalar_value(scalars, 
"layer_0", "my_scalar", True) assert result == 0.7 def test_lookup_scalar_value_not_found(self): """Test that missing scalar raises error.""" with pytest.raises(ConfigValidationError, match="not available for layer"): - _lookup_scalar_value({"other": 1.0}, "layer_0", "missing", False) + _lookup_scalar_value({"other/layer_0": 1.0}, "layer_0", "missing", False) def test_maybe_component_1d_with_component(self): """Test that 1D array with component raises error.""" @@ -180,7 +175,7 @@ def test_resolve_field_belief_states_missing(self): def test_resolve_field_scalars_success(self): """Test scalars source returns repeated value.""" ref = ActivationVisualizationFieldRef(source="scalars", key="my_scalar") - scalars = {"layer_0_my_scalar": 0.42} + scalars = {"my_scalar/layer_0": 0.42} result = _resolve_field(ref, "layer_0", {}, scalars, None, False, 3, {}) np.testing.assert_array_equal(result, [0.42, 0.42, 0.42]) @@ -234,14 +229,14 @@ def test_expand_pattern_to_indices_non_numeric_ignored(self): def test_get_component_count_projection_success(self): """Test getting component count from 2D projection.""" ref = ActivationVisualizationFieldRef(source="projections", key="proj", component="*") - projections = {"layer_0_proj": np.ones((10, 5))} + projections = {"proj/layer_0": np.ones((10, 5))} result = _get_component_count(ref, "layer_0", projections, None, False) assert result == 5 def test_get_component_count_1d_projection(self): """Test that 1D projection raises error for expansion.""" ref = ActivationVisualizationFieldRef(source="projections", key="proj") - projections = {"layer_0_proj": np.array([1, 2, 3])} + projections = {"proj/layer_0": np.array([1, 2, 3])} with pytest.raises(ConfigValidationError, match="Cannot expand 1D"): _get_component_count(ref, "layer_0", projections, None, False) @@ -275,7 +270,7 @@ def test_expand_projection_key_pattern_invalid_range(self): def test_expand_projection_key_pattern_no_matches(self): """Test that no matching projections raises 
error.""" - projections = {"layer_0_other": np.ones((3, 4))} + projections = {"other/layer_0": np.ones((3, 4))} with pytest.raises(ConfigValidationError, match="No projection keys found"): _expand_projection_key_pattern("key_*", "layer_0", projections, False) @@ -558,12 +553,12 @@ def test_scalar_series_metadata_with_scalar(self): def test_infer_scalar_series_indices_success(self): """Test inferring scalar series indices from available keys.""" mapping = ScalarSeriesMapping( - key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" ) scalars = { - "analysis/layer_0_cumvar_0": 0.5, - "analysis/layer_0_cumvar_1": 0.7, - "analysis/layer_0_cumvar_2": 0.9, + "analysis/cumvar_0/layer_0": 0.5, + "analysis/cumvar_1/layer_0": 0.7, + "analysis/cumvar_2/layer_0": 0.9, } result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") assert result == [0, 1, 2] @@ -571,12 +566,12 @@ def test_infer_scalar_series_indices_success(self): def test_infer_scalar_series_indices_empty_body(self): """Test that empty body between prefix and suffix is skipped.""" mapping = ScalarSeriesMapping( - key_template="{layer}_pc{index}_var", index_field="component", value_field="variance" + key_template="pc{index}_var/{layer}", index_field="component", value_field="variance" ) # Key that matches prefix and suffix but has empty body scalars = { - "analysis/layer_0_pc_var": 0.5, # Empty between pc and _var - "analysis/layer_0_pc0_var": 0.3, + "analysis/pc_var/layer_0": 0.5, # Empty between pc and _var + "analysis/pc0_var/layer_0": 0.3, } result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") assert result == [0] # Only numeric index included @@ -584,7 +579,7 @@ def test_infer_scalar_series_indices_empty_body(self): def test_infer_scalar_series_indices_no_matches(self): """Test that no matching indices raises error.""" mapping = ScalarSeriesMapping( 
- key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" ) scalars = {"analysis/other_metric": 1.0} with pytest.raises(ConfigValidationError, match="could not infer indices"): @@ -593,24 +588,24 @@ def test_infer_scalar_series_indices_no_matches(self): def test_infer_scalar_series_indices_with_suffix(self): """Test inferring indices when template has suffix after index.""" mapping = ScalarSeriesMapping( - key_template="{layer}_pc{index}_var", index_field="component", value_field="variance" + key_template="pc{index}_var/{layer}", index_field="component", value_field="variance" ) scalars = { - "analysis/layer_0_pc0_var": 0.5, - "analysis/layer_0_pc1_var": 0.3, - "analysis/layer_0_pc2_var": 0.2, - "analysis/layer_0_other": 1.0, # Should not match + "analysis/pc0_var/layer_0": 0.5, + "analysis/pc1_var/layer_0": 0.3, + "analysis/pc2_var/layer_0": 0.2, + "analysis/other/layer_0": 1.0, # Should not match } result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") assert result == [0, 1, 2] def test_infer_scalar_series_indices_non_numeric_skipped(self): """Test that non-numeric values are skipped.""" - mapping = ScalarSeriesMapping(key_template="{layer}_item_{index}", index_field="idx", value_field="val") + mapping = ScalarSeriesMapping(key_template="item_{index}/{layer}", index_field="idx", value_field="val") scalars = { - "analysis/layer_0_item_0": 0.5, - "analysis/layer_0_item_abc": 0.7, # Non-numeric, should be skipped - "analysis/layer_0_item_1": 0.9, + "analysis/item_0/layer_0": 0.5, + "analysis/item_abc/layer_0": 0.7, # Non-numeric, should be skipped + "analysis/item_1/layer_0": 0.9, } result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") assert result == [0, 1] @@ -618,13 +613,13 @@ def test_infer_scalar_series_indices_non_numeric_skipped(self): def test_build_scalar_series_dataframe_success(self): 
"""Test building scalar series dataframe.""" mapping = ScalarSeriesMapping( - key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" ) metadata = {"step": np.array([10]), "analysis": np.array(["pca"])} scalars = { - "analysis/layer_0_cumvar_0": 0.5, - "analysis/layer_0_cumvar_1": 0.7, - "analysis/layer_1_cumvar_0": 0.6, + "analysis/cumvar_0/layer_0": 0.5, + "analysis/cumvar_1/layer_0": 0.7, + "analysis/cumvar_0/layer_1": 0.6, } result = _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0", "layer_1"], "analysis") assert len(result) == 3 @@ -635,7 +630,7 @@ def test_build_scalar_series_dataframe_success(self): def test_build_scalar_series_dataframe_no_matches(self): """Test that no matching scalars raises error.""" mapping = ScalarSeriesMapping( - key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" ) metadata = {"step": np.array([10])} scalars = {"analysis/other_metric": 1.0} @@ -646,13 +641,13 @@ def test_build_scalar_series_dataframe_no_matches(self): def test_build_scalar_series_dataframe_with_explicit_indices(self): """Test building scalar series dataframe with explicit index_values.""" mapping = ScalarSeriesMapping( - key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar", index_values=[0, 1] + key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar", index_values=[0, 1] ) metadata = {"step": np.array([10])} scalars = { - "analysis/layer_0_cumvar_0": 0.5, - "analysis/layer_0_cumvar_1": 0.7, - "analysis/layer_0_cumvar_2": 0.9, # Not in index_values, should be skipped + "analysis/cumvar_0/layer_0": 0.5, + "analysis/cumvar_1/layer_0": 0.7, + "analysis/cumvar_2/layer_0": 0.9, # Not in index_values, should be skipped } result = 
_build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0"], "analysis") assert len(result) == 2 @@ -660,10 +655,10 @@ def test_build_scalar_series_dataframe_with_explicit_indices(self): def test_build_scalar_dataframe_scalar_pattern(self): """Test building scalar dataframe with scalar_pattern source.""" - mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_rmse")} + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="rmse/layer_*")} scalars = { - "analysis/layer_0_rmse": 0.1, - "analysis/layer_1_rmse": 0.2, + "analysis/rmse/layer_0": 0.1, + "analysis/rmse/layer_1": 0.2, } result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) assert len(result) == 2 @@ -701,9 +696,9 @@ def test_build_scalar_dataframe_non_scalar_source_skipped(self): """Test that non-scalar sources are skipped.""" mappings = { "proj": ActivationVisualizationFieldRef(source="projections", key="my_proj"), - "rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_rmse"), + "rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="rmse/layer_*"), } - scalars = {"analysis/layer_0_rmse": 0.1} + scalars = {"analysis/rmse/layer_0": 0.1} result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) # Only scalar_pattern should be in result assert "rmse" in result.columns @@ -730,9 +725,9 @@ def test_build_scalar_dataframe_key_none(self): def test_build_scalar_dataframe_no_matching_values(self): """Test that no matching values raises error with pattern.""" - mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_missing")} + mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="missing/layer_*")} # Scalars exist but don't match the pattern - scalars = {"analysis/layer_0_other": 0.1, "analysis/something_else": 0.2} + scalars = {"analysis/other/layer_0": 0.1, "analysis/something_else": 0.2} with 
pytest.raises(ConfigValidationError, match="No scalar pattern keys found"): _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) @@ -758,7 +753,7 @@ def test_build_dataframe_for_mappings_simple(self): """Test _build_dataframe_for_mappings with simple projection mapping.""" mappings = {"x": ActivationVisualizationFieldRef(source="projections", key="pca", component=0)} metadata = {"step": np.array([1, 2]), "analysis": np.array(["test", "test"])} - projections = {"layer_0_pca": np.array([[0.1, 0.2], [0.3, 0.4]])} + projections = {"pca/layer_0": np.array([[0.1, 0.2], [0.3, 0.4]])} result = _build_dataframe_for_mappings(mappings, metadata, projections, {}, None, False, ["layer_0"]) assert "x" in result.columns assert "layer" in result.columns @@ -806,11 +801,11 @@ def test_build_dataframe_for_mappings_error_wrapping(self): def test_build_dataframe_with_scalar_pattern(self): """Test _build_dataframe with scalar_pattern source.""" data_mapping = ActivationVisualizationDataMapping( - mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="layer_*_rmse")} + mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="rmse/layer_*")} ) viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) metadata = {"step": np.array([1]), "analysis": np.array(["test"])} - scalars = {"test/layer_0_rmse": 0.1, "test/layer_1_rmse": 0.2} + scalars = {"test/rmse/layer_0": 0.1, "test/rmse/layer_1": 0.2} result = _build_dataframe(viz_cfg, metadata, {}, scalars, {}, 10, None, False, ["layer_0", "layer_1"]) assert "rmse" in result.columns assert len(result) == 2 @@ -818,12 +813,12 @@ def test_build_dataframe_with_scalar_pattern(self): def test_build_dataframe_with_scalar_series(self): """Test _build_dataframe with scalar_series source.""" scalar_series = ScalarSeriesMapping( - key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" + key_template="cumvar_{index}/{layer}", 
index_field="component", value_field="cumvar" ) data_mapping = ActivationVisualizationDataMapping(mappings={}, scalar_series=scalar_series) viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) metadata = {"step": np.array([1]), "analysis": np.array(["test"])} - scalars = {"test/layer_0_cumvar_0": 0.5, "test/layer_0_cumvar_1": 0.7} + scalars = {"test/cumvar_0/layer_0": 0.5, "test/cumvar_1/layer_0": 0.7} result = _build_dataframe(viz_cfg, metadata, {}, scalars, {}, None, None, False, ["layer_0"]) assert "component" in result.columns assert "cumvar" in result.columns @@ -844,8 +839,8 @@ def test_build_dataframe_combined_mappings(self): viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) metadata = {"step": np.array([1])} projections = { - "layer_0_pca": np.array([[0.1, 0.2]]), - "layer_0_raw": np.array([[0.5, 0.6]]), + "pca/layer_0": np.array([[0.1, 0.2]]), + "raw/layer_0": np.array([[0.5, 0.6]]), } result = _build_dataframe(viz_cfg, metadata, projections, {}, {}, None, None, False, ["layer_0"]) assert "source" in result.columns diff --git a/tests/analysis/test_layerwise_analysis.py b/tests/analysis/test_layerwise_analysis.py index 5b2c9dbd..13831d69 100644 --- a/tests/analysis/test_layerwise_analysis.py +++ b/tests/analysis/test_layerwise_analysis.py @@ -38,14 +38,14 @@ def test_layerwise_analysis_linear_regression_namespacing(analysis_inputs) -> No belief_states=belief_states, ) - assert set(scalars) >= {"layer_a_r2", "layer_b_r2"} + assert set(scalars) >= {"r2/layer_a", "r2/layer_b"} assert set(projections) == { - "layer_a_projected", - "layer_b_projected", - "layer_a_coeffs", - "layer_b_coeffs", - "layer_a_intercept", - "layer_b_intercept", + "projected/layer_a", + "projected/layer_b", + "coeffs/layer_a", + "coeffs/layer_b", + "intercept/layer_a", + "intercept/layer_b", } @@ -89,9 +89,9 @@ def test_pca_analysis_does_not_require_beliefs(analysis_inputs) -> None: weights=weights, belief_states=None, ) - assert 
"layer_a_cumvar_1" in scalars - assert "layer_a_n_components_50pct" in scalars - assert "layer_a_pca" in projections + assert "cumvar_1/layer_a" in scalars + assert "nc_50/layer_a" in scalars + assert "pca/layer_a" in projections def test_invalid_pca_kwargs() -> None: diff --git a/tests/analysis/test_linear_regression.py b/tests/analysis/test_linear_regression.py index c32766c1..52c3b29a 100644 --- a/tests/analysis/test_linear_regression.py +++ b/tests/analysis/test_linear_regression.py @@ -237,34 +237,34 @@ def test_layer_linear_regression_belief_states_tuple_default() -> None: ) # Should have separate metrics for each factor - assert "factor_0/r2" in scalars - assert "factor_1/r2" in scalars - assert "factor_0/rmse" in scalars - assert "factor_1/rmse" in scalars - assert "factor_0/mae" in scalars - assert "factor_1/mae" in scalars - assert "factor_0/dist" in scalars - assert "factor_1/dist" in scalars + assert "r2/F0" in scalars + assert "r2/F1" in scalars + assert "rmse/F0" in scalars + assert "rmse/F1" in scalars + assert "mae/F0" in scalars + assert "mae/F1" in scalars + assert "dist/F0" in scalars + assert "dist/F1" in scalars # Should have separate projections for each factor - assert "factor_0/projected" in arrays - assert "factor_1/projected" in arrays + assert "projected/F0" in arrays + assert "projected/F1" in arrays # Should have separate parameters for each factor - assert "factor_0/coeffs" in arrays - assert "factor_1/coeffs" in arrays + assert "coeffs/F0" in arrays + assert "coeffs/F1" in arrays # Should have separate intercepts for each factor by default - assert "factor_0/intercept" in arrays - assert "factor_1/intercept" in arrays + assert "intercept/F0" in arrays + assert "intercept/F1" in arrays # Check shapes - assert arrays["factor_0/projected"].shape == factor_0.shape - assert arrays["factor_1/projected"].shape == factor_1.shape - assert arrays["factor_0/coeffs"].shape == (x.shape[1], factor_0.shape[1]) - assert 
arrays["factor_1/coeffs"].shape == (x.shape[1], factor_1.shape[1]) - assert arrays["factor_0/intercept"].shape == (1, factor_0.shape[1]) - assert arrays["factor_1/intercept"].shape == (1, factor_1.shape[1]) + assert arrays["projected/F0"].shape == factor_0.shape + assert arrays["projected/F1"].shape == factor_1.shape + assert arrays["coeffs/F0"].shape == (x.shape[1], factor_0.shape[1]) + assert arrays["coeffs/F1"].shape == (x.shape[1], factor_1.shape[1]) + assert arrays["intercept/F0"].shape == (1, factor_0.shape[1]) + assert arrays["intercept/F1"].shape == (1, factor_1.shape[1]) def test_layer_linear_regression_svd_belief_states_tuple_default() -> None: @@ -287,31 +287,31 @@ def test_layer_linear_regression_svd_belief_states_tuple_default() -> None: # Should have ALL regression metrics for each factor including best_rcond for factor in [0, 1]: - assert f"factor_{factor}/r2" in scalars - assert f"factor_{factor}/rmse" in scalars - assert f"factor_{factor}/mae" in scalars - assert f"factor_{factor}/dist" in scalars - assert f"factor_{factor}/best_rcond" in scalars + assert f"r2/F{factor}" in scalars + assert f"rmse/F{factor}" in scalars + assert f"mae/F{factor}" in scalars + assert f"dist/F{factor}" in scalars + assert f"best_rcond/F{factor}" in scalars # Should have separate projections for each factor - assert "factor_0/projected" in arrays - assert "factor_1/projected" in arrays + assert "projected/F0" in arrays + assert "projected/F1" in arrays # Should have separate coefficients for each factor - assert "factor_0/coeffs" in arrays - assert "factor_1/coeffs" in arrays + assert "coeffs/F0" in arrays + assert "coeffs/F1" in arrays # Should have separate intercepts for each factor by default - assert "factor_0/intercept" in arrays - assert "factor_1/intercept" in arrays + assert "intercept/F0" in arrays + assert "intercept/F1" in arrays # Check shapes - assert arrays["factor_0/projected"].shape == factor_0.shape - assert arrays["factor_1/projected"].shape == 
factor_1.shape - assert arrays["factor_0/coeffs"].shape == (x.shape[1], factor_0.shape[1]) - assert arrays["factor_1/coeffs"].shape == (x.shape[1], factor_1.shape[1]) - assert arrays["factor_0/intercept"].shape == (1, factor_0.shape[1]) - assert arrays["factor_1/intercept"].shape == (1, factor_1.shape[1]) + assert arrays["projected/F0"].shape == factor_0.shape + assert arrays["projected/F1"].shape == factor_1.shape + assert arrays["coeffs/F0"].shape == (x.shape[1], factor_0.shape[1]) + assert arrays["coeffs/F1"].shape == (x.shape[1], factor_1.shape[1]) + assert arrays["intercept/F0"].shape == (1, factor_0.shape[1]) + assert arrays["intercept/F1"].shape == (1, factor_1.shape[1]) def test_layer_linear_regression_belief_states_tuple_single_factor() -> None: @@ -373,30 +373,30 @@ def test_orthogonality_with_orthogonal_subspaces() -> None: ) # Should have standard factor metrics with perfect fit - assert scalars["factor_0/r2"] > 0.99 # Should fit nearly perfectly - assert scalars["factor_1/r2"] > 0.99 + assert scalars["r2/F0"] > 0.99 # Should fit nearly perfectly + assert scalars["r2/F1"] > 0.99 # Should have ALL orthogonality metrics - assert "orthogonality_0_1/subspace_overlap" in scalars - assert "orthogonality_0_1/max_singular_value" in scalars - assert "orthogonality_0_1/min_singular_value" in scalars - assert "orthogonality_0_1/participation_ratio" in scalars - assert "orthogonality_0_1/entropy" in scalars - assert "orthogonality_0_1/effective_rank" in scalars + assert "orth/overlap/F0,1" in scalars + assert "orth/sv_max/F0,1" in scalars + assert "orth/sv_min/F0,1" in scalars + assert "orth/p_ratio/F0,1" in scalars + assert "orth/entropy/F0,1" in scalars + assert "orth/eff_rank/F0,1" in scalars # Compute principled threshold based on machine precision and problem size threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) # Should indicate near-zero overlap (orthogonal by construction) - assert scalars["orthogonality_0_1/subspace_overlap"] < threshold 
- assert scalars["orthogonality_0_1/max_singular_value"] < threshold + assert scalars["orth/overlap/F0,1"] < threshold + assert scalars["orth/sv_max/F0,1"] < threshold # Should have singular values in arrays - assert "orthogonality_0_1/singular_values" in arrays + assert "orth/singular_values/F0,1" in arrays # Both factors have 2 dimensions, so min(2, 2) = 2 singular values - assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + assert arrays["orth/singular_values/F0,1"].shape[0] == 2 # All singular values should be near zero (orthogonal) - assert jnp.all(arrays["orthogonality_0_1/singular_values"] < threshold) + assert jnp.all(arrays["orth/singular_values/F0,1"] < threshold) def test_orthogonality_with_aligned_subspaces() -> None: @@ -427,27 +427,27 @@ def test_orthogonality_with_aligned_subspaces() -> None: ) # Should have standard factor metrics with perfect fit - assert scalars["factor_0/r2"] > 0.99 # Should fit nearly perfectly - assert scalars["factor_1/r2"] > 0.99 + assert scalars["r2/F0"] > 0.99 # Should fit nearly perfectly + assert scalars["r2/F1"] > 0.99 # Should have ALL orthogonality metrics - assert "orthogonality_0_1/subspace_overlap" in scalars - assert "orthogonality_0_1/max_singular_value" in scalars - assert "orthogonality_0_1/min_singular_value" in scalars - assert "orthogonality_0_1/participation_ratio" in scalars - assert "orthogonality_0_1/entropy" in scalars - assert "orthogonality_0_1/effective_rank" in scalars + assert "orth/overlap/F0,1" in scalars + assert "orth/sv_max/F0,1" in scalars + assert "orth/sv_min/F0,1" in scalars + assert "orth/p_ratio/F0,1" in scalars + assert "orth/entropy/F0,1" in scalars + assert "orth/eff_rank/F0,1" in scalars # Should indicate high overlap (aligned by construction) - assert scalars["orthogonality_0_1/subspace_overlap"] > 0.99 - assert scalars["orthogonality_0_1/max_singular_value"] > 0.99 + assert scalars["orth/overlap/F0,1"] > 0.99 + assert scalars["orth/sv_max/F0,1"] > 0.99 # Should have 
singular values in arrays - assert "orthogonality_0_1/singular_values" in arrays + assert "orth/singular_values/F0,1" in arrays # Both factors have 2 dimensions, so min(2, 2) = 2 singular values - assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + assert arrays["orth/singular_values/F0,1"].shape[0] == 2 # All singular values should be near 1.0 (perfectly aligned) - assert jnp.all(arrays["orthogonality_0_1/singular_values"] > 0.99) + assert jnp.all(arrays["orth/singular_values/F0,1"] > 0.99) def test_orthogonality_with_three_factors() -> None: @@ -479,34 +479,34 @@ def test_orthogonality_with_three_factors() -> None: ) # Should have standard factor metrics for all three factors - assert scalars["factor_0/r2"] > 0.99 - assert scalars["factor_1/r2"] > 0.99 - assert scalars["factor_2/r2"] > 0.99 + assert scalars["r2/F0"] > 0.99 + assert scalars["r2/F1"] > 0.99 + assert scalars["r2/F2"] > 0.99 # Compute principled threshold based on machine precision and problem size threshold = _compute_orthogonality_threshold(x, factor_0, factor_1, factor_2) # Should have ALL three pairwise orthogonality combinations - pairwise_keys = ["orthogonality_0_1", "orthogonality_0_2", "orthogonality_1_2"] + pairwise_keys = ["F0,1", "F0,2", "F1,2"] for pair_key in pairwise_keys: - assert f"{pair_key}/subspace_overlap" in scalars - assert f"{pair_key}/max_singular_value" in scalars - assert f"{pair_key}/min_singular_value" in scalars - assert f"{pair_key}/participation_ratio" in scalars - assert f"{pair_key}/entropy" in scalars - assert f"{pair_key}/effective_rank" in scalars - assert f"{pair_key}/singular_values" in arrays + assert f"orth/overlap/{pair_key}" in scalars + assert f"orth/sv_max/{pair_key}" in scalars + assert f"orth/sv_min/{pair_key}" in scalars + assert f"orth/p_ratio/{pair_key}" in scalars + assert f"orth/entropy/{pair_key}" in scalars + assert f"orth/eff_rank/{pair_key}" in scalars + assert f"orth/singular_values/{pair_key}" in arrays # All pairs should be 
orthogonal (near-zero overlap) - overlap = scalars[f"{pair_key}/subspace_overlap"] - assert overlap < threshold, f"{pair_key} subspace_overlap={overlap} >= threshold={threshold}" + overlap = scalars[f"orth/overlap/{pair_key}"] + assert overlap < threshold, f"{pair_key} overlap={overlap} >= threshold={threshold}" - max_sv = scalars[f"{pair_key}/max_singular_value"] - assert max_sv < threshold, f"{pair_key} max_singular_value={max_sv} >= threshold={threshold}" + max_sv = scalars[f"orth/sv_max/{pair_key}"] + assert max_sv < threshold, f"{pair_key} sv_max={max_sv} >= threshold={threshold}" # Each pair has 2D subspaces, so 2 singular values - assert arrays[f"{pair_key}/singular_values"].shape[0] == 2 - svs = arrays[f"{pair_key}/singular_values"] + assert arrays[f"orth/singular_values/{pair_key}"].shape[0] == 2 + svs = arrays[f"orth/singular_values/{pair_key}"] assert jnp.all(svs < threshold), f"{pair_key} singular_values={svs} not all < threshold={threshold}" @@ -535,23 +535,23 @@ def test_orthogonality_not_computed_by_default() -> None: ) # Should have standard factor metrics - assert "factor_0/r2" in scalars - assert "factor_1/r2" in scalars + assert "r2/F0" in scalars + assert "r2/F1" in scalars # Should NOT have any orthogonality metrics orthogonality_keys = [ - "orthogonality_0_1/subspace_overlap", - "orthogonality_0_1/max_singular_value", - "orthogonality_0_1/min_singular_value", - "orthogonality_0_1/participation_ratio", - "orthogonality_0_1/entropy", - "orthogonality_0_1/effective_rank", + "orth/overlap/F0,1", + "orth/sv_max/F0,1", + "orth/sv_min/F0,1", + "orth/p_ratio/F0,1", + "orth/entropy/F0,1", + "orth/eff_rank/F0,1", ] for key in orthogonality_keys: assert key not in scalars # Should NOT have orthogonality singular values in arrays - assert "orthogonality_0_1/singular_values" not in arrays + assert "orth/singular_values/F0,1" not in arrays def test_orthogonality_warning_for_single_belief_state(caplog: pytest.LogCaptureFixture) -> None: @@ -582,8 +582,8 @@ 
def test_orthogonality_warning_for_single_belief_state(caplog: pytest.LogCapture assert "projected" in arrays # Should NOT have orthogonality metrics - assert "orthogonality_0_1/subspace_overlap" not in scalars - assert "orthogonality_0_1/singular_values" not in arrays + assert "orth/overlap/F0,1" not in scalars + assert "orth/singular_values/F0,1" not in arrays def test_use_svd_flag_equivalence() -> None: @@ -688,26 +688,26 @@ def test_use_svd_with_orthogonality() -> None: ) # Should have standard factor metrics with SVD - assert "factor_0/r2" in scalars - assert "factor_1/r2" in scalars - assert "factor_0/best_rcond" in scalars - assert "factor_1/best_rcond" in scalars + assert "r2/F0" in scalars + assert "r2/F1" in scalars + assert "best_rcond/F0" in scalars + assert "best_rcond/F1" in scalars # Should have orthogonality metrics - assert "orthogonality_0_1/subspace_overlap" in scalars - assert "orthogonality_0_1/max_singular_value" in scalars - assert "orthogonality_0_1/singular_values" in arrays + assert "orth/overlap/F0,1" in scalars + assert "orth/sv_max/F0,1" in scalars + assert "orth/singular_values/F0,1" in arrays # Compute principled threshold threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) # Should indicate near-zero overlap (orthogonal by construction) - assert scalars["orthogonality_0_1/subspace_overlap"] < threshold - assert scalars["orthogonality_0_1/max_singular_value"] < threshold + assert scalars["orth/overlap/F0,1"] < threshold + assert scalars["orth/sv_max/F0,1"] < threshold # Should have good regression fit - assert scalars["factor_0/r2"] > 0.99 - assert scalars["factor_1/r2"] > 0.99 + assert scalars["r2/F0"] > 0.99 + assert scalars["r2/F1"] > 0.99 def test_orthogonality_with_different_subspace_dimensions() -> None: @@ -758,24 +758,24 @@ def test_orthogonality_with_different_subspace_dimensions() -> None: ) # Should have standard factor metrics - assert scalars["factor_0/r2"] > 0.99 - assert scalars["factor_1/r2"] > 0.99 + 
assert scalars["r2/F0"] > 0.99 + assert scalars["r2/F1"] > 0.99 # Should have orthogonality metrics - assert "orthogonality_0_1/subspace_overlap" in scalars - assert "orthogonality_0_1/max_singular_value" in scalars - assert "orthogonality_0_1/singular_values" in arrays + assert "orth/overlap/F0,1" in scalars + assert "orth/sv_max/F0,1" in scalars + assert "orth/singular_values/F0,1" in arrays # Compute principled threshold threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) # Should indicate near-zero overlap (orthogonal by construction) - assert scalars["orthogonality_0_1/subspace_overlap"] < threshold - assert scalars["orthogonality_0_1/max_singular_value"] < threshold + assert scalars["orth/overlap/F0,1"] < threshold + assert scalars["orth/sv_max/F0,1"] < threshold # Singular values shape should be min(2, 5) = 2 - assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 - assert jnp.all(arrays["orthogonality_0_1/singular_values"] < threshold) + assert arrays["orth/singular_values/F0,1"].shape[0] == 2 + assert jnp.all(arrays["orth/singular_values/F0,1"] < threshold) def test_orthogonality_with_contained_subspace() -> None: @@ -827,23 +827,23 @@ def test_orthogonality_with_contained_subspace() -> None: ) # Should have standard factor metrics - assert scalars["factor_0/r2"] > 0.99 - assert scalars["factor_1/r2"] > 0.99 + assert scalars["r2/F0"] > 0.99 + assert scalars["r2/F1"] > 0.99 # Should have orthogonality metrics - assert "orthogonality_0_1/subspace_overlap" in scalars - assert "orthogonality_0_1/max_singular_value" in scalars - assert "orthogonality_0_1/singular_values" in arrays + assert "orth/overlap/F0,1" in scalars + assert "orth/sv_max/F0,1" in scalars + assert "orth/singular_values/F0,1" in arrays # Singular values shape should be min(2, 3) = 2 - assert arrays["orthogonality_0_1/singular_values"].shape[0] == 2 + assert arrays["orth/singular_values/F0,1"].shape[0] == 2 # Since factor_0's subspace is contained in factor_1's, 
singular values should be near 1.0 # (indicating perfect alignment in the 2D shared subspace) - assert scalars["orthogonality_0_1/subspace_overlap"] > 0.99 - assert scalars["orthogonality_0_1/max_singular_value"] > 0.99 - assert scalars["orthogonality_0_1/min_singular_value"] > 0.99 - assert jnp.all(arrays["orthogonality_0_1/singular_values"] > 0.99) + assert scalars["orth/overlap/F0,1"] > 0.99 + assert scalars["orth/sv_max/F0,1"] > 0.99 + assert scalars["orth/sv_min/F0,1"] > 0.99 + assert jnp.all(arrays["orth/singular_values/F0,1"] > 0.99) def test_orthogonality_excludes_intercept() -> None: @@ -876,27 +876,27 @@ def test_orthogonality_excludes_intercept() -> None: ) # Should have intercepts for both factors - assert "factor_0/intercept" in arrays - assert "factor_1/intercept" in arrays + assert "intercept/F0" in arrays + assert "intercept/F1" in arrays # Should have good regression fit - assert scalars["factor_0/r2"] > 0.99 - assert scalars["factor_1/r2"] > 0.99 + assert scalars["r2/F0"] > 0.99 + assert scalars["r2/F1"] > 0.99 # Orthogonality should still be near-zero (computed from coefficients only, not intercepts) threshold = _compute_orthogonality_threshold(x, factor_0, factor_1) - assert "orthogonality_0_1/subspace_overlap" in scalars - assert "orthogonality_0_1/max_singular_value" in scalars + assert "orth/overlap/F0,1" in scalars + assert "orth/sv_max/F0,1" in scalars - overlap = scalars["orthogonality_0_1/subspace_overlap"] - assert overlap < threshold, f"subspace_overlap={overlap} >= threshold={threshold}" + overlap = scalars["orth/overlap/F0,1"] + assert overlap < threshold, f"overlap={overlap} >= threshold={threshold}" - max_sv = scalars["orthogonality_0_1/max_singular_value"] - assert max_sv < threshold, f"max_singular_value={max_sv} >= threshold={threshold}" + max_sv = scalars["orth/sv_max/F0,1"] + assert max_sv < threshold, f"sv_max={max_sv} >= threshold={threshold}" # The different intercepts should not affect orthogonality - svs = 
arrays["orthogonality_0_1/singular_values"] + svs = arrays["orth/singular_values/F0,1"] assert jnp.all(svs < threshold), f"singular_values={svs} not all < threshold={threshold}" @@ -1004,14 +1004,14 @@ def test_layer_linear_regression_concat_vs_separate_equivalence() -> None: ) # Concat path should also provide combined arrays - assert "concat/projected" in arrays_cat - assert "concat/coeffs" in arrays_cat - assert "concat/intercept" in arrays_cat + assert "projected/Fcat" in arrays_cat + assert "coeffs/Fcat" in arrays_cat + assert "intercept/Fcat" in arrays_cat # Per-factor arrays should match between separate and concatenated flows for k in ["projected", "coeffs", "intercept"]: - chex.assert_trees_all_close(arrays_sep[f"factor_0/{k}"], arrays_cat[f"factor_0/{k}"]) - chex.assert_trees_all_close(arrays_sep[f"factor_1/{k}"], arrays_cat[f"factor_1/{k}"]) + chex.assert_trees_all_close(arrays_sep[f"{k}/F0"], arrays_cat[f"{k}/F0"]) + chex.assert_trees_all_close(arrays_sep[f"{k}/F1"], arrays_cat[f"{k}/F1"]) def test_layer_linear_regression_svd_concat_vs_separate_equivalence_best_rcond() -> None: @@ -1055,34 +1055,34 @@ def test_layer_linear_regression_svd_concat_vs_separate_equivalence_best_rcond() ) # Concat path should provide combined arrays and best_rcond - assert "concat/projected" in arrays_cat - assert "concat/coeffs" in arrays_cat - assert "concat/intercept" in arrays_cat - assert "concat/best_rcond" in scalars_cat - assert scalars_cat["concat/best_rcond"] == pytest.approx(1e-3) + assert "projected/Fcat" in arrays_cat + assert "coeffs/Fcat" in arrays_cat + assert "intercept/Fcat" in arrays_cat + assert "best_rcond/Fcat" in scalars_cat + assert scalars_cat["best_rcond/Fcat"] == pytest.approx(1e-3) # Separate path should include per-factor best_rcond; concat-split path should not - assert "factor_0/best_rcond" in scalars_sep - assert "factor_1/best_rcond" in scalars_sep - assert "factor_0/best_rcond" not in scalars_cat - assert "factor_1/best_rcond" not in 
scalars_cat + assert "best_rcond/F0" in scalars_sep + assert "best_rcond/F1" in scalars_sep + assert "best_rcond/F0" not in scalars_cat + assert "best_rcond/F1" not in scalars_cat # Per-factor arrays should match between separate and concat-split flows for k in ["projected", "coeffs", "intercept"]: - chex.assert_trees_all_close(arrays_sep[f"factor_0/{k}"], arrays_cat[f"factor_0/{k}"]) - chex.assert_trees_all_close(arrays_sep[f"factor_1/{k}"], arrays_cat[f"factor_1/{k}"]) + chex.assert_trees_all_close(arrays_sep[f"{k}/F0"], arrays_cat[f"{k}/F0"]) + chex.assert_trees_all_close(arrays_sep[f"{k}/F1"], arrays_cat[f"{k}/F1"]) # Overlapping scalar metrics should agree closely across flows for metric in ["r2", "rmse", "mae", "dist"]: assert jnp.isclose( - jnp.asarray(scalars_sep[f"factor_0/{metric}"]), - jnp.asarray(scalars_cat[f"factor_0/{metric}"]), + jnp.asarray(scalars_sep[f"{metric}/F0"]), + jnp.asarray(scalars_cat[f"{metric}/F0"]), atol=1e-6, rtol=0.0, ).item() assert jnp.isclose( - jnp.asarray(scalars_sep[f"factor_1/{metric}"]), - jnp.asarray(scalars_cat[f"factor_1/{metric}"]), + jnp.asarray(scalars_sep[f"{metric}/F1"]), + jnp.asarray(scalars_cat[f"{metric}/F1"]), atol=1e-6, rtol=0.0, ).item() diff --git a/tests/analysis/test_metric_keys.py b/tests/analysis/test_metric_keys.py new file mode 100644 index 00000000..7c6cc09c --- /dev/null +++ b/tests/analysis/test_metric_keys.py @@ -0,0 +1,61 @@ +"""Tests for the metric key construction utility functions.""" + +from simplexity.analysis.metric_keys import construct_layer_specific_key, format_layer_spec + + +def test_construct_layer_specific_key_given_factor_specific_key() -> None: + """Test that the function adds layer name before the factor-specific key.""" + key = "rmse/F0" + layer_name = "L1.resid.post" + expected_key = "rmse/L1.resid.post-F0" + assert construct_layer_specific_key(key, layer_name) == expected_key + + +def test_construct_layer_specific_key_given_non_factor_specific_key() -> None: + """Test that the 
function adds layer name before the non-factor-specific key.""" + key = "r2" + layer_name = "L1.resid.post" + expected_key = "r2/L1.resid.post" + assert construct_layer_specific_key(key, layer_name) == expected_key + + +def test_format_layer_spec_concatenated() -> None: + """Test that the function returns the correct format for concatenated layers.""" + layer_name = "concatenated" + expected_key = "Lcat" + assert format_layer_spec(layer_name) == expected_key + + +def test_format_layer_spec_block_and_hook_layer() -> None: + """Test that the function returns the correct format for block and hook layer name.""" + layer_name = "blocks.2.hook_resid_post" + expected_key = "L2.resid.post" + assert format_layer_spec(layer_name) == expected_key + + +def test_format_layer_spec_special_layer() -> None: + """Test that the function returns the correct format for special layer name.""" + layer_name = "embed" + expected_key = "embed" + assert format_layer_spec(layer_name) == expected_key + + +def test_format_layer_spec_block_layer_with_no_hook_name() -> None: + """Test that the function returns the input layer name if it is a block layer name with no hook name.""" + layer_name = "blocks.2" + expected_key = "blocks.2" + assert format_layer_spec(layer_name) == expected_key + + +def test_format_layer_spec_block_and_hook_layer_with_no_block_number() -> None: + """Test that the function returns the input layer name if it is a block and hook layer name with no block number.""" + layer_name = "blocks.hook_resid_post" + expected_key = "blocks.hook_resid_post" + assert format_layer_spec(layer_name) == expected_key + + +def test_format_layer_spec_block_and_hook_layer_with_extra_structure() -> None: + """Test that the function returns the correct format if it is a block and hook layer name with extra structure.""" + layer_name = "blocks.2.hook_resid_post.invalid" + expected_key = "L2.resid.post.invalid" + assert format_layer_spec(layer_name) == expected_key diff --git 
a/tests/analysis/test_pca.py b/tests/analysis/test_pca.py index 9743fd59..b4fdebb0 100644 --- a/tests/analysis/test_pca.py +++ b/tests/analysis/test_pca.py @@ -39,7 +39,7 @@ def test_layer_pca_analysis_metrics() -> None: variance_thresholds=(0.5,), ) assert "cumvar_1" in scalars - assert "n_components_50pct" in scalars + assert "nc_50" in scalars assert "pca" in projections assert projections["pca"].shape == (3, 2) @@ -100,7 +100,7 @@ def test_layer_pca_analysis_zero_variance_threshold_reporting() -> None: belief_states=None, variance_thresholds=(0.5,), ) - assert scalars["n_components_50pct"] == 3.0 + assert scalars["nc_50"] == 3.0 assert projections["pca"].shape == (4, 3) diff --git a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml index be729909..aa6c7a38 100644 --- a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml +++ b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml @@ -48,7 +48,7 @@ instance: cumulative: false data_mapping: scalar_series: - key_template: "{layer}_cumvar_{index}" + key_template: "cumvar_{index}/{layer}" index_field: n_components value_field: cumulative_explained_variance backend: altair @@ -173,7 +173,7 @@ instance: dropdown: layer # User can filter by layer in UI data_mapping: mappings: - rmse: {source: scalar_pattern, key: "blocks.*.hook_resid_post_rmse"} # Wildcard expands to all layers + rmse: {source: scalar_pattern, key: "rmse/L*.resid.post"} # Wildcard expands to all layers backend: altair layer: geometry: diff --git a/tests/structured_configs/test_activation_tracker_config.py b/tests/structured_configs/test_activation_tracker_config.py index 3db4dea0..fa9d4607 100644 --- a/tests/structured_configs/test_activation_tracker_config.py +++ b/tests/structured_configs/test_activation_tracker_config.py @@ -256,7 +256,7 @@ def test_instantiate_activation_tracker_builds_analysis_objects(tracker_cfg: Dic probs=probs, activations=activations, 
) - assert "pca_custom/layer_cumvar_1" in scalars + assert "pca_custom/cumvar_1/layer" in scalars assert any(key.startswith("linear/") for key in projections) assert visualizations == {} From fb284915dc50841bc5daf53f300b4ec98fe6cc2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Wed, 7 Jan 2026 14:05:49 -0800 Subject: [PATCH 14/35] reduce number of metrics returned from variance analysis (#162) * add xavier's leaky RRXOR (#130) * reduce number of metrics returned from variance analysis * rename * Update simplexity/activations/visualization/pattern_expansion.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * abbreviate --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- simplexity/activations/activation_analyses.py | 2 +- simplexity/activations/activation_tracker.py | 12 +- .../activations/activation_visualizations.py | 4 +- .../activations/visualization/__init__.py | 4 +- .../visualization/dataframe_builders.py | 16 +- .../visualization/field_resolution.py | 22 +- .../visualization/pattern_expansion.py | 52 ++-- .../activations/visualization_configs.py | 22 +- simplexity/analysis/layerwise_analysis.py | 12 +- simplexity/analysis/linear_regression.py | 10 +- simplexity/analysis/pca.py | 6 +- tests/activations/test_activation_analysis.py | 140 +++++------ tests/activations/test_activation_tracker.py | 12 +- .../test_activation_visualizations.py | 20 +- .../activations/test_dataframe_integration.py | 52 ++-- tests/activations/test_field_expansion.py | 224 +++++++++--------- tests/activations/test_scalar_history.py | 10 +- .../activations/test_visualization_modules.py | 82 +++---- tests/analysis/test_layerwise_analysis.py | 13 +- tests/analysis/test_pca.py | 16 +- .../with_factor_visuals.yaml | 14 +- .../activation_tracker/with_visuals.yaml | 45 +--- .../test_activation_tracker_config.py | 6 +- 23 files changed, 382 
insertions(+), 414 deletions(-) diff --git a/simplexity/activations/activation_analyses.py b/simplexity/activations/activation_analyses.py index a365093c..a0069036 100644 --- a/simplexity/activations/activation_analyses.py +++ b/simplexity/activations/activation_analyses.py @@ -42,7 +42,7 @@ def analyze( weights: jax.Array, belief_states: jax.Array | tuple[jax.Array, ...] | None = None, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: - """Analyze activations and return scalar metrics and projections.""" + """Analyze activations and return scalar metrics and arrays.""" ... diff --git a/simplexity/activations/activation_tracker.py b/simplexity/activations/activation_tracker.py index 63e274eb..01281d7f 100644 --- a/simplexity/activations/activation_tracker.py +++ b/simplexity/activations/activation_tracker.py @@ -182,7 +182,7 @@ def analyze( preprocessing_cache[config_key] = prepared all_scalars = {} - all_projections = {} + all_arrays = {} all_visualizations: dict[str, ActivationVisualizationPayload] = {} for analysis_name, analysis in self._analyses.items(): @@ -203,7 +203,7 @@ def analyze( f"Analysis '{analysis_name}' requires belief_states but none available after preprocessing." 
) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared_activations, weights=prepared_weights, belief_states=prepared_beliefs, @@ -211,7 +211,7 @@ def analyze( namespaced_scalars = {f"{analysis_name}/{key}": value for key, value in scalars.items()} all_scalars.update(namespaced_scalars) - all_projections.update({f"{analysis_name}/{key}": value for key, value in projections.items()}) + all_arrays.update({f"{analysis_name}/{key}": value for key, value in arrays.items()}) if step is not None: for scalar_key, scalar_value in namespaced_scalars.items(): @@ -230,7 +230,7 @@ def analyze( np_beliefs = np.stack([np.asarray(b) for b in prepared_beliefs], axis=1) else: np_beliefs = np.asarray(prepared_beliefs) - np_projections = {key: np.asarray(value) for key, value in projections.items()} + np_arrays = {key: np.asarray(value) for key, value in arrays.items()} payloads = build_visualization_payloads( analysis_name, viz_configs, @@ -238,7 +238,7 @@ def analyze( prepared_metadata=prepared.metadata, weights=np_weights, belief_states=np_beliefs, - projections=np_projections, + arrays=np_arrays, scalars={f"{analysis_name}/{key}": float(value) for key, value in scalars.items()}, scalar_history=self._scalar_history, scalar_history_step=step, @@ -247,7 +247,7 @@ def analyze( ) all_visualizations.update({f"{analysis_name}/{payload.name}": payload for payload in payloads}) - return all_scalars, all_projections, all_visualizations + return all_scalars, all_arrays, all_visualizations def save_visualizations( self, diff --git a/simplexity/activations/activation_visualizations.py b/simplexity/activations/activation_visualizations.py index 44da4769..cd3d0e01 100644 --- a/simplexity/activations/activation_visualizations.py +++ b/simplexity/activations/activation_visualizations.py @@ -165,7 +165,7 @@ def build_visualization_payloads( prepared_metadata: PreparedMetadata, weights: np.ndarray, belief_states: np.ndarray | None, - projections: 
Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], scalars: Mapping[str, float], scalar_history: Mapping[str, list[tuple[int, float]]], scalar_history_step: int | None, @@ -179,7 +179,7 @@ def build_visualization_payloads( dataframe = _build_dataframe( viz_cfg, metadata_columns, - projections, + arrays, scalars, scalar_history, scalar_history_step, diff --git a/simplexity/activations/visualization/__init__.py b/simplexity/activations/visualization/__init__.py index 72b59cde..4d80976f 100644 --- a/simplexity/activations/visualization/__init__.py +++ b/simplexity/activations/visualization/__init__.py @@ -12,7 +12,7 @@ _build_metadata_columns, ) from simplexity.activations.visualization.field_resolution import ( - _lookup_projection_array, + _lookup_array, _lookup_scalar_value, _maybe_component, _resolve_belief_states, @@ -40,7 +40,7 @@ "_expand_field_mapping", "_has_field_pattern", "_has_key_pattern", - "_lookup_projection_array", + "_lookup_array", "_lookup_scalar_value", "_maybe_component", "_parse_component_spec", diff --git a/simplexity/activations/visualization/dataframe_builders.py b/simplexity/activations/visualization/dataframe_builders.py index a689028c..4d6164ff 100644 --- a/simplexity/activations/visualization/dataframe_builders.py +++ b/simplexity/activations/visualization/dataframe_builders.py @@ -230,7 +230,7 @@ def _scalar_series_metadata(metadata_columns: Mapping[str, Any]) -> dict[str, An def _build_dataframe_for_mappings( mappings: dict[str, ActivationVisualizationFieldRef], metadata_columns: Mapping[str, Any], - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], scalars: Mapping[str, float], belief_states: np.ndarray | None, analysis_concat_layers: bool, @@ -250,7 +250,7 @@ def _build_dataframe_for_mappings( for field_name, ref in mappings.items(): try: expanded = _expand_field_mapping( - field_name, ref, layer_name, projections, scalars, belief_states, analysis_concat_layers + field_name, ref, layer_name, arrays, 
scalars, belief_states, analysis_concat_layers ) expanded_mappings.update(expanded) except ConfigValidationError as e: @@ -295,7 +295,7 @@ def _build_dataframe_for_mappings( group_data[column] = _resolve_field( ref, layer_name, - projections, + arrays, scalars, belief_states, analysis_concat_layers, @@ -309,7 +309,7 @@ def _build_dataframe_for_mappings( group_data[base_col_name] = _resolve_field( ref, layer_name, - projections, + arrays, scalars, belief_states, analysis_concat_layers, @@ -327,7 +327,7 @@ def _build_dataframe_for_mappings( layer_data[column] = _resolve_field( ref, layer_name, - projections, + arrays, scalars, belief_states, analysis_concat_layers, @@ -342,7 +342,7 @@ def _build_dataframe_for_mappings( def _build_dataframe( viz_cfg: ActivationVisualizationConfig, metadata_columns: Mapping[str, Any], - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], scalars: Mapping[str, float], scalar_history: Mapping[str, list[tuple[int, float]]], scalar_history_step: int | None, @@ -361,7 +361,7 @@ def _build_dataframe( section_df = _build_dataframe_for_mappings( section.mappings, metadata_columns, - projections, + arrays, scalars, belief_states, analysis_concat_layers, @@ -409,7 +409,7 @@ def _build_dataframe( return _build_dataframe_for_mappings( viz_cfg.data_mapping.mappings, metadata_columns, - projections, + arrays, scalars, belief_states, analysis_concat_layers, diff --git a/simplexity/activations/visualization/field_resolution.py b/simplexity/activations/visualization/field_resolution.py index 913a92cc..38f5e23b 100644 --- a/simplexity/activations/visualization/field_resolution.py +++ b/simplexity/activations/visualization/field_resolution.py @@ -1,4 +1,4 @@ -"""Field resolution from projections, scalars, and belief states.""" +"""Field resolution from arrays, scalars, and belief states.""" from __future__ import annotations @@ -11,10 +11,8 @@ from simplexity.exceptions import ConfigValidationError -def _lookup_projection_array( - 
projections: Mapping[str, np.ndarray], layer_name: str, key: str, concat_layers: bool -) -> np.ndarray: - """Look up a projection array by key, handling layer naming conventions. +def _lookup_array(arrays: Mapping[str, np.ndarray], layer_name: str, key: str, concat_layers: bool) -> np.ndarray: + """Look up an array by key, handling layer naming conventions. Supports keys in the format "{analysis}/{layer_spec}" (e.g., "pca/L0.resid.pre") or "{analysis}/{layer_spec}-{factor_spec}" (e.g., "reg/L0.resid.pre-F0"). @@ -22,14 +20,14 @@ def _lookup_projection_array( When key contains a factor suffix (e.g., "projected/F0"), looks for the full key "{analysis}/{layer_spec}-{factor_spec}" (e.g., "projected/L0.resid.pre-F0"). """ - for full_key, value in projections.items(): + for full_key, value in arrays.items(): if concat_layers: if full_key == key or full_key.startswith(f"{key}/"): return value else: if _key_matches_layer(full_key, key, layer_name): return value - raise ConfigValidationError(f"Projection '{key}' not available for layer '{layer_name}'.") + raise ConfigValidationError(f"Array '{key}' not available for layer '{layer_name}'.") def _key_matches_layer(full_key: str, key: str, layer_name: str) -> bool: @@ -147,7 +145,7 @@ def _resolve_belief_states(belief_states: np.ndarray, ref: ActivationVisualizati def _resolve_field( ref: ActivationVisualizationFieldRef, layer_name: str, - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], scalars: Mapping[str, float], belief_states: np.ndarray | None, analysis_concat_layers: bool, @@ -169,10 +167,10 @@ def _resolve_field( raise ConfigValidationError("Weight metadata is unavailable for visualization mapping.") return np.asarray(metadata_columns["weight"]) - if ref.source == "projections": + if ref.source == "arrays": if ref.key is None: - raise ConfigValidationError("Projection references must supply a `key` value.") - array = _lookup_projection_array(projections, layer_name, ref.key, 
analysis_concat_layers) + raise ConfigValidationError("Array references must supply a `key` value.") + array = _lookup_array(arrays, layer_name, ref.key, analysis_concat_layers) if isinstance(ref.component, str): raise ConfigValidationError("Component indices should be expanded before resolution") return _maybe_component(array, ref.component) @@ -192,7 +190,7 @@ def _resolve_field( __all__ = [ - "_lookup_projection_array", + "_lookup_array", "_lookup_scalar_value", "_maybe_component", "_resolve_belief_states", diff --git a/simplexity/activations/visualization/pattern_expansion.py b/simplexity/activations/visualization/pattern_expansion.py index efe9c206..6190e1a4 100644 --- a/simplexity/activations/visualization/pattern_expansion.py +++ b/simplexity/activations/visualization/pattern_expansion.py @@ -7,7 +7,7 @@ import numpy as np -from simplexity.activations.visualization.field_resolution import _lookup_projection_array +from simplexity.activations.visualization.field_resolution import _lookup_array from simplexity.activations.visualization.pattern_utils import ( build_wildcard_regex, count_patterns, @@ -104,15 +104,15 @@ def _expand_pattern_to_indices( def _get_component_count( ref: ActivationVisualizationFieldRef, layer_name: str, - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], belief_states: np.ndarray | None, analysis_concat_layers: bool, ) -> int: """Get number of components available for expansion.""" - if ref.source == "projections": + if ref.source == "arrays": if ref.key is None: - raise ConfigValidationError("Projection refs require key") - array = _lookup_projection_array(projections, layer_name, ref.key, analysis_concat_layers) + raise ConfigValidationError("Array refs require key") + array = _lookup_array(arrays, layer_name, ref.key, analysis_concat_layers) np_array = np.asarray(array) if np_array.ndim == 1: raise ConfigValidationError(f"Cannot expand 1D projection '{ref.key}'. 
Patterns require 2D arrays.") @@ -131,18 +131,18 @@ def _get_component_count( raise ConfigValidationError(f"Component expansion not supported for source: {ref.source}") -def _expand_projection_key_pattern( +def _expand_array_key_pattern( key_pattern: str, layer_name: str, - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], analysis_concat_layers: bool, ) -> dict[str, str]: - """Expand projection key patterns against available keys. + """Expand array key patterns against available keys. Args: key_pattern: Pattern like "factor_*/projected" or "factor_0...3/projected" layer_name: Current layer name for matching - projections: Available projection arrays + arrays: Available arrays analysis_concat_layers: Whether layers were concatenated Returns: @@ -170,9 +170,9 @@ def _expand_projection_key_pattern( result[str(idx)] = concrete_key return result - # Match against available projection keys + # Match against available arrays result: dict[str, str] = {} - for full_key in projections: + for full_key in arrays: # Extract the key suffix for pattern matching if analysis_concat_layers: # Keys are like "analysis/Lcat" or "analysis/Lcat-F0" directly @@ -209,21 +209,21 @@ def _expand_projection_key_pattern( if not result: raise ConfigValidationError( - f"No projection keys found matching pattern '{key_pattern}' for layer '{layer_name}'. " - f"Available keys: {list(projections.keys())}" + f"No array keys found matching pattern '{key_pattern}' for layer '{layer_name}'. " + f"Available keys: {list(arrays.keys())}" ) return result -def _expand_projection_key_mapping( +def _expand_array_key_mapping( field_name: str, ref: ActivationVisualizationFieldRef, layer_name: str, - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], analysis_concat_layers: bool, ) -> dict[str, ActivationVisualizationFieldRef]: - """Expand projection key patterns, optionally combined with component patterns. 
+ """Expand array key patterns, optionally combined with component patterns. Handles cross-product expansion when both key and component patterns are present. Sets _group_value on expanded refs for DataFrame construction. @@ -231,7 +231,7 @@ def _expand_projection_key_mapping( assert ref.key is not None, "Key must be provided for projection key pattern expansion" # Expand key pattern to get concrete keys - key_expansions = _expand_projection_key_pattern(ref.key, layer_name, projections, analysis_concat_layers) + key_expansions = _expand_array_key_pattern(ref.key, layer_name, arrays, analysis_concat_layers) # Check if component expansion is also needed spec_type, start_idx, end_idx = _parse_component_spec(ref.component) @@ -245,7 +245,7 @@ def _expand_projection_key_mapping( for group_idx, concrete_key in sorted(key_expansions.items(), key=lambda x: int(x[0])): if needs_component_expansion: # Get component count for this specific key - array = _lookup_projection_array(projections, layer_name, concrete_key, analysis_concat_layers) + array = _lookup_array(arrays, layer_name, concrete_key, analysis_concat_layers) np_array = np.asarray(array) if np_array.ndim != 2: raise ConfigValidationError( @@ -281,7 +281,7 @@ def _expand_projection_key_mapping( ) expanded[expanded_name] = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key=concrete_key, component=comp_idx, reducer=ref.reducer, @@ -293,7 +293,7 @@ def _expand_projection_key_mapping( expanded_name = substitute_pattern(field_name, int(group_idx)) expanded[expanded_name] = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key=concrete_key, component=ref.component, # Keep original (could be None or int) reducer=ref.reducer, @@ -501,7 +501,7 @@ def _expand_field_mapping( field_name: str, ref: ActivationVisualizationFieldRef, layer_name: str, - projections: Mapping[str, np.ndarray], + arrays: Mapping[str, np.ndarray], scalars: Mapping[str, float], belief_states: 
np.ndarray | None, analysis_concat_layers: bool, @@ -511,7 +511,7 @@ def _expand_field_mapping( Returns dict of expanded field_name → FieldRef with concrete component/key values. """ # Check for projection key patterns FIRST (allows multiple field patterns for key+component) - if ref.source == "projections" and ref.key and _has_key_pattern(ref.key): + if ref.source == "arrays" and ref.key and _has_key_pattern(ref.key): # For key pattern expansion, we allow up to 2 patterns in field name # (one for key expansion, one for component expansion) total_field_patterns = count_patterns(field_name) @@ -523,7 +523,7 @@ def _expand_field_mapping( f"Field name '{field_name}' has too many patterns (max 2 for key+component expansion)" ) - return _expand_projection_key_mapping(field_name, ref, layer_name, projections, analysis_concat_layers) + return _expand_array_key_mapping(field_name, ref, layer_name, arrays, analysis_concat_layers) # Check for belief state factor patterns if ref.source == "belief_states" and ref.factor is not None and isinstance(ref.factor, str): @@ -572,7 +572,7 @@ def _expand_field_mapping( if not needs_expansion: return {field_name: ref} - max_components = _get_component_count(ref, layer_name, projections, belief_states, analysis_concat_layers) + max_components = _get_component_count(ref, layer_name, arrays, belief_states, analysis_concat_layers) if spec_type == "wildcard": components = list(range(max_components)) @@ -603,8 +603,8 @@ def _expand_field_mapping( "_expand_belief_factor_mapping", "_expand_field_mapping", "_expand_pattern_to_indices", - "_expand_projection_key_mapping", - "_expand_projection_key_pattern", + "_expand_array_key_mapping", + "_expand_array_key_pattern", "_expand_scalar_keys", "_expand_scalar_pattern_keys", "_expand_scalar_pattern_ranges", diff --git a/simplexity/activations/visualization_configs.py b/simplexity/activations/visualization_configs.py index b9417c19..027ad45b 100644 --- a/simplexity/activations/visualization_configs.py 
+++ b/simplexity/activations/visualization_configs.py @@ -18,9 +18,7 @@ PlotSizeConfig, ) -FieldSource = Literal[ - "projections", "scalars", "belief_states", "weights", "metadata", "scalar_pattern", "scalar_history" -] +FieldSource = Literal["arrays", "scalars", "belief_states", "weights", "metadata", "scalar_pattern", "scalar_history"] ReducerType = Literal["argmax", "l2_norm"] T = TypeVar("T") @@ -133,7 +131,7 @@ class ActivationVisualizationFieldRef: _group_value: str | None = None # Internal: populated during key/factor pattern expansion def __post_init__(self) -> None: - if self.source == "projections" and not self.key: + if self.source == "arrays" and not self.key: raise ConfigValidationError("Projection field references must specify the `key` to read from.") if self.source == "scalars" and not self.key: raise ConfigValidationError("Scalar field references must specify the `key` to read from.") @@ -147,13 +145,13 @@ def __post_init__(self) -> None: if isinstance(self.component, str): if self.component != "*" and not is_valid_range(self.component): raise ConfigValidationError(f"Component pattern '{self.component}' invalid. 
Use '*' or 'N...M'") - if self.source not in ("projections", "belief_states"): + if self.source not in ("arrays", "belief_states"): raise ConfigValidationError( - f"Component patterns only supported for projections/belief_states, not '{self.source}'" + f"Component patterns only supported for arrays/belief_states, not '{self.source}'" ) - # Validate key patterns for projections - if self.source == "projections" and self.key: + # Validate key patterns for arrays + if self.source == "arrays" and self.key: has_key_pattern = "*" in self.key or is_valid_range(self.key) # Key patterns require group_as to name the resulting column(s) if has_key_pattern and self.group_as is None: @@ -173,10 +171,8 @@ def __post_init__(self) -> None: ) # Validate group_as - if self.group_as is not None and self.source not in ("projections", "belief_states"): - raise ConfigValidationError( - f"`group_as` is only supported for projections/belief_states, not '{self.source}'" - ) + if self.group_as is not None and self.source not in ("arrays", "belief_states"): + raise ConfigValidationError(f"`group_as` is only supported for arrays/belief_states, not '{self.source}'") @dataclass @@ -200,7 +196,7 @@ def __post_init__(self) -> None: class CombinedMappingSection: """A labeled section of field mappings for combining multiple data sources. - Used to combine projections and ground truth belief states into a single + Used to combine arrays and ground truth belief states into a single DataFrame with a label column for faceting (e.g., row faceting by data_type). """ diff --git a/simplexity/analysis/layerwise_analysis.py b/simplexity/analysis/layerwise_analysis.py index fe47b9cd..12a87226 100644 --- a/simplexity/analysis/layerwise_analysis.py +++ b/simplexity/analysis/layerwise_analysis.py @@ -184,13 +184,13 @@ def analyze( weights: jax.Array, belief_states: jax.Array | tuple[jax.Array, ...] 
| None = None, ) -> tuple[Mapping[str, float], Mapping[str, jax.Array]]: - """Analyze activations and return namespaced scalar metrics and projections.""" + """Analyze activations and return namespaced scalar metrics and arrays.""" if self._requires_belief_states and belief_states is None: raise ValueError("This analysis requires belief_states") scalars: dict[str, float] = {} - projections: dict[str, jax.Array] = {} + arrays: dict[str, jax.Array] = {} for layer_name, layer_activations in activations.items(): - layer_scalars, layer_projections = self._analysis_fn( + layer_scalars, layer_arrays = self._analysis_fn( layer_activations, weights, belief_states, @@ -200,10 +200,10 @@ def analyze( for key, value in layer_scalars.items(): constructed_key = construct_layer_specific_key(key, formatted_layer_name) scalars[constructed_key] = value - for key, value in layer_projections.items(): + for key, value in layer_arrays.items(): constructed_key = construct_layer_specific_key(key, formatted_layer_name) - projections[constructed_key] = value - return scalars, projections + arrays[constructed_key] = value + return scalars, arrays __all__ = ["LayerwiseAnalysis", "ANALYSIS_REGISTRY"] diff --git a/simplexity/analysis/linear_regression.py b/simplexity/analysis/linear_regression.py index 9a37658b..421c43fb 100644 --- a/simplexity/analysis/linear_regression.py +++ b/simplexity/analysis/linear_regression.py @@ -506,18 +506,18 @@ def _apply_layer_regression( """Apply a regression function, optionally per-factor.""" if to_factors: scalars: dict[str, float] = {} - projections: dict[str, jax.Array] = {} + arrays: dict[str, jax.Array] = {} if not isinstance(belief_states, tuple): raise ValueError("belief_states must be a tuple when to_factors is True") for factor_idx, factor in enumerate(belief_states): if not isinstance(factor, jax.Array): raise ValueError("Each factor in belief_states must be a jax.Array") - factor_scalars, factor_projections = regression_fn(layer_activations, factor, 
weights, **kwargs) + factor_scalars, factor_arrays = regression_fn(layer_activations, factor, weights, **kwargs) for key, value in factor_scalars.items(): scalars[f"factor_{factor_idx}/{key}"] = value - for key, value in factor_projections.items(): - projections[f"factor_{factor_idx}/{key}"] = value - return scalars, projections + for key, value in factor_arrays.items(): + arrays[f"factor_{factor_idx}/{key}"] = value + return scalars, arrays else: targets = jnp.concatenate(belief_states, axis=-1) if isinstance(belief_states, tuple) else belief_states return regression_fn(layer_activations, targets, weights, **kwargs) diff --git a/simplexity/analysis/pca.py b/simplexity/analysis/pca.py index 625260f7..99221fb9 100644 --- a/simplexity/analysis/pca.py +++ b/simplexity/analysis/pca.py @@ -115,8 +115,6 @@ def layer_pca_analysis( cumulative_variance = jnp.cumsum(result["explained_variance_ratio"]) scalars: dict[str, float] = {} - for idx, value in enumerate(cumulative_variance, start=1): - scalars[f"cumvar_{idx}"] = float(value) scalars["var_exp"] = float(cumulative_variance[-1]) threshold_counts = variance_threshold_counts( @@ -127,8 +125,8 @@ def layer_pca_analysis( percentage = int(threshold * 100) scalars[f"nc_{percentage}"] = float(count) - projections = {"pca": result["X_proj"]} - return scalars, projections + arrays = {"pca": result["X_proj"], "cev": cumulative_variance} + return scalars, arrays __all__ = [ diff --git a/tests/activations/test_activation_analysis.py b/tests/activations/test_activation_analysis.py index 256221f7..15439376 100644 --- a/tests/activations/test_activation_analysis.py +++ b/tests/activations/test_activation_analysis.py @@ -231,7 +231,7 @@ def test_basic_regression(self, synthetic_data): ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -243,13 +243,13 @@ def test_basic_regression(self, synthetic_data): 
assert "dist/layer_0" in scalars assert "r2/layer_1" in scalars - assert "projected/layer_0" in projections - assert "projected/layer_1" in projections + assert "projected/layer_0" in arrays + assert "projected/layer_1" in arrays assert prepared.belief_states is not None assert isinstance(prepared.belief_states, jax.Array) - assert projections["projected/layer_0"].shape == prepared.belief_states.shape - assert projections["projected/layer_1"].shape == prepared.belief_states.shape + assert arrays["projected/layer_0"].shape == prepared.belief_states.shape + assert arrays["projected/layer_1"].shape == prepared.belief_states.shape def test_requires_belief_states(self, synthetic_data): """Test that analysis raises error without belief_states.""" @@ -292,14 +292,14 @@ def test_uniform_weights(self, synthetic_data): ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, ) assert "r2/layer_0" in scalars - assert "projected/layer_0" in projections + assert "projected/layer_0" in arrays class TestLinearRegressionSVDAnalysis: @@ -321,7 +321,7 @@ def test_basic_regression_svd(self, synthetic_data): ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -335,13 +335,13 @@ def test_basic_regression_svd(self, synthetic_data): assert "r2/layer_1" in scalars assert "best_rcond/layer_1" in scalars - assert "projected/layer_0" in projections - assert "projected/layer_1" in projections + assert "projected/layer_0" in arrays + assert "projected/layer_1" in arrays assert prepared.belief_states is not None assert isinstance(prepared.belief_states, jax.Array) - assert projections["projected/layer_0"].shape == prepared.belief_states.shape - assert projections["projected/layer_1"].shape == prepared.belief_states.shape + assert 
arrays["projected/layer_0"].shape == prepared.belief_states.shape + assert arrays["projected/layer_1"].shape == prepared.belief_states.shape # Check that best_rcond is one of the provided values assert scalars["best_rcond/layer_0"] in [1e-15, 1e-10, 1e-8] @@ -391,27 +391,27 @@ def test_basic_pca(self, synthetic_data): ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, ) - assert "cumvar_1/layer_0" in scalars - assert "cumvar_2/layer_0" in scalars - assert "cumvar_3/layer_0" in scalars - assert scalars["cumvar_1/layer_0"] <= scalars["cumvar_2/layer_0"] - assert scalars["cumvar_2/layer_0"] <= scalars["cumvar_3/layer_0"] + assert "var_exp/layer_0" in scalars assert "nc_80/layer_0" in scalars assert "nc_90/layer_0" in scalars - assert "cumvar_1/layer_1" in scalars + assert "var_exp/layer_1" in scalars - assert "pca/layer_0" in projections - assert "pca/layer_1" in projections + assert "pca/layer_0" in arrays + assert "pca/layer_1" in arrays + assert "cev/layer_0" in arrays + assert "cev/layer_1" in arrays batch_size = prepared.activations["layer_0"].shape[0] - assert projections["pca/layer_0"].shape == (batch_size, 3) - assert projections["pca/layer_1"].shape == (batch_size, 3) + assert arrays["pca/layer_0"].shape == (batch_size, 3) + assert arrays["pca/layer_1"].shape == (batch_size, 3) + assert arrays["cev/layer_0"].shape == (3,) + assert arrays["cev/layer_1"].shape == (3,) def test_pca_without_belief_states(self, synthetic_data): """Test PCA works without belief_states.""" @@ -431,15 +431,15 @@ def test_pca_without_belief_states(self, synthetic_data): prepared.belief_states = None - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, ) - assert "cumvar_1/layer_0" in scalars - assert "cumvar_2/layer_0" in scalars 
- assert "pca/layer_0" in projections + assert "var_exp/layer_0" in scalars + assert "pca/layer_0" in arrays + assert "cev/layer_0" in arrays def test_pca_all_components(self, synthetic_data): """Test PCA with n_components=None computes all components.""" @@ -457,7 +457,7 @@ def test_pca_all_components(self, synthetic_data): ), ) - _, projections = analysis.analyze( + _, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -465,7 +465,7 @@ def test_pca_all_components(self, synthetic_data): batch_size = prepared.activations["layer_0"].shape[0] d_layer0 = synthetic_data["d_layer0"] - assert projections["pca/layer_0"].shape == (batch_size, min(batch_size, d_layer0)) + assert arrays["pca/layer_0"].shape == (batch_size, min(batch_size, d_layer0)) class TestActivationTracker: @@ -487,7 +487,7 @@ def test_basic_tracking(self, synthetic_data): } ) - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -497,8 +497,8 @@ def test_basic_tracking(self, synthetic_data): assert "regression/r2/layer_0" in scalars assert "pca/var_exp/layer_0" in scalars - assert "regression/projected/layer_0" in projections - assert "pca/pca/layer_0" in projections + assert "regression/projected/layer_0" in arrays + assert "pca/pca/layer_0" in arrays assert visualizations == {} def test_all_tokens_mode(self, synthetic_data): @@ -512,7 +512,7 @@ def test_all_tokens_mode(self, synthetic_data): } ) - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -520,7 +520,7 @@ def test_all_tokens_mode(self, synthetic_data): ) assert "regression/r2/layer_0" in scalars - assert "regression/projected/layer_0" in projections + 
assert "regression/projected/layer_0" in arrays assert visualizations == {} def test_mixed_requirements(self, synthetic_data): @@ -566,7 +566,7 @@ def test_concatenated_layers(self, synthetic_data): } ) - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -576,8 +576,8 @@ def test_concatenated_layers(self, synthetic_data): assert "regression/r2/Lcat" in scalars assert "pca/var_exp/Lcat" in scalars - assert "regression/projected/Lcat" in projections - assert "pca/pca/Lcat" in projections + assert "regression/projected/Lcat" in arrays + assert "pca/pca/Lcat" in arrays assert visualizations == {} def test_uniform_weights(self, synthetic_data): @@ -623,7 +623,7 @@ def test_multiple_configs_efficiency(self, synthetic_data): } ) - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], @@ -634,9 +634,9 @@ def test_multiple_configs_efficiency(self, synthetic_data): assert "pca_last_token/var_exp/layer_0" in scalars assert "regression_concat/r2/Lcat" in scalars - assert "pca_all_tokens/pca/layer_0" in projections - assert "pca_last_token/pca/layer_0" in projections - assert "regression_concat/projected/Lcat" in projections + assert "pca_all_tokens/pca/layer_0" in arrays + assert "pca_last_token/pca/layer_0" in arrays + assert "regression_concat/projected/Lcat" in arrays assert visualizations == {} def test_tracker_accepts_torch_inputs(self, synthetic_data): @@ -663,7 +663,7 @@ def test_tracker_accepts_torch_inputs(self, synthetic_data): name: torch.tensor(np.asarray(layer)) for name, layer in synthetic_data["activations"].items() } - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=torch_inputs, 
beliefs=torch_beliefs, probs=torch_probs, @@ -671,7 +671,7 @@ def test_tracker_accepts_torch_inputs(self, synthetic_data): ) assert "regression/r2/layer_0" in scalars - assert "pca/pca/layer_0" in projections + assert "pca/pca/layer_0" in arrays assert visualizations == {} def test_tracker_builds_visualizations(self, synthetic_data, monkeypatch): @@ -694,7 +694,7 @@ def test_tracker_builds_visualizations(self, synthetic_data, monkeypatch): "name": "pca_projection", "data_mapping": { "mappings": { - "pc0": {"source": "projections", "key": "pca", "component": 0}, + "pc0": {"source": "arrays", "key": "pca", "component": 0}, "belief_state": {"source": "belief_states", "reducer": "argmax"}, } }, @@ -899,7 +899,7 @@ def test_linear_regression_with_multiple_factors(self, factored_belief_data): ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -919,16 +919,16 @@ def test_linear_regression_with_multiple_factors(self, factored_belief_data): assert "r2/layer_1-F0" in scalars assert "r2/layer_1-F1" in scalars - # Should have separate projections for each factor - assert "projected/layer_0-F0" in projections - assert "projected/layer_0-F1" in projections - assert "projected/layer_1-F0" in projections - assert "projected/layer_1-F1" in projections + # Should have separate arrays for each factor + assert "projected/layer_0-F0" in arrays + assert "projected/layer_0-F1" in arrays + assert "projected/layer_1-F0" in arrays + assert "projected/layer_1-F1" in arrays # Check projection shapes batch_size = factored_belief_data["batch_size"] - assert projections["projected/layer_0-F0"].shape == (batch_size, factored_belief_data["factor_0_dim"]) - assert projections["projected/layer_0-F1"].shape == (batch_size, factored_belief_data["factor_1_dim"]) + assert arrays["projected/layer_0-F0"].shape == (batch_size, factored_belief_data["factor_0_dim"]) + assert 
arrays["projected/layer_0-F1"].shape == (batch_size, factored_belief_data["factor_1_dim"]) def test_linear_regression_svd_with_multiple_factors(self, factored_belief_data): """LinearRegressionSVDAnalysis with multi-factor tuple should regress to each factor separately.""" @@ -946,7 +946,7 @@ def test_linear_regression_svd_with_multiple_factors(self, factored_belief_data) ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -958,9 +958,9 @@ def test_linear_regression_svd_with_multiple_factors(self, factored_belief_data) assert "best_rcond/layer_0-F0" in scalars assert "best_rcond/layer_0-F1" in scalars - # Should have separate projections for each factor - assert "projected/layer_0-F0" in projections - assert "projected/layer_0-F1" in projections + # Should have separate arrays for each factor + assert "projected/layer_0-F0" in arrays + assert "projected/layer_0-F1" in arrays def test_tracker_with_factored_beliefs(self, factored_belief_data): """ActivationTracker should work with tuple belief states.""" @@ -978,7 +978,7 @@ def test_tracker_with_factored_beliefs(self, factored_belief_data): } ) - scalars, projections, _ = tracker.analyze( + scalars, arrays, _ = tracker.analyze( inputs=factored_belief_data["inputs"], beliefs=factored_belief_data["factored_beliefs"], probs=factored_belief_data["probs"], @@ -992,10 +992,10 @@ def test_tracker_with_factored_beliefs(self, factored_belief_data): # PCA should still work (doesn't use belief states) assert "pca/var_exp/layer_0" in scalars - # Projections should be present - assert "regression/projected/layer_0-F0" in projections - assert "regression/projected/layer_0-F1" in projections - assert "pca/pca/layer_0" in projections + # Arrays should be present + assert "regression/projected/layer_0-F0" in arrays + assert "regression/projected/layer_0-F1" in arrays + assert "pca/pca/layer_0" in 
arrays def test_single_factor_tuple(self, synthetic_data): """Test with a single-factor tuple (edge case).""" @@ -1036,7 +1036,7 @@ def test_linear_regression_single_factor_tuple_behaves_like_non_tuple(self, synt ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -1045,11 +1045,11 @@ def test_linear_regression_single_factor_tuple_behaves_like_non_tuple(self, synt # Should have simple keys without "factor_" prefix assert "r2/layer_0" in scalars assert "rmse/layer_0" in scalars - assert "projected/layer_0" in projections + assert "projected/layer_0" in arrays # Should NOT have factor keys assert "r2/layer_0-F0" not in scalars - assert "projected/layer_0-F0" not in projections + assert "projected/layer_0-F0" not in arrays def test_linear_regression_concat_belief_states(self, factored_belief_data): """LinearRegressionAnalysis with concat_belief_states=True should return both factor and concat results.""" @@ -1067,7 +1067,7 @@ def test_linear_regression_concat_belief_states(self, factored_belief_data): ), ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, @@ -1076,18 +1076,18 @@ def test_linear_regression_concat_belief_states(self, factored_belief_data): # Should have per-factor results assert "r2/layer_0-F0" in scalars assert "r2/layer_0-F1" in scalars - assert "projected/layer_0-F0" in projections - assert "projected/layer_0-F1" in projections + assert "projected/layer_0-F0" in arrays + assert "projected/layer_0-F1" in arrays # Should ALSO have concatenated results assert "r2/layer_0-Fcat" in scalars assert "rmse/layer_0-Fcat" in scalars - assert "projected/layer_0-Fcat" in projections + assert "projected/layer_0-Fcat" in arrays # Check concatenated projection shape (should be sum of factor dimensions) 
batch_size = factored_belief_data["batch_size"] total_dim = factored_belief_data["factor_0_dim"] + factored_belief_data["factor_1_dim"] - assert projections["projected/layer_0-Fcat"].shape == (batch_size, total_dim) + assert arrays["projected/layer_0-Fcat"].shape == (batch_size, total_dim) def test_three_factor_tuple(self, factored_belief_data): """Test with three factors to ensure generalization.""" @@ -1139,7 +1139,7 @@ def test_compute_subspace_orthogonality(self, factored_belief_data): compute_subspace_orthogonality=True, ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=prepared.activations, belief_states=prepared.belief_states, weights=prepared.weights, diff --git a/tests/activations/test_activation_tracker.py b/tests/activations/test_activation_tracker.py index 1a80d9a0..f4ac76e8 100644 --- a/tests/activations/test_activation_tracker.py +++ b/tests/activations/test_activation_tracker.py @@ -246,14 +246,14 @@ def test_analyze_without_visualizations(self, synthetic_data): tracker = ActivationTracker( analyses={"pca": PcaAnalysis(n_components=1, last_token_only=False, concat_layers=False)}, ) - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], activations=synthetic_data["activations"], ) assert len(scalars) > 0 - assert len(projections) > 0 + assert len(arrays) > 0 assert len(visualizations) == 0 def test_analyze_records_scalar_history(self, synthetic_data): @@ -293,7 +293,7 @@ def test_analyze_with_tuple_beliefs_creates_stacked_array(self, synthetic_data): "name": "test_viz", "data_mapping": { "mappings": { - "pc0": {"source": "projections", "key": "pca", "component": 0}, + "pc0": {"source": "arrays", "key": "pca", "component": 0}, }, }, "layer": { @@ -307,7 +307,7 @@ def test_analyze_with_tuple_beliefs_creates_stacked_array(self, synthetic_data): 
analyses={"pca": PcaAnalysis(n_components=1, last_token_only=False, concat_layers=False)}, visualizations={"pca": [viz_cfg]}, ) - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=beliefs_tuple, probs=synthetic_data["probs"], @@ -322,7 +322,7 @@ def test_analyze_with_none_beliefs(self, synthetic_data): "name": "test_viz", "data_mapping": { "mappings": { - "pc0": {"source": "projections", "key": "pca", "component": 0}, + "pc0": {"source": "arrays", "key": "pca", "component": 0}, }, }, "layer": { @@ -337,7 +337,7 @@ def test_analyze_with_none_beliefs(self, synthetic_data): visualizations={"pca": [viz_cfg]}, ) # PCA doesn't require beliefs, so this should work - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=synthetic_data["inputs"], beliefs=synthetic_data["beliefs"], probs=synthetic_data["probs"], diff --git a/tests/activations/test_activation_visualizations.py b/tests/activations/test_activation_visualizations.py index b0b2c17d..c771a4ea 100644 --- a/tests/activations/test_activation_visualizations.py +++ b/tests/activations/test_activation_visualizations.py @@ -184,7 +184,7 @@ def basic_viz_config(self): "name": "test_viz", "data_mapping": { "mappings": { - "pc0": {"source": "projections", "key": "pca", "component": 0}, + "pc0": {"source": "arrays", "key": "pca", "component": 0}, }, }, "layer": { @@ -196,9 +196,9 @@ def basic_viz_config(self): } ) - def test_builds_payload_with_projections(self, basic_metadata, basic_viz_config): - """Test building a payload with projection data.""" - projections = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} + def test_builds_payload_with_arrays(self, basic_metadata, basic_viz_config): + """Test building a payload with array data.""" + arrays = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} payloads = build_visualization_payloads( analysis_name="test", 
viz_cfgs=[basic_viz_config], @@ -206,7 +206,7 @@ def test_builds_payload_with_projections(self, basic_metadata, basic_viz_config) prepared_metadata=basic_metadata, weights=np.array([0.5, 0.5]), belief_states=None, - projections=projections, + arrays=arrays, scalars={}, scalar_history={}, scalar_history_step=None, @@ -244,7 +244,7 @@ def test_builds_payload_with_belief_states(self, basic_metadata): prepared_metadata=basic_metadata, weights=np.array([0.5, 0.5]), belief_states=belief_states, - projections={}, + arrays={}, scalars={}, scalar_history={}, scalar_history_step=None, @@ -260,7 +260,7 @@ def test_handles_multiple_configs(self, basic_metadata): build_activation_visualization_config( { "name": "viz_1", - "data_mapping": {"mappings": {"pc0": {"source": "projections", "key": "pca", "component": 0}}}, + "data_mapping": {"mappings": {"pc0": {"source": "arrays", "key": "pca", "component": 0}}}, "layer": { "geometry": {"type": "point"}, "aesthetics": {"x": {"field": "pc0", "type": "quantitative"}}, @@ -270,7 +270,7 @@ def test_handles_multiple_configs(self, basic_metadata): build_activation_visualization_config( { "name": "viz_2", - "data_mapping": {"mappings": {"pc1": {"source": "projections", "key": "pca", "component": 1}}}, + "data_mapping": {"mappings": {"pc1": {"source": "arrays", "key": "pca", "component": 1}}}, "layer": { "geometry": {"type": "point"}, "aesthetics": {"x": {"field": "pc1", "type": "quantitative"}}, @@ -278,7 +278,7 @@ def test_handles_multiple_configs(self, basic_metadata): } ), ] - projections = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} + arrays = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} payloads = build_visualization_payloads( analysis_name="test", viz_cfgs=configs, @@ -286,7 +286,7 @@ def test_handles_multiple_configs(self, basic_metadata): prepared_metadata=basic_metadata, weights=np.array([0.5, 0.5]), belief_states=None, - projections=projections, + arrays=arrays, scalars={}, scalar_history={}, 
scalar_history_step=None, diff --git a/tests/activations/test_dataframe_integration.py b/tests/activations/test_dataframe_integration.py index c0e9baec..bb6dd4b6 100644 --- a/tests/activations/test_dataframe_integration.py +++ b/tests/activations/test_dataframe_integration.py @@ -25,7 +25,7 @@ class TestProjectionDataframeIntegration: def test_factored_projection_dataframe_values_match(self): """Test that factored projection values are correctly associated with each factor. - This is a regression test for the bug where projections looked 'random' + This is a regression test for the bug where arrays looked 'random' when visualizing factored linear regression results. """ # Simulate projection keys as produced by LayerwiseAnalysis with to_factors=True @@ -33,7 +33,7 @@ def test_factored_projection_dataframe_values_match(self): factor_0_data = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1], [0.3, 0.6, 0.1]]) factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6], [0.3, 0.7]]) - projections = { + arrays = { "projected/layer_0-F0": factor_0_data, "projected/layer_0-F1": factor_1_data, } @@ -48,13 +48,13 @@ def test_factored_projection_dataframe_values_match(self): # Note: Each mapping is for a SPECIFIC component, not a wildcard mappings = { "factor_*_prob_0": ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=0, group_as="factor", ), "factor_*_prob_1": ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=1, group_as="factor", @@ -65,7 +65,7 @@ def test_factored_projection_dataframe_values_match(self): df = _build_dataframe_for_mappings( mappings=mappings, metadata_columns=metadata_columns, - projections=projections, + arrays=arrays, scalars={}, belief_states=None, analysis_concat_layers=False, @@ -123,7 +123,7 @@ def test_factored_projection_different_component_counts(self): factor_0_data = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1]]) # 3 components factor_1_data = 
np.array([[0.5, 0.5], [0.4, 0.6]]) # 2 components - projections = { + arrays = { "projected/layer_0-F0": factor_0_data, "projected/layer_0-F1": factor_1_data, } @@ -136,7 +136,7 @@ def test_factored_projection_different_component_counts(self): # Request component 2 - this should fail for factor_1 which only has 2 components mappings = { "factor_*_prob_2": ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=2, group_as="factor", @@ -148,15 +148,15 @@ def test_factored_projection_different_component_counts(self): _build_dataframe_for_mappings( mappings=mappings, metadata_columns=metadata_columns, - projections=projections, + arrays=arrays, scalars={}, belief_states=None, analysis_concat_layers=False, layer_names=["layer_0"], ) - def test_combined_projections_and_beliefs_data_integrity(self): - """Test combined mode with projections and belief states.""" + def test_combined_arrays_and_beliefs_data_integrity(self): + """Test combined mode with arrays and belief states.""" n_samples = 4 n_factors = 2 n_states = 3 @@ -173,7 +173,7 @@ def test_combined_projections_and_beliefs_data_integrity(self): noise = np.random.default_rng(42).standard_normal((n_samples, n_factors, n_states)) * 0.01 projected_values = belief_states + noise - projections = { + arrays = { "projected/layer_0-F0": projected_values[:, 0, :], "projected/layer_0-F1": projected_values[:, 1, :], } @@ -194,7 +194,7 @@ def test_combined_projections_and_beliefs_data_integrity(self): label="prediction", mappings={ f"factor_*_prob_{i}": ActivationVisualizationFieldRef( - source="projections", key="projected/F*", component=i, group_as="factor" + source="arrays", key="projected/F*", component=i, group_as="factor" ) for i in range(n_states) }, @@ -216,7 +216,7 @@ def test_combined_projections_and_beliefs_data_integrity(self): df = _build_dataframe( viz_cfg=config, metadata_columns=metadata_columns, - projections=projections, + arrays=arrays, scalars={}, 
scalar_history={}, scalar_history_step=None, @@ -237,7 +237,7 @@ def test_combined_mode_multiple_layers(self): n_states = 3 belief_states = np.random.rand(n_samples, n_factors, n_states) - projections = { + arrays = { f"projected/layer_{layer_idx}-F{factor_idx}": np.random.rand(n_samples, n_states) for layer_idx in range(n_layers) for factor_idx in range(n_factors) @@ -259,7 +259,7 @@ def test_combined_mode_multiple_layers(self): label="prediction", mappings={ "factor_*_prob_0": ActivationVisualizationFieldRef( - source="projections", key="projected/F*", component=0, group_as="factor" + source="arrays", key="projected/F*", component=0, group_as="factor" ), }, ), @@ -279,7 +279,7 @@ def test_combined_mode_multiple_layers(self): df = _build_dataframe( viz_cfg=config, metadata_columns=metadata_columns, - projections=projections, + arrays=arrays, scalars={}, scalar_history={}, scalar_history_step=None, @@ -294,14 +294,14 @@ def test_combined_mode_multiple_layers(self): assert set(np.unique(np.asarray(gt_df["layer"]))) == {"_no_layer_"} def test_full_visualization_pipeline_factored_vs_nonfactored(self): - """Test that factored and non-factored projections produce same results for single factor.""" + """Test that factored and non-factored arrays produce same results for single factor.""" projection_data = np.array([[0.7, 0.2, 0.1], [0.1, 0.8, 0.1], [0.2, 0.2, 0.6]]) metadata = {"step": np.array([1, 1, 1]), "sample_index": np.arange(3)} nf_df = _build_dataframe_for_mappings( - mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="projected", component=0)}, + mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="projected", component=0)}, metadata_columns=metadata, - projections={"projected/layer_0": projection_data}, + arrays={"projected/layer_0": projection_data}, scalars={}, belief_states=None, analysis_concat_layers=False, @@ -310,11 +310,11 @@ def test_full_visualization_pipeline_factored_vs_nonfactored(self): f_df = 
_build_dataframe_for_mappings( mappings={ "factor_*_prob_0": ActivationVisualizationFieldRef( - source="projections", key="projected/F*", component=0, group_as="factor" + source="arrays", key="projected/F*", component=0, group_as="factor" ) }, metadata_columns=metadata, - projections={"projected/layer_0-F0": projection_data}, + arrays={"projected/layer_0-F0": projection_data}, scalars={}, belief_states=None, analysis_concat_layers=False, @@ -328,8 +328,8 @@ def test_full_visualization_pipeline_factored_vs_nonfactored(self): np.asarray(f_filtered["prob_0"]), ) - def test_linear_regression_projections_match_beliefs(self): - """Test that linear regression projections closely match original beliefs.""" + def test_linear_regression_arrays_match_beliefs(self): + """Test that linear regression arrays closely match original beliefs.""" n_samples, n_features, n_factors, n_states = 50, 10, 3, 3 rng = np.random.default_rng(42) @@ -339,11 +339,11 @@ def test_linear_regression_projections_match_beliefs(self): beliefs_softmax = beliefs_softmax / beliefs_softmax.sum(axis=2, keepdims=True) belief_states = tuple(jnp.array(beliefs_softmax[:, f, :]) for f in range(n_factors)) - scalars, projections = layer_linear_regression( + scalars, arrays = layer_linear_regression( jnp.array(ds), jnp.ones(n_samples) / n_samples, belief_states, use_svd=True ) for f in range(n_factors): assert scalars[f"r2/F{f}"] > 0.8, f"Factor {f} R² too low" - diff = np.abs(np.asarray(projections[f"projected/F{f}"]) - np.asarray(belief_states[f])) - assert diff.max() < 0.2, f"Factor {f} projections differ too much from beliefs" + diff = np.abs(np.asarray(arrays[f"projected/F{f}"]) - np.asarray(belief_states[f])) + assert diff.max() < 0.2, f"Factor {f} arrays differ too much from beliefs" diff --git a/tests/activations/test_field_expansion.py b/tests/activations/test_field_expansion.py index ec3340b9..b10182b4 100644 --- a/tests/activations/test_field_expansion.py +++ b/tests/activations/test_field_expansion.py 
@@ -10,9 +10,9 @@ _resolve_belief_states, ) from simplexity.activations.visualization.pattern_expansion import ( + _expand_array_key_pattern, _expand_belief_factor_mapping, _expand_field_mapping, - _expand_projection_key_pattern, _get_component_count, _has_field_pattern, _has_key_pattern, @@ -126,40 +126,40 @@ def test_is_expansion_pattern_multiple_patterns(self): class TestComponentCount: """Test component count determination.""" - def test_get_component_count_projections_2d(self): - """Test getting component count from 2D projections.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"pca/layer_0": np.random.randn(100, 10)} - count = _get_component_count(ref, "layer_0", projections, None, False) + def test_get_component_count_arrays_2d(self): + """Test getting component count from 2D arrays.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca") + arrays = {"pca/layer_0": np.random.randn(100, 10)} + count = _get_component_count(ref, "layer_0", arrays, None, False) assert count == 10 - def test_get_component_count_projections_different_sizes(self): - """Test getting component count from 2D projections with different sizes.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"pca/layer_0": np.random.randn(50, 15)} - count = _get_component_count(ref, "layer_0", projections, None, False) + def test_get_component_count_arrays_different_sizes(self): + """Test getting component count from 2D arrays with different sizes.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca") + arrays = {"pca/layer_0": np.random.randn(50, 15)} + count = _get_component_count(ref, "layer_0", arrays, None, False) assert count == 15 - def test_get_component_count_projections_concat_layers(self): - """Test getting component count from concatenated layer projections.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"pca": 
np.random.randn(200, 20)} - count = _get_component_count(ref, "any_layer", projections, None, True) + def test_get_component_count_arrays_concat_layers(self): + """Test getting component count from concatenated layer arrays.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca") + arrays = {"pca": np.random.randn(200, 20)} + count = _get_component_count(ref, "any_layer", arrays, None, True) assert count == 20 - def test_get_component_count_projections_1d_raises(self): - """Test that 1D projections raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"pca/layer_0": np.random.randn(100)} + def test_get_component_count_arrays_1d_raises(self): + """Test that 1D arrays raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca") + arrays = {"pca/layer_0": np.random.randn(100)} with pytest.raises(ConfigValidationError, match="1D projection"): - _get_component_count(ref, "layer_0", projections, None, False) + _get_component_count(ref, "layer_0", arrays, None, False) - def test_get_component_count_projections_3d_raises(self): - """Test that 3D projections raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca") - projections = {"pca/layer_0": np.random.randn(10, 10, 10)} + def test_get_component_count_arrays_3d_raises(self): + """Test that 3D arrays raise an error when getting component count.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca") + arrays = {"pca/layer_0": np.random.randn(10, 10, 10)} with pytest.raises(ConfigValidationError, match="1D or 2D"): - _get_component_count(ref, "layer_0", projections, None, False) + _get_component_count(ref, "layer_0", arrays, None, False) def test_get_component_count_belief_states(self): """Test getting component count from belief states.""" @@ -198,12 +198,12 @@ def 
test_get_component_count_unsupported_source(self): class TestFieldExpansion: """Test field mapping expansion.""" - def test_wildcard_expansion_projections(self): + def test_wildcard_expansion_arrays(self): """Test detection of wildcard expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"pca/layer_0": np.random.randn(50, 3)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") + arrays = {"pca/layer_0": np.random.randn(50, 3)} - expanded = _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("pc_*", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 3 assert "pc_0" in expanded @@ -213,7 +213,7 @@ def test_wildcard_expansion_projections(self): assert expanded["pc_1"].component == 1 assert expanded["pc_2"].component == 2 assert all(r.key == "pca" for r in expanded.values()) - assert all(r.source == "projections" for r in expanded.values()) + assert all(r.source == "arrays" for r in expanded.values()) def test_wildcard_expansion_belief_states(self): """Test detection of wildcard expansion patterns.""" @@ -231,10 +231,10 @@ def test_wildcard_expansion_belief_states(self): def test_range_expansion(self): """Test detection of range expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...5") - projections = {"pca/layer_0": np.random.randn(50, 10)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="0...5") + arrays = {"pca/layer_0": np.random.randn(50, 10)} - expanded = _expand_field_mapping("pc_0...5", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("pc_0...5", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 5 assert "pc_0" in expanded @@ -245,10 +245,10 @@ def test_range_expansion(self): def test_range_expansion_with_offset(self): """Test detection of range 
expansion patterns with offset.""" - ref = ActivationVisualizationFieldRef(source="projections", key="projected", component="2...5") - projections = {"projected/layer_0": np.random.randn(50, 10)} + ref = ActivationVisualizationFieldRef(source="arrays", key="projected", component="2...5") + arrays = {"projected/layer_0": np.random.randn(50, 10)} - expanded = _expand_field_mapping("prob_2...5", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("prob_2...5", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 3 assert "prob_2" in expanded @@ -260,10 +260,10 @@ def test_range_expansion_with_offset(self): def test_wildcard_in_middle_of_name(self): """Test detection of wildcard expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"pca/layer_0": np.random.randn(50, 3)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") + arrays = {"pca/layer_0": np.random.randn(50, 3)} - expanded = _expand_field_mapping("component_*_normalized", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("component_*_normalized", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 3 assert "component_0_normalized" in expanded @@ -272,10 +272,10 @@ def test_wildcard_in_middle_of_name(self): def test_no_expansion_needed(self): """Test that no expansion occurs when component is a specific integer.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component=0) - projections = {"pca/layer_0": np.random.randn(50, 5)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component=0) + arrays = {"pca/layer_0": np.random.randn(50, 5)} - expanded = _expand_field_mapping("pc_0", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("pc_0", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 1 assert "pc_0" in expanded @@ -284,9 +284,9 @@ 
def test_no_expansion_needed(self): def test_no_expansion_none_component(self): """Test that no expansion occurs when component is None.""" ref = ActivationVisualizationFieldRef(source="metadata", key="step") - projections = {} + arrays = {} - expanded = _expand_field_mapping("step", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("step", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 1 assert "step" in expanded @@ -294,35 +294,35 @@ def test_no_expansion_none_component(self): def test_field_pattern_without_component_pattern_raises(self): """Test that a field pattern without a component pattern raises an error.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component=0) - projections = {"pca/layer_0": np.random.randn(50, 5)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component=0) + arrays = {"pca/layer_0": np.random.randn(50, 5)} with pytest.raises(ConfigValidationError, match="has pattern but component is not"): - _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, False) + _expand_field_mapping("pc_*", ref, "layer_0", arrays, {}, None, False) def test_component_pattern_without_field_pattern_raises(self): """Test that a component pattern without a field pattern raises an error.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"pca/layer_0": np.random.randn(50, 5)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") + arrays = {"pca/layer_0": np.random.randn(50, 5)} with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("pc_0", ref, "layer_0", projections, {}, None, False) + _expand_field_mapping("pc_0", ref, "layer_0", arrays, {}, None, False) def test_range_exceeds_available_components(self): """Test that a range exceeding available components raises an error.""" - ref = 
ActivationVisualizationFieldRef(source="projections", key="pca", component="0...20") - projections = {"pca/layer_0": np.random.randn(50, 10)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="0...20") + arrays = {"pca/layer_0": np.random.randn(50, 10)} with pytest.raises(ConfigValidationError, match="exceeds available components"): - _expand_field_mapping("pc_0...20", ref, "layer_0", projections, {}, None, False) + _expand_field_mapping("pc_0...20", ref, "layer_0", arrays, {}, None, False) def test_range_partially_exceeds_available_components(self): """Test that a range partially exceeding available components raises an error.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="5...15") - projections = {"pca/layer_0": np.random.randn(50, 10)} + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="5...15") + arrays = {"pca/layer_0": np.random.randn(50, 10)} with pytest.raises(ConfigValidationError, match="exceeds available components"): - _expand_field_mapping("pc_5...15", ref, "layer_0", projections, {}, None, False) + _expand_field_mapping("pc_5...15", ref, "layer_0", arrays, {}, None, False) def test_expansion_preserves_reducer(self): """Test that expansion preserves the reducer attribute.""" @@ -334,11 +334,11 @@ def test_expansion_preserves_reducer(self): assert all(r.reducer == "l2_norm" for r in expanded.values()) def test_expansion_with_concat_layers(self): - """Test expansion when projections are concatenated across layers.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") - projections = {"pca": np.random.randn(50, 5)} + """Test expansion when arrays are concatenated across layers.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") + arrays = {"pca": np.random.randn(50, 5)} - expanded = _expand_field_mapping("pc_*", ref, "layer_0", projections, {}, None, True) + expanded = 
_expand_field_mapping("pc_*", ref, "layer_0", arrays, {}, None, True) assert len(expanded) == 5 assert all(f"pc_{i}" in expanded for i in range(5)) @@ -347,14 +347,14 @@ def test_expansion_with_concat_layers(self): class TestFieldRefValidation: """Test ActivationVisualizationFieldRef validation.""" - def test_valid_wildcard_projections(self): - """Test that wildcard patterns in projections are valid.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="*") + def test_valid_wildcard_arrays(self): + """Test that wildcard patterns in arrays are valid.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") assert ref.component == "*" - def test_valid_range_projections(self): - """Test that range patterns in projections are valid.""" - ref = ActivationVisualizationFieldRef(source="projections", key="pca", component="0...10") + def test_valid_range_arrays(self): + """Test that range patterns in arrays are valid.""" + ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="0...10") assert ref.component == "0...10" def test_valid_wildcard_belief_states(self): @@ -365,26 +365,26 @@ def test_valid_wildcard_belief_states(self): def test_invalid_pattern_format(self): """Test that invalid pattern formats raise a ConfigValidationError.""" with pytest.raises(ConfigValidationError, match="invalid"): - ActivationVisualizationFieldRef(source="projections", key="pca", component="invalid_pattern") + ActivationVisualizationFieldRef(source="arrays", key="pca", component="invalid_pattern") def test_invalid_range_wrong_separator(self): """Test that invalid range separators raise a ConfigValidationError.""" with pytest.raises(ConfigValidationError, match="invalid"): - ActivationVisualizationFieldRef(source="projections", key="pca", component="0..10") + ActivationVisualizationFieldRef(source="arrays", key="pca", component="0..10") def test_pattern_on_unsupported_source_scalars(self): """Test that pattern 
expansion is not supported for scalars source.""" - with pytest.raises(ConfigValidationError, match="only supported for projections/belief_states"): + with pytest.raises(ConfigValidationError, match="only supported for arrays/belief_states"): ActivationVisualizationFieldRef(source="scalars", key="some_scalar", component="*") def test_pattern_on_unsupported_source_metadata(self): """Test that pattern expansion is not supported for metadata source.""" - with pytest.raises(ConfigValidationError, match="only supported for projections/belief_states"): + with pytest.raises(ConfigValidationError, match="only supported for arrays/belief_states"): ActivationVisualizationFieldRef(source="metadata", key="step", component="*") def test_pattern_on_unsupported_source_weights(self): """Test that pattern expansion is not supported for weights source.""" - with pytest.raises(ConfigValidationError, match="only supported for projections/belief_states"): + with pytest.raises(ConfigValidationError, match="only supported for arrays/belief_states"): ActivationVisualizationFieldRef(source="weights", component="*") @@ -524,69 +524,69 @@ def test_has_key_pattern_multiple_raises(self): with pytest.raises(ConfigValidationError, match="multiple patterns"): _has_key_pattern("projected/L*/F*") - def test_expand_projection_key_pattern_wildcard(self): - """Test that _expand_projection_key_pattern expands wildcard patterns correctly.""" - projections = { + def test_expand_array_key_pattern_wildcard(self): + """Test that _expand_array_key_pattern expands wildcard patterns correctly.""" + arrays = { "projected/layer_0-F0": np.random.randn(10, 3), "projected/layer_0-F1": np.random.randn(10, 3), "projected/layer_0-F2": np.random.randn(10, 3), } - result = _expand_projection_key_pattern("projected/F*", "layer_0", projections, False) + result = _expand_array_key_pattern("projected/F*", "layer_0", arrays, False) assert len(result) == 3 assert result["0"] == "projected/F0" assert result["1"] == 
"projected/F1" assert result["2"] == "projected/F2" - def test_expand_projection_key_pattern_range(self): - """Test that _expand_projection_key_pattern expands range patterns correctly.""" - projections = { + def test_expand_array_key_pattern_range(self): + """Test that _expand_array_key_pattern expands range patterns correctly.""" + arrays = { "projected/layer_0-F0": np.random.randn(10, 3), "projected/layer_0-F1": np.random.randn(10, 3), "projected/layer_0-F2": np.random.randn(10, 3), } - result = _expand_projection_key_pattern("projected/F0...2", "layer_0", projections, False) + result = _expand_array_key_pattern("projected/F0...2", "layer_0", arrays, False) assert len(result) == 2 assert result["0"] == "projected/F0" assert result["1"] == "projected/F1" - def test_expand_projection_key_pattern_concat_layers(self): - """Test that _expand_projection_key_pattern works with concatenated layers.""" - projections = { + def test_expand_array_key_pattern_concat_layers(self): + """Test that _expand_array_key_pattern works with concatenated layers.""" + arrays = { "projected/F0": np.random.randn(10, 3), "projected/F1": np.random.randn(10, 3), } - result = _expand_projection_key_pattern("projected/F*", "any_layer", projections, True) + result = _expand_array_key_pattern("projected/F*", "any_layer", arrays, True) assert len(result) == 2 assert result["0"] == "projected/F0" assert result["1"] == "projected/F1" - def test_expand_projection_key_pattern_no_matches_raises(self): - """Test that _expand_projection_key_pattern raises an error when no keys match.""" - projections = {"pca/layer_0": np.random.randn(10, 3)} + def test_expand_array_key_pattern_no_matches_raises(self): + """Test that _expand_array_key_pattern raises an error when no keys match.""" + arrays = {"pca/layer_0": np.random.randn(10, 3)} - with pytest.raises(ConfigValidationError, match="No projection keys found"): - _expand_projection_key_pattern("projected/F*", "layer_0", projections, False) + with 
pytest.raises(ConfigValidationError, match="No array keys found"): + _expand_array_key_pattern("projected/F*", "layer_0", arrays, False) def test_field_mapping_with_key_pattern(self): """Test that field mappings with key patterns are expanded correctly.""" ref = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=0, group_as="factor", ) - projections = { + arrays = { "projected/layer_0-F0": np.random.randn(10, 3), "projected/layer_0-F1": np.random.randn(10, 3), } - expanded = _expand_field_mapping("factor_*_prob", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("factor_*_prob", ref, "layer_0", arrays, {}, None, False) assert len(expanded) == 2 assert "factor_0_prob" in expanded @@ -600,17 +600,17 @@ def test_field_mapping_with_key_pattern(self): def test_field_mapping_with_key_and_component_patterns(self): """Test that field mappings with key and component patterns are expanded correctly.""" ref = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component="*", group_as="factor", ) - projections = { + arrays = { "projected/layer_0-F0": np.random.randn(10, 3), "projected/layer_0-F1": np.random.randn(10, 3), } - expanded = _expand_field_mapping("factor_*_prob_*", ref, "layer_0", projections, {}, None, False) + expanded = _expand_field_mapping("factor_*_prob_*", ref, "layer_0", arrays, {}, None, False) # Cross-product: 2 factors * 3 components = 6 expanded fields assert len(expanded) == 6 @@ -635,15 +635,15 @@ def test_field_mapping_with_key_and_component_patterns(self): def test_key_pattern_without_field_pattern_raises(self): """Test that a key pattern without a field pattern raises an error.""" ref = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=0, group_as="factor", ) - projections = {"projected/layer_0-F0": np.random.randn(10, 3)} + arrays = {"projected/layer_0-F0": 
np.random.randn(10, 3)} with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("prob_0", ref, "layer_0", projections, {}, None, False) + _expand_field_mapping("prob_0", ref, "layer_0", arrays, {}, None, False) class TestGroupAsValidation: @@ -653,14 +653,14 @@ def test_key_pattern_requires_group_as(self): """Test that a key pattern requires the group_as parameter.""" with pytest.raises(ConfigValidationError, match="requires `group_as`"): ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=0, ) - def test_group_as_only_for_projections(self): - """Test that group_as is only valid for projections source.""" - with pytest.raises(ConfigValidationError, match="only supported for projections"): + def test_group_as_only_for_arrays(self): + """Test that group_as is only valid for arrays source.""" + with pytest.raises(ConfigValidationError, match="only supported for arrays"): ActivationVisualizationFieldRef( source="scalars", key="some_key", @@ -670,7 +670,7 @@ def test_group_as_only_for_projections(self): def test_valid_key_pattern_with_group_as(self): """Test that a valid key pattern with group_as is accepted.""" ref = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=0, group_as="factor", @@ -681,7 +681,7 @@ def test_valid_key_pattern_with_group_as(self): def test_valid_key_pattern_with_list_group_as(self): """Test that a valid key pattern with list group_as is accepted.""" ref = ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="projected/F*", component=0, group_as=["factor", "layer"], @@ -719,7 +719,7 @@ def test_valid_combined_section(self): section = CombinedMappingSection( label="prediction", mappings={ - "prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0), + "prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0), 
}, ) assert section.label == "prediction" @@ -740,7 +740,7 @@ def test_valid_combined_mapping(self): combined=[ CombinedMappingSection( label="prediction", - mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0)}, + mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0)}, ), CombinedMappingSection( label="ground_truth", @@ -760,9 +760,7 @@ def test_combined_without_combine_as_raises(self): combined=[ CombinedMappingSection( label="prediction", - mappings={ - "prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0) - }, + mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0)}, ), ], ) @@ -771,13 +769,11 @@ def test_combined_with_mappings_raises(self): """Test that a DataMapping with both 'mappings' and 'combined' raises ConfigValidationError.""" with pytest.raises(ConfigValidationError, match="Cannot use both"): ActivationVisualizationDataMapping( - mappings={"prob_0": ActivationVisualizationFieldRef(source="projections", key="proj", component=0)}, + mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0)}, combined=[ CombinedMappingSection( label="prediction", - mappings={ - "prob_1": ActivationVisualizationFieldRef(source="projections", key="proj", component=1) - }, + mappings={"prob_1": ActivationVisualizationFieldRef(source="arrays", key="proj", component=1)}, ), ], combine_as="data_type", @@ -791,7 +787,7 @@ def test_factor_field_only_for_belief_states(self): """Test that factor field is only supported for belief_states source.""" with pytest.raises(ConfigValidationError, match="only supported for belief_states"): ActivationVisualizationFieldRef( - source="projections", + source="arrays", key="proj", factor=0, ) diff --git a/tests/activations/test_scalar_history.py b/tests/activations/test_scalar_history.py index f95c6615..f5aa1e9a 100644 --- 
a/tests/activations/test_scalar_history.py +++ b/tests/activations/test_scalar_history.py @@ -275,7 +275,7 @@ def test_scalar_history_dataframe_requires_step(self): _build_dataframe( viz_cfg, self._metadata(), - projections={}, + arrays={}, scalars={"analysis/layer_0_rmse": 0.1}, scalar_history={}, scalar_history_step=None, @@ -290,7 +290,7 @@ def test_scalar_history_dataframe_uses_current_step(self): df = _build_dataframe( viz_cfg, self._metadata(), - projections={}, + arrays={}, scalars={"analysis/layer_0_rmse": 0.42}, scalar_history={}, scalar_history_step=7, @@ -326,7 +326,7 @@ def test_scalar_history_pattern_matches_complex_keys(self): df = _build_dataframe( viz_cfg, self._metadata(), - projections={}, + arrays={}, scalars=scalars, scalar_history={}, scalar_history_step=11, @@ -366,7 +366,7 @@ def test_scalar_history_pattern_matches_without_prefix(self): df = _build_dataframe( viz_cfg, self._metadata(), - projections={}, + arrays={}, scalars=scalars, scalar_history={}, scalar_history_step=3, @@ -395,7 +395,7 @@ def test_scalar_history_pattern_requires_match(self): _build_dataframe( viz_cfg, self._metadata(), - projections={}, + arrays={}, scalars={"analysis/other_metric": 0.1}, scalar_history={}, scalar_history_step=0, diff --git a/tests/activations/test_visualization_modules.py b/tests/activations/test_visualization_modules.py index 74c67a7e..3ff0a074 100644 --- a/tests/activations/test_visualization_modules.py +++ b/tests/activations/test_visualization_modules.py @@ -19,17 +19,17 @@ _scalar_series_metadata, ) from simplexity.activations.visualization.field_resolution import ( - _lookup_projection_array, + _lookup_array, _lookup_scalar_value, _maybe_component, _resolve_belief_states, _resolve_field, ) from simplexity.activations.visualization.pattern_expansion import ( + _expand_array_key_pattern, _expand_belief_factor_mapping, _expand_field_mapping, _expand_pattern_to_indices, - _expand_projection_key_pattern, _expand_scalar_pattern_ranges, 
_get_component_count, _parse_component_spec, @@ -56,22 +56,22 @@ class TestFieldResolution: """Tests for field_resolution.py functions.""" - def test_lookup_projection_array_not_found(self): - """Test that missing projection raises error.""" - projections = {"other/layer_0": np.array([1, 2, 3])} + def test_lookup_array_not_found(self): + """Test that missing array raises error.""" + arrays = {"other/layer_0": np.array([1, 2, 3])} with pytest.raises(ConfigValidationError, match="not available for layer"): - _lookup_projection_array(projections, "layer_0", "missing", False) + _lookup_array(arrays, "layer_0", "missing", False) - def test_lookup_projection_array_concat_layers_exact_match(self): + def test_lookup_array_concat_layers_exact_match(self): """Test exact key match with concat_layers.""" - projections = {"my_key": np.array([1, 2, 3])} - result = _lookup_projection_array(projections, "layer_0", "my_key", True) + arrays = {"my_key": np.array([1, 2, 3])} + result = _lookup_array(arrays, "layer_0", "my_key", True) np.testing.assert_array_equal(result, [1, 2, 3]) - def test_lookup_projection_array_concat_layers_prefix_match(self): + def test_lookup_array_concat_layers_prefix_match(self): """Test prefix match with concat_layers.""" - projections = {"my_key/Lcat": np.array([4, 5, 6])} - result = _lookup_projection_array(projections, "layer_0", "my_key", True) + arrays = {"my_key/Lcat": np.array([4, 5, 6])} + result = _lookup_array(arrays, "layer_0", "my_key", True) np.testing.assert_array_equal(result, [4, 5, 6]) def test_lookup_scalar_value_concat_layers_exact(self): @@ -228,17 +228,17 @@ def test_expand_pattern_to_indices_non_numeric_ignored(self): def test_get_component_count_projection_success(self): """Test getting component count from 2D projection.""" - ref = ActivationVisualizationFieldRef(source="projections", key="proj", component="*") - projections = {"proj/layer_0": np.ones((10, 5))} - result = _get_component_count(ref, "layer_0", projections, None, 
False) + ref = ActivationVisualizationFieldRef(source="arrays", key="proj", component="*") + arrays = {"proj/layer_0": np.ones((10, 5))} + result = _get_component_count(ref, "layer_0", arrays, None, False) assert result == 5 def test_get_component_count_1d_projection(self): """Test that 1D projection raises error for expansion.""" - ref = ActivationVisualizationFieldRef(source="projections", key="proj") - projections = {"proj/layer_0": np.array([1, 2, 3])} + ref = ActivationVisualizationFieldRef(source="arrays", key="proj") + arrays = {"proj/layer_0": np.array([1, 2, 3])} with pytest.raises(ConfigValidationError, match="Cannot expand 1D"): - _get_component_count(ref, "layer_0", projections, None, False) + _get_component_count(ref, "layer_0", arrays, None, False) def test_get_component_count_belief_states_missing(self): """Test that missing belief states raises error.""" @@ -258,21 +258,21 @@ def test_get_component_count_unsupported_source(self): with pytest.raises(ConfigValidationError, match="not supported"): _get_component_count(ref, "layer_0", {}, None, False) - def test_expand_projection_key_pattern_invalid(self): + def test_expand_array_key_pattern_invalid(self): """Test that invalid key pattern raises error.""" with pytest.raises(ConfigValidationError, match="Invalid key pattern"): - _expand_projection_key_pattern("plain_key", "layer_0", {}, False) + _expand_array_key_pattern("plain_key", "layer_0", {}, False) - def test_expand_projection_key_pattern_invalid_range(self): + def test_expand_array_key_pattern_invalid_range(self): """Test that invalid range in key pattern raises error.""" with pytest.raises(ConfigValidationError, match="Invalid range"): - _expand_projection_key_pattern("key_5...3", "layer_0", {}, False) + _expand_array_key_pattern("key_5...3", "layer_0", {}, False) - def test_expand_projection_key_pattern_no_matches(self): - """Test that no matching projections raises error.""" - projections = {"other/layer_0": np.ones((3, 4))} - with 
pytest.raises(ConfigValidationError, match="No projection keys found"): - _expand_projection_key_pattern("key_*", "layer_0", projections, False) + def test_expand_array_key_pattern_no_matches(self): + """Test that no matching arrays raises error.""" + arrays = {"other/layer_0": np.ones((3, 4))} + with pytest.raises(ConfigValidationError, match="No array keys found"): + _expand_array_key_pattern("key_*", "layer_0", arrays, False) def test_expand_belief_factor_mapping_wrong_dim(self): """Test that non-3D beliefs for factor expansion raises error.""" @@ -309,13 +309,13 @@ def test_expand_scalar_pattern_ranges_invalid(self): def test_expand_field_mapping_projection_no_field_pattern(self): """Test projection key pattern without field pattern raises error.""" - ref = ActivationVisualizationFieldRef(source="projections", key="factor_*", group_as="factor") + ref = ActivationVisualizationFieldRef(source="arrays", key="factor_*", group_as="factor") with pytest.raises(ConfigValidationError, match="requires field name pattern"): _expand_field_mapping("plain_field", ref, "layer_0", {}, {}, None, False) def test_expand_field_mapping_projection_too_many_patterns(self): """Test projection with too many field patterns raises error.""" - ref = ActivationVisualizationFieldRef(source="projections", key="factor_*", group_as="factor") + ref = ActivationVisualizationFieldRef(source="arrays", key="factor_*", group_as="factor") with pytest.raises(ConfigValidationError, match="too many patterns"): _expand_field_mapping("f_*_g_*_h_*", ref, "layer_0", {}, {}, None, False) @@ -695,7 +695,7 @@ def test_build_scalar_dataframe_no_matches(self): def test_build_scalar_dataframe_non_scalar_source_skipped(self): """Test that non-scalar sources are skipped.""" mappings = { - "proj": ActivationVisualizationFieldRef(source="projections", key="my_proj"), + "proj": ActivationVisualizationFieldRef(source="arrays", key="my_proj"), "rmse": ActivationVisualizationFieldRef(source="scalar_pattern", 
key="rmse/layer_*"), } scalars = {"analysis/rmse/layer_0": 0.1} @@ -750,11 +750,11 @@ def test_build_metadata_columns(self): assert list(result["weight"]) == [1.0, 0.5] def test_build_dataframe_for_mappings_simple(self): - """Test _build_dataframe_for_mappings with simple projection mapping.""" - mappings = {"x": ActivationVisualizationFieldRef(source="projections", key="pca", component=0)} + """Test _build_dataframe_for_mappings with simple array mapping.""" + mappings = {"x": ActivationVisualizationFieldRef(source="arrays", key="pca", component=0)} metadata = {"step": np.array([1, 2]), "analysis": np.array(["test", "test"])} - projections = {"pca/layer_0": np.array([[0.1, 0.2], [0.3, 0.4]])} - result = _build_dataframe_for_mappings(mappings, metadata, projections, {}, None, False, ["layer_0"]) + arrays = {"pca/layer_0": np.array([[0.1, 0.2], [0.3, 0.4]])} + result = _build_dataframe_for_mappings(mappings, metadata, arrays, {}, None, False, ["layer_0"]) assert "x" in result.columns assert "layer" in result.columns assert len(result) == 2 @@ -791,9 +791,9 @@ def test_build_dataframe_for_mappings_with_groups(self): def test_build_dataframe_for_mappings_error_wrapping(self): """Test that errors from _expand_field_mapping are wrapped with context.""" - # Create a mapping with a key pattern that will fail expansion due to no matching projections - # The key "factor_*" is a pattern that needs expansion, which fails when no projections match - mappings = {"x_*": ActivationVisualizationFieldRef(source="projections", key="factor_*", group_as="factor")} + # Create a mapping with a key pattern that will fail expansion due to no matching arrays + # The key "factor_*" is a pattern that needs expansion, which fails when no arrays match + mappings = {"x_*": ActivationVisualizationFieldRef(source="arrays", key="factor_*", group_as="factor")} metadata = {"step": np.array([1])} with pytest.raises(ConfigValidationError, match="Error expanding 'x_\\*' for layer"): 
_build_dataframe_for_mappings(mappings, metadata, {}, {}, None, False, ["layer_0"]) @@ -828,21 +828,21 @@ def test_build_dataframe_combined_mappings(self): combined = [ CombinedMappingSection( label="projected", - mappings={"x": ActivationVisualizationFieldRef(source="projections", key="pca", component=0)}, + mappings={"x": ActivationVisualizationFieldRef(source="arrays", key="pca", component=0)}, ), CombinedMappingSection( label="raw", - mappings={"x": ActivationVisualizationFieldRef(source="projections", key="raw", component=0)}, + mappings={"x": ActivationVisualizationFieldRef(source="arrays", key="raw", component=0)}, ), ] data_mapping = ActivationVisualizationDataMapping(mappings={}, combined=combined, combine_as="source") viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) metadata = {"step": np.array([1])} - projections = { + arrays = { "pca/layer_0": np.array([[0.1, 0.2]]), "raw/layer_0": np.array([[0.5, 0.6]]), } - result = _build_dataframe(viz_cfg, metadata, projections, {}, {}, None, None, False, ["layer_0"]) + result = _build_dataframe(viz_cfg, metadata, arrays, {}, {}, None, None, False, ["layer_0"]) assert "source" in result.columns assert set(result["source"]) == {"projected", "raw"} assert len(result) == 2 diff --git a/tests/analysis/test_layerwise_analysis.py b/tests/analysis/test_layerwise_analysis.py index 13831d69..eb1881f4 100644 --- a/tests/analysis/test_layerwise_analysis.py +++ b/tests/analysis/test_layerwise_analysis.py @@ -27,19 +27,19 @@ def analysis_inputs() -> tuple[dict[str, jnp.ndarray], jnp.ndarray, jnp.ndarray] def test_layerwise_analysis_linear_regression_namespacing(analysis_inputs) -> None: - """Metrics and projections should be namespace-qualified per layer.""" + """Metrics and arrays should be namespace-qualified per layer.""" activations, weights, belief_states = analysis_inputs analysis = LayerwiseAnalysis("linear_regression", last_token_only=True) - scalars, projections = analysis.analyze( + 
scalars, arrays = analysis.analyze( activations=activations, weights=weights, belief_states=belief_states, ) assert set(scalars) >= {"r2/layer_a", "r2/layer_b"} - assert set(projections) == { + assert set(arrays) == { "projected/layer_a", "projected/layer_b", "coeffs/layer_a", @@ -84,14 +84,15 @@ def test_pca_analysis_does_not_require_beliefs(analysis_inputs) -> None: "pca", analysis_kwargs={"n_components": 2, "variance_thresholds": (0.5,)}, ) - scalars, projections = analysis.analyze( + scalars, arrays = analysis.analyze( activations=activations, weights=weights, belief_states=None, ) - assert "cumvar_1/layer_a" in scalars + assert "var_exp/layer_a" in scalars assert "nc_50/layer_a" in scalars - assert "pca/layer_a" in projections + assert "pca/layer_a" in arrays + assert "cev/layer_a" in arrays def test_invalid_pca_kwargs() -> None: diff --git a/tests/analysis/test_pca.py b/tests/analysis/test_pca.py index b4fdebb0..f7e16f25 100644 --- a/tests/analysis/test_pca.py +++ b/tests/analysis/test_pca.py @@ -28,20 +28,22 @@ def test_variance_threshold_counts_increasing() -> None: def test_layer_pca_analysis_metrics() -> None: - """Layer PCA wrapper returns metrics and projections without beliefs.""" + """Layer PCA wrapper returns metrics and arrays without beliefs.""" activations = jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]) weights = jnp.ones(3) / 3.0 - scalars, projections = layer_pca_analysis( + scalars, arrays = layer_pca_analysis( activations, weights, belief_states=None, n_components=2, variance_thresholds=(0.5,), ) - assert "cumvar_1" in scalars assert "nc_50" in scalars - assert "pca" in projections - assert projections["pca"].shape == (3, 2) + assert "var_exp" in scalars + assert "pca" in arrays + assert arrays["pca"].shape == (3, 2) + assert "cev" in arrays + assert arrays["cev"].shape == (2,) def test_compute_weighted_pca_rejects_bad_weights_shape() -> None: @@ -94,14 +96,14 @@ def test_layer_pca_analysis_zero_variance_threshold_reporting() -> None: 
"""Layer PCA should propagate fallback counts into the scalar outputs.""" activations = jnp.ones((4, 3)) weights = jnp.ones(4) / 4.0 - scalars, projections = layer_pca_analysis( + scalars, arrays = layer_pca_analysis( activations, weights, belief_states=None, variance_thresholds=(0.5,), ) assert scalars["nc_50"] == 3.0 - assert projections["pca"].shape == (4, 3) + assert arrays["pca"].shape == (4, 3) def test_compute_weighted_pca_requires_two_dimensional_inputs() -> None: diff --git a/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml index 8a9156c5..1529c9ff 100644 --- a/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml +++ b/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml @@ -1,4 +1,4 @@ -name: tracker_with_factor_projections +name: tracker_with_factor_arrays instance: _target_: simplexity.activations.activation_tracker.ActivationTracker analyses: @@ -28,17 +28,17 @@ instance: - label: prediction mappings: factor_*_prob_0: - source: projections + source: arrays key: "factor_*/projected" component: 0 group_as: factor factor_*_prob_1: - source: projections + source: arrays key: "factor_*/projected" component: 1 group_as: factor factor_*_prob_2: - source: projections + source: arrays key: "factor_*/projected" component: 2 group_as: factor @@ -97,17 +97,17 @@ instance: - label: prediction mappings: factor_*_prob_0: - source: projections + source: arrays key: "factor_*/projected" component: 0 group_as: factor factor_*_prob_1: - source: projections + source: arrays key: "factor_*/projected" component: 1 group_as: factor factor_*_prob_2: - source: projections + source: arrays key: "factor_*/projected" component: 2 group_as: factor diff --git a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml index aa6c7a38..aece8498 100644 --- 
a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml +++ b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml @@ -1,4 +1,4 @@ -name: tracker_with_projections +name: tracker_with_arrays instance: _target_: simplexity.activations.activation_tracker.ActivationTracker analyses: @@ -18,7 +18,7 @@ instance: cumulative: false data_mapping: mappings: - pc_*: {source: projections, key: pca, component: "*"} + pc_*: {source: arrays, key: pca, component: "*"} belief_*: {source: belief_states, component: "*"} preprocessing: - type: combine_rgb @@ -41,29 +41,6 @@ instance: guides: title: "PCA Projection (3D)" subtitle: "All tokens, weighted by prefix probability" - - name: cumulative_explained_variance # <- NEW VISUALIZATION - controls: - dropdown: layer - accumulate_steps: true - cumulative: false - data_mapping: - scalar_series: - key_template: "cumvar_{index}/{layer}" - index_field: n_components - value_field: cumulative_explained_variance - backend: altair - layer: - geometry: - type: line - props: {} - aesthetics: - x: {field: n_components, type: quantitative, title: "Number of PCA Components"} - y: {field: cumulative_explained_variance, type: quantitative, title: "Cumulative Explained Variance"} - color: {field: step, type: nominal, title: Step} - size: {width: 600, height: 400} - guides: - title: "Cumulative Explained Variance by PCA Components" - subtitle: "All tokens, weighted by probability" regression: # <- THIS RETURNS SCALARS AND PROJECTIONS instance: @@ -80,9 +57,9 @@ instance: cumulative: false data_mapping: mappings: - prob_0: {source: projections, key: projected, component: 0} - prob_1: {source: projections, key: projected, component: 1} - prob_2: {source: projections, key: projected, component: 2} + prob_0: {source: arrays, key: projected, component: 0} + prob_1: {source: arrays, key: projected, component: 1} + prob_2: {source: arrays, key: projected, component: 2} belief_r: {source: belief_states, component: 0} belief_g: {source: 
belief_states, component: 1} belief_b: {source: belief_states, component: 2} @@ -113,9 +90,9 @@ instance: cumulative: false data_mapping: mappings: - prob_0: {source: projections, key: projected, component: 0} - prob_1: {source: projections, key: projected, component: 1} - prob_2: {source: projections, key: projected, component: 2} + prob_0: {source: arrays, key: projected, component: 0} + prob_1: {source: arrays, key: projected, component: 1} + prob_2: {source: arrays, key: projected, component: 2} preprocessing: - type: combine_rgb input_fields: [prob_0, prob_1, prob_2] @@ -203,9 +180,9 @@ instance: cumulative: false data_mapping: mappings: - prob_0: {source: projections, key: projected, component: 0} - prob_1: {source: projections, key: projected, component: 1} - prob_2: {source: projections, key: projected, component: 2} + prob_0: {source: arrays, key: projected, component: 0} + prob_1: {source: arrays, key: projected, component: 1} + prob_2: {source: arrays, key: projected, component: 2} preprocessing: - type: combine_rgb input_fields: [prob_0, prob_1, prob_2] diff --git a/tests/structured_configs/test_activation_tracker_config.py b/tests/structured_configs/test_activation_tracker_config.py index fa9d4607..e31b15f8 100644 --- a/tests/structured_configs/test_activation_tracker_config.py +++ b/tests/structured_configs/test_activation_tracker_config.py @@ -250,14 +250,14 @@ def test_instantiate_activation_tracker_builds_analysis_objects(tracker_cfg: Dic probs = jnp.ones((1, 2), dtype=jnp.float32) * 0.5 activations = {"layer": jnp.ones((1, 2, 4), dtype=jnp.float32)} - scalars, projections, visualizations = tracker.analyze( + scalars, arrays, visualizations = tracker.analyze( inputs=inputs, beliefs=beliefs, probs=probs, activations=activations, ) - assert "pca_custom/cumvar_1/layer" in scalars - assert any(key.startswith("linear/") for key in projections) + assert "pca_custom/var_exp/layer" in scalars + assert any(key.startswith("linear/") for key in arrays) assert 
visualizations == {} From 4050f50b8f85c15925445bedb1342483615e632f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Wed, 7 Jan 2026 15:36:19 -0800 Subject: [PATCH 15/35] return targets (#163) --- simplexity/analysis/linear_regression.py | 11 ++++++++--- tests/analysis/test_layerwise_analysis.py | 2 ++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/simplexity/analysis/linear_regression.py b/simplexity/analysis/linear_regression.py index 421c43fb..d6b3f7e7 100644 --- a/simplexity/analysis/linear_regression.py +++ b/simplexity/analysis/linear_regression.py @@ -84,12 +84,14 @@ def linear_regression( if fit_intercept: arrays = { "projected": predictions, + "targets": y_arr, "coeffs": beta[1:], # Linear coefficients (excluding intercept) "intercept": beta[:1], # Intercept term (keep 2D: [1, n_targets]) } else: arrays = { "projected": predictions, + "targets": y_arr, "coeffs": beta, # All parameters are coefficients when no intercept } @@ -187,12 +189,14 @@ def linear_regression_svd( if fit_intercept: arrays = { "projected": best_pred, + "targets": y_arr, "coeffs": best_beta[1:], # Linear coefficients (excluding intercept) "intercept": best_beta[:1], # Intercept term (keep 2D: [1, n_targets]) } else: arrays = { "projected": best_pred, + "targets": y_arr, "coeffs": best_beta, # All parameters are coefficients when no intercept } @@ -255,6 +259,7 @@ def _split_concat_results( coeffs_list = jnp.split(concat_arrays["coeffs"], split_indices, axis=-1) projections_list = jnp.split(concat_arrays["projected"], split_indices, axis=-1) + targets_list = jnp.split(concat_arrays["targets"], split_indices, axis=-1) # Handle intercept - split if present if "intercept" in concat_arrays: @@ -267,8 +272,8 @@ def _split_concat_results( metrics_kwargs = {k: v for k, v in kwargs.items() if k != "rcond_values"} results = [] - for factor, coeffs, intercept, projections in zip( - 
belief_states, coeffs_list, intercepts_list, projections_list, strict=True + for factor, coeffs, intercept, projections, targets in zip( + belief_states, coeffs_list, intercepts_list, projections_list, targets_list, strict=True ): # Reconstruct full beta for metrics computation if intercept is not None: @@ -286,7 +291,7 @@ def _split_concat_results( ) # Build factor arrays - include intercept only if present - factor_arrays = {"projected": projections, "coeffs": coeffs} + factor_arrays = {"projected": projections, "targets": targets, "coeffs": coeffs} if intercept is not None: factor_arrays["intercept"] = intercept diff --git a/tests/analysis/test_layerwise_analysis.py b/tests/analysis/test_layerwise_analysis.py index eb1881f4..1b837b46 100644 --- a/tests/analysis/test_layerwise_analysis.py +++ b/tests/analysis/test_layerwise_analysis.py @@ -42,6 +42,8 @@ def test_layerwise_analysis_linear_regression_namespacing(analysis_inputs) -> No assert set(arrays) == { "projected/layer_a", "projected/layer_b", + "targets/layer_a", + "targets/layer_b", "coeffs/layer_a", "coeffs/layer_b", "intercept/layer_a", From 1c06eaa483a5027e9ffe4e6bf10002bed03bd147 Mon Sep 17 00:00:00 2001 From: Loren AC Date: Tue, 13 Jan 2026 16:50:09 -0500 Subject: [PATCH 16/35] Extend format_layer_spec to handle all TransformerLens layer patterns (#167) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add support for: - Top-level hooks (hook_embed → embed) - Block component hooks (blocks.N.{comp}.hook_X → LN.{comp}.X) - ln_final hooks (ln_final.hook_X → ln_final.X) Co-authored-by: Claude Opus 4.5 --- simplexity/analysis/metric_keys.py | 42 ++++++++++++++++++++-------- tests/analysis/test_metric_keys.py | 44 ++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 12 deletions(-) diff --git a/simplexity/analysis/metric_keys.py b/simplexity/analysis/metric_keys.py index f66e6b41..f7b3cc26 100644 --- a/simplexity/analysis/metric_keys.py +++ 
b/simplexity/analysis/metric_keys.py @@ -23,9 +23,12 @@ def format_layer_spec(layer_name: str) -> str: """Format layer name into compact layer specification. Converts verbose layer names to compact specs: - - Block layers: "blocks.N.hook_X_Y" → "LN.X.Y" - - Special layers: "embed", "pos_embed", "ln_final" → unchanged - Concatenated: "concatenated" → "Lcat" + - Top-level hooks: "hook_embed" → "embed", "hook_pos_embed" → "pos_embed" + - Block direct hooks: "blocks.N.hook_X_Y" → "LN.X.Y" + - Block component hooks: "blocks.N.{comp}.hook_X" → "LN.{comp}.X" + - ln_final hooks: "ln_final.hook_X" → "ln_final.X" + - Other layers: unchanged Args: layer_name: Original layer name from activations dict @@ -36,27 +39,42 @@ def format_layer_spec(layer_name: str) -> str: Examples: >>> format_layer_spec("blocks.2.hook_resid_post") "L2.resid.post" - >>> format_layer_spec("blocks.0.hook_resid_pre") - "L0.resid.pre" - >>> format_layer_spec("blocks.10.hook_mlp_out") - "L10.mlp.out" - >>> format_layer_spec("embed") + >>> format_layer_spec("blocks.0.attn.hook_q") + "L0.attn.q" + >>> format_layer_spec("hook_embed") "embed" + >>> format_layer_spec("ln_final.hook_scale") + "ln_final.scale" >>> format_layer_spec("concatenated") "Lcat" """ if layer_name == "concatenated": return "Lcat" + if layer_name.startswith("hook_"): + return layer_name[5:] + + ln_final_pattern = r"^ln_final\.hook_(?P.+)$" + match = re.match(ln_final_pattern, layer_name) + if match: + return f"ln_final.{match.group('hook_name')}" + if not layer_name.startswith("blocks."): return layer_name - block_pattern = r"^blocks\.(?P\d+)\.hook_(?P.+)$" - match = re.match(block_pattern, layer_name) + direct_hook_pattern = r"^blocks\.(?P\d+)\.hook_(?P.+)$" + match = re.match(direct_hook_pattern, layer_name) + if match: + block_num = match.group("block_num") + hook_name = match.group("hook_name").replace("_", ".") + return f"L{block_num}.{hook_name}" + + component_hook_pattern = r"^blocks\.(?P\d+)\.(?P\w+)\.hook_(?P.+)$" + match = 
re.match(component_hook_pattern, layer_name) if match: block_num = match.group("block_num") - hook_name = match.group("hook_name") - simplified_hook_name = hook_name.replace("_", ".") - return f"L{block_num}.{simplified_hook_name}" + component = match.group("component") + hook_name = match.group("hook_name").replace("_", ".") + return f"L{block_num}.{component}.{hook_name}" return layer_name diff --git a/tests/analysis/test_metric_keys.py b/tests/analysis/test_metric_keys.py index 7c6cc09c..1b8c07bb 100644 --- a/tests/analysis/test_metric_keys.py +++ b/tests/analysis/test_metric_keys.py @@ -59,3 +59,47 @@ def test_format_layer_spec_block_and_hook_layer_with_extra_structure() -> None: layer_name = "blocks.2.hook_resid_post.invalid" expected_key = "L2.resid.post.invalid" assert format_layer_spec(layer_name) == expected_key + + +def test_format_layer_spec_hook_embed() -> None: + """Test that hook_embed is formatted to embed.""" + assert format_layer_spec("hook_embed") == "embed" + + +def test_format_layer_spec_hook_pos_embed() -> None: + """Test that hook_pos_embed is formatted to pos_embed.""" + assert format_layer_spec("hook_pos_embed") == "pos_embed" + + +def test_format_layer_spec_block_component_attn_hook() -> None: + """Test that block attention component hooks are formatted correctly.""" + assert format_layer_spec("blocks.0.attn.hook_q") == "L0.attn.q" + assert format_layer_spec("blocks.1.attn.hook_k") == "L1.attn.k" + assert format_layer_spec("blocks.2.attn.hook_v") == "L2.attn.v" + assert format_layer_spec("blocks.0.attn.hook_attn_scores") == "L0.attn.attn.scores" + assert format_layer_spec("blocks.0.attn.hook_pattern") == "L0.attn.pattern" + assert format_layer_spec("blocks.0.attn.hook_z") == "L0.attn.z" + + +def test_format_layer_spec_block_component_ln_hook() -> None: + """Test that block layer norm hooks are formatted correctly.""" + assert format_layer_spec("blocks.0.ln1.hook_scale") == "L0.ln1.scale" + assert 
format_layer_spec("blocks.0.ln1.hook_normalized") == "L0.ln1.normalized" + assert format_layer_spec("blocks.1.ln2.hook_scale") == "L1.ln2.scale" + + +def test_format_layer_spec_block_component_mlp_hook() -> None: + """Test that block MLP hooks are formatted correctly.""" + assert format_layer_spec("blocks.0.mlp.hook_pre") == "L0.mlp.pre" + assert format_layer_spec("blocks.0.mlp.hook_post") == "L0.mlp.post" + + +def test_format_layer_spec_ln_final_hook() -> None: + """Test that ln_final hooks are formatted correctly.""" + assert format_layer_spec("ln_final.hook_scale") == "ln_final.scale" + assert format_layer_spec("ln_final.hook_normalized") == "ln_final.normalized" + + +def test_format_layer_spec_ln_final_passthrough() -> None: + """Test that ln_final without hook prefix is passed through unchanged.""" + assert format_layer_spec("ln_final") == "ln_final" From 83a95280812cf32e72feb6b7624f2760e61c49af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Tue, 13 Jan 2026 13:50:13 -0800 Subject: [PATCH 17/35] option for no deduplication (#166) --- simplexity/activations/activation_analyses.py | 11 + simplexity/activations/activation_tracker.py | 4 + simplexity/analysis/layerwise_analysis.py | 7 + simplexity/utils/analysis_utils.py | 116 +++++++++- tests/utils/test_analysis_utils.py | 212 ++++++++++++++++++ 5 files changed, 349 insertions(+), 1 deletion(-) diff --git a/simplexity/activations/activation_analyses.py b/simplexity/activations/activation_analyses.py index a0069036..c235bada 100644 --- a/simplexity/activations/activation_analyses.py +++ b/simplexity/activations/activation_analyses.py @@ -31,6 +31,11 @@ def skip_first_token(self) -> bool: """Whether to skip the first token (useful for off-manifold initial states).""" ... + @property + def skip_deduplication(self) -> bool: + """Whether to skip prefix/sequence deduplication (faster for large vocabularies).""" + ... 
+ @property def requires_belief_states(self) -> bool: """Whether the analysis needs belief state targets.""" @@ -58,6 +63,7 @@ def __init__( concat_layers: bool = False, use_probs_as_weights: bool = True, skip_first_token: bool = False, + skip_deduplication: bool = False, ) -> None: analysis_kwargs: dict[str, Any] = { "n_components": n_components, @@ -69,6 +75,7 @@ def __init__( concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, skip_first_token=skip_first_token, + skip_deduplication=skip_deduplication, analysis_kwargs=analysis_kwargs, ) @@ -83,6 +90,7 @@ def __init__( concat_layers: bool = False, use_probs_as_weights: bool = True, skip_first_token: bool = False, + skip_deduplication: bool = False, fit_intercept: bool = True, concat_belief_states: bool = False, compute_subspace_orthogonality: bool = False, @@ -93,6 +101,7 @@ def __init__( concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, skip_first_token=skip_first_token, + skip_deduplication=skip_deduplication, analysis_kwargs={ "fit_intercept": fit_intercept, "concat_belief_states": concat_belief_states, @@ -111,6 +120,7 @@ def __init__( concat_layers: bool = False, use_probs_as_weights: bool = True, skip_first_token: bool = False, + skip_deduplication: bool = False, rcond_values: Sequence[float] | None = None, fit_intercept: bool = True, concat_belief_states: bool = False, @@ -129,5 +139,6 @@ def __init__( concat_layers=concat_layers, use_probs_as_weights=use_probs_as_weights, skip_first_token=skip_first_token, + skip_deduplication=skip_deduplication, analysis_kwargs=analysis_kwargs, ) diff --git a/simplexity/activations/activation_tracker.py b/simplexity/activations/activation_tracker.py index 01281d7f..949644eb 100644 --- a/simplexity/activations/activation_tracker.py +++ b/simplexity/activations/activation_tracker.py @@ -51,6 +51,7 @@ class PrepareOptions(NamedTuple): concat_layers: bool use_probs_as_weights: bool skip_first_token: bool = False + skip_deduplication: 
bool = False def _get_uniform_weights(n_samples: int, dtype: DTypeLike) -> jax.Array: @@ -99,6 +100,7 @@ def prepare_activations( activations_by_layer=activations, select_last_token=prepare_options.last_token_only, skip_first_token=prepare_options.skip_first_token, + skip_deduplication=prepare_options.skip_deduplication, ) layer_acts = dataset.activations_by_layer @@ -168,6 +170,7 @@ def analyze( analysis.concat_layers, analysis.use_probs_as_weights, analysis.skip_first_token, + analysis.skip_deduplication, ) config_key = prepare_options @@ -191,6 +194,7 @@ def analyze( analysis.concat_layers, analysis.use_probs_as_weights, analysis.skip_first_token, + analysis.skip_deduplication, ) prepared = preprocessing_cache[prepare_options] diff --git a/simplexity/analysis/layerwise_analysis.py b/simplexity/analysis/layerwise_analysis.py index 12a87226..70317486 100644 --- a/simplexity/analysis/layerwise_analysis.py +++ b/simplexity/analysis/layerwise_analysis.py @@ -140,6 +140,7 @@ def __init__( concat_layers: bool = False, use_probs_as_weights: bool = True, skip_first_token: bool = False, + skip_deduplication: bool = False, analysis_kwargs: Mapping[str, Any] | None = None, ) -> None: if analysis_type not in ANALYSIS_REGISTRY: @@ -152,6 +153,7 @@ def __init__( self._concat_layers = concat_layers self._use_probs_as_weights = use_probs_as_weights self._skip_first_token = skip_first_token + self._skip_deduplication = skip_deduplication @property def last_token_only(self) -> bool: @@ -178,6 +180,11 @@ def skip_first_token(self) -> bool: """Whether to skip the first token (useful for off-manifold initial states).""" return self._skip_first_token + @property + def skip_deduplication(self) -> bool: + """Whether to skip prefix/sequence deduplication (faster for large vocabularies).""" + return self._skip_deduplication + def analyze( self, activations: Mapping[str, jax.Array], diff --git a/simplexity/utils/analysis_utils.py b/simplexity/utils/analysis_utils.py index 
22879124..f0c68ab4 100644 --- a/simplexity/utils/analysis_utils.py +++ b/simplexity/utils/analysis_utils.py @@ -173,6 +173,102 @@ class DeduplicatedDataset: activations_by_layer: dict[str, jax.Array] +def build_raw_dataset( + inputs: jax.Array, + beliefs: jax.Array | tuple[jax.Array, ...], + probs: jax.Array, + activations_by_layer: dict[str, jax.Array], + skip_first_token: bool = False, +) -> DeduplicatedDataset: + """Return dataset without deduplication - flatten batch x seq_len using vectorized operations.""" + if skip_first_token: + inputs = inputs[:, 1:] + if isinstance(beliefs, tuple): + beliefs = tuple(b[:, 1:, ...] for b in beliefs) + else: + beliefs = beliefs[:, 1:, ...] + probs = probs[:, 1:] + activations_by_layer = {name: acts[:, 1:, ...] for name, acts in activations_by_layer.items()} + + batch_size, seq_len = inputs.shape + n_samples = batch_size * seq_len + + # Flatten beliefs: (batch, seq_len, ...) -> (n_samples, ...) + if isinstance(beliefs, tuple): + flat_beliefs: jax.Array | tuple[jax.Array, ...] 
= tuple(b.reshape(n_samples, *b.shape[2:]) for b in beliefs) + else: + flat_beliefs = beliefs.reshape(n_samples, *beliefs.shape[2:]) + + # Flatten and normalize probs + flat_probs = probs.reshape(n_samples) + total_mass = flat_probs.sum() + if total_mass > 0: + flat_probs = flat_probs / total_mass + else: + raise ValueError("Total probability mass is zero") + + # Flatten activations + flat_activations = {name: acts.reshape(n_samples, *acts.shape[2:]) for name, acts in activations_by_layer.items()} + + # Generate sequences for metadata using numpy (faster than JAX for tuple creation) + inputs_np = np.asarray(inputs) + sequences: list[tuple[int, ...]] = [ + tuple(inputs_np[i, : j + 1].tolist()) for i in range(batch_size) for j in range(seq_len) + ] + + return DeduplicatedDataset( + sequences=sequences, + beliefs=flat_beliefs, + probs=flat_probs, + activations_by_layer=flat_activations, + ) + + +def build_raw_last_token_dataset( + inputs: jax.Array, + beliefs: jax.Array | tuple[jax.Array, ...], + probs: jax.Array, + activations_by_layer: dict[str, jax.Array], + skip_first_token: bool = False, +) -> DeduplicatedDataset: + """Return last-token dataset without deduplication - keep all batch samples.""" + if skip_first_token: + inputs = inputs[:, 1:] + if isinstance(beliefs, tuple): + beliefs = tuple(b[:, 1:, ...] for b in beliefs) + else: + beliefs = beliefs[:, 1:, ...] + probs = probs[:, 1:] + activations_by_layer = {name: acts[:, 1:, ...] for name, acts in activations_by_layer.items()} + + # Select last token + if isinstance(beliefs, tuple): + last_beliefs: jax.Array | tuple[jax.Array, ...] 
= tuple(b[:, -1, :] for b in beliefs) + else: + last_beliefs = beliefs[:, -1, :] + last_probs = probs[:, -1] + last_activations = {name: acts[:, -1, :] for name, acts in activations_by_layer.items()} + + # Normalize probs + total_mass = last_probs.sum() + if total_mass > 0: + last_probs = last_probs / total_mass + else: + raise ValueError("Total probability mass is zero") + + # Generate sequences for metadata + inputs_np = np.asarray(inputs) + batch_size = inputs.shape[0] + sequences: list[tuple[int, ...]] = [tuple(inputs_np[i].tolist()) for i in range(batch_size)] + + return DeduplicatedDataset( + sequences=sequences, + beliefs=last_beliefs, + probs=last_probs, + activations_by_layer=last_activations, + ) + + def build_deduplicated_dataset( inputs: jax.Array, beliefs: jax.Array | tuple[jax.Array, ...], @@ -180,8 +276,26 @@ def build_deduplicated_dataset( activations_by_layer: dict[str, jax.Array], select_last_token: bool = False, skip_first_token: bool = False, + skip_deduplication: bool = False, ) -> DeduplicatedDataset: - """Deduplicate everything by prefix.""" + """Build dataset, optionally deduplicating by prefix or sequence.""" + if skip_deduplication: + if select_last_token: + return build_raw_last_token_dataset( + inputs, + beliefs, + probs, + activations_by_layer, + skip_first_token=skip_first_token, + ) + else: + return build_raw_dataset( + inputs, + beliefs, + probs, + activations_by_layer, + skip_first_token=skip_first_token, + ) if select_last_token: return build_last_token_dataset( inputs, diff --git a/tests/utils/test_analysis_utils.py b/tests/utils/test_analysis_utils.py index ebb9d2bb..e6c10cf9 100644 --- a/tests/utils/test_analysis_utils.py +++ b/tests/utils/test_analysis_utils.py @@ -5,8 +5,11 @@ import pytest from simplexity.utils.analysis_utils import ( + build_deduplicated_dataset, build_last_token_dataset, build_prefix_dataset, + build_raw_dataset, + build_raw_last_token_dataset, dedup_last_token_probs_sum, dedup_last_token_tensor_first, 
dedup_probs_sum, @@ -391,3 +394,212 @@ def test_preserves_dimensions(self, simple_inputs, simple_beliefs, simple_probs, # Check layer dimensions assert dataset.activations_by_layer["layer_0"].shape[1] == 4 assert dataset.activations_by_layer["layer_1"].shape[1] == 6 + + +class TestBuildRawDataset: + """Test build_raw_dataset (skip deduplication) function.""" + + def test_flattens_batch_and_seq_len(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that batch and seq_len dimensions are flattened.""" + batch_size, seq_len = simple_inputs.shape + expected_samples = batch_size * seq_len + + dataset = build_raw_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + assert isinstance(dataset.beliefs, jax.Array) + assert dataset.beliefs.shape[0] == expected_samples + assert dataset.probs.shape[0] == expected_samples + for layer_acts in dataset.activations_by_layer.values(): + assert layer_acts.shape[0] == expected_samples + + def test_preserves_feature_dimensions(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that feature dimensions are preserved after flattening.""" + dataset = build_raw_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + # Beliefs should have 2 features + assert isinstance(dataset.beliefs, jax.Array) + assert dataset.beliefs.shape[1] == 2 + + # Layer 0 should have 4 features, Layer 1 should have 6 features + assert dataset.activations_by_layer["layer_0"].shape[1] == 4 + assert dataset.activations_by_layer["layer_1"].shape[1] == 6 + + def test_probs_normalized(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that probabilities are normalized to sum to 1.""" + dataset = build_raw_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + assert jnp.allclose(jnp.sum(dataset.probs), 1.0) + + def test_sequences_generated(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + 
"""Test that sequences metadata is generated correctly.""" + batch_size, seq_len = simple_inputs.shape + dataset = build_raw_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + # Should have batch_size * seq_len sequences + assert len(dataset.sequences) == batch_size * seq_len + + # First sequence should be prefix of length 1 from first batch item + assert dataset.sequences[0] == (1,) + # Second sequence should be prefix of length 2 from first batch item + assert dataset.sequences[1] == (1, 2) + # Third sequence should be full prefix from first batch item + assert dataset.sequences[2] == (1, 2, 3) + + def test_skip_first_token(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test skip_first_token option.""" + batch_size, seq_len = simple_inputs.shape + expected_samples = batch_size * (seq_len - 1) + + dataset = build_raw_dataset( + simple_inputs, simple_beliefs, simple_probs, simple_activations, skip_first_token=True + ) + + assert isinstance(dataset.beliefs, jax.Array) + assert dataset.beliefs.shape[0] == expected_samples + assert dataset.probs.shape[0] == expected_samples + + def test_tuple_beliefs(self, simple_inputs, simple_probs, simple_activations): + """Test with tuple beliefs (factored processes).""" + batch_size, seq_len = simple_inputs.shape + # Create tuple of beliefs for factored process + beliefs_factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.1 + beliefs_factor_1 = jnp.ones((batch_size, seq_len, 4)) * 0.2 + tuple_beliefs = (beliefs_factor_0, beliefs_factor_1) + + dataset = build_raw_dataset(simple_inputs, tuple_beliefs, simple_probs, simple_activations) + + assert isinstance(dataset.beliefs, tuple) + assert len(dataset.beliefs) == 2 + assert dataset.beliefs[0].shape == (batch_size * seq_len, 3) + assert dataset.beliefs[1].shape == (batch_size * seq_len, 4) + + def test_more_samples_than_dedup(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that raw dataset has 
more samples than deduplicated (when duplicates exist).""" + raw_dataset = build_raw_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + dedup_dataset = build_prefix_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + # Raw should have all batch*seq_len samples + assert isinstance(raw_dataset.beliefs, jax.Array) + assert isinstance(dedup_dataset.beliefs, jax.Array) + assert raw_dataset.beliefs.shape[0] >= dedup_dataset.beliefs.shape[0] + + +class TestBuildRawLastTokenDataset: + """Test build_raw_last_token_dataset (skip deduplication, last token only) function.""" + + def test_keeps_all_batch_samples(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that all batch samples are kept (no deduplication).""" + batch_size = simple_inputs.shape[0] + + dataset = build_raw_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + assert isinstance(dataset.beliefs, jax.Array) + assert dataset.beliefs.shape[0] == batch_size + assert dataset.probs.shape[0] == batch_size + + def test_selects_last_token(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that last token is selected from each sequence.""" + dataset = build_raw_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + # Check beliefs match last token + assert isinstance(dataset.beliefs, jax.Array) + assert jnp.allclose(dataset.beliefs[0], simple_beliefs[0, -1, :]) + assert jnp.allclose(dataset.beliefs[1], simple_beliefs[1, -1, :]) + + def test_probs_normalized(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that probabilities are normalized.""" + dataset = build_raw_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + assert jnp.allclose(jnp.sum(dataset.probs), 1.0) + + def test_sequences_are_full_sequences(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + 
"""Test that sequences metadata contains full sequences.""" + dataset = build_raw_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + # Should have one sequence per batch item + assert len(dataset.sequences) == simple_inputs.shape[0] + + # Each should be a full sequence + assert dataset.sequences[0] == (1, 2, 3) + assert dataset.sequences[1] == (1, 2, 4) + assert dataset.sequences[2] == (1, 2, 3) + + def test_skip_first_token(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test skip_first_token option.""" + dataset = build_raw_last_token_dataset( + simple_inputs, simple_beliefs, simple_probs, simple_activations, skip_first_token=True + ) + + # Sequences should start from second token + assert dataset.sequences[0] == (2, 3) + assert dataset.sequences[1] == (2, 4) + + def test_tuple_beliefs(self, simple_inputs, simple_probs, simple_activations): + """Test with tuple beliefs (factored processes).""" + batch_size, seq_len = simple_inputs.shape + beliefs_factor_0 = jnp.ones((batch_size, seq_len, 3)) * 0.1 + beliefs_factor_1 = jnp.ones((batch_size, seq_len, 4)) * 0.2 + tuple_beliefs = (beliefs_factor_0, beliefs_factor_1) + + dataset = build_raw_last_token_dataset(simple_inputs, tuple_beliefs, simple_probs, simple_activations) + + assert isinstance(dataset.beliefs, tuple) + assert len(dataset.beliefs) == 2 + assert dataset.beliefs[0].shape == (batch_size, 3) + assert dataset.beliefs[1].shape == (batch_size, 4) + + def test_more_samples_than_dedup(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that raw dataset has more samples than deduplicated when duplicates exist.""" + raw_dataset = build_raw_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + dedup_dataset = build_last_token_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + + # Raw should have 3 samples, dedup should have 2 (sequences 0 and 2 are identical) + 
assert isinstance(raw_dataset.beliefs, jax.Array) + assert isinstance(dedup_dataset.beliefs, jax.Array) + assert raw_dataset.beliefs.shape[0] == 3 + assert dedup_dataset.beliefs.shape[0] == 2 + + +class TestBuildDeduplicatedDatasetSkipDeduplication: + """Test build_deduplicated_dataset with skip_deduplication flag.""" + + def test_skip_deduplication_false_uses_dedup(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that skip_deduplication=False uses deduplication.""" + dataset = build_deduplicated_dataset( + simple_inputs, simple_beliefs, simple_probs, simple_activations, skip_deduplication=False + ) + + # With deduplication, should have fewer samples due to duplicate prefixes + assert isinstance(dataset.beliefs, jax.Array) + dedup_dataset = build_prefix_dataset(simple_inputs, simple_beliefs, simple_probs, simple_activations) + assert isinstance(dedup_dataset.beliefs, jax.Array) + assert dataset.beliefs.shape[0] == dedup_dataset.beliefs.shape[0] + + def test_skip_deduplication_true_skips_dedup(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test that skip_deduplication=True skips deduplication.""" + batch_size, seq_len = simple_inputs.shape + + dataset = build_deduplicated_dataset( + simple_inputs, simple_beliefs, simple_probs, simple_activations, skip_deduplication=True + ) + + # Should have all batch*seq_len samples + assert isinstance(dataset.beliefs, jax.Array) + assert dataset.beliefs.shape[0] == batch_size * seq_len + + def test_skip_deduplication_with_last_token(self, simple_inputs, simple_beliefs, simple_probs, simple_activations): + """Test skip_deduplication with select_last_token=True.""" + batch_size = simple_inputs.shape[0] + + dataset = build_deduplicated_dataset( + simple_inputs, + simple_beliefs, + simple_probs, + simple_activations, + select_last_token=True, + skip_deduplication=True, + ) + + # Should have all batch samples (not deduplicated) + assert isinstance(dataset.beliefs, 
jax.Array) + assert dataset.beliefs.shape[0] == batch_size From d4f6021d799654392884683908b918b36e8f8492 Mon Sep 17 00:00:00 2001 From: Loren AC Date: Thu, 15 Jan 2026 13:07:52 -0800 Subject: [PATCH 18/35] Add IndependentFactoredGenerativeProcess for frozen factor support (#168) * Add IndependentFactoredGenerativeProcess for frozen factor support Introduces a new generative process subclass that samples emissions from each factor independently and supports "frozen" factors whose sequences are identical across batch samples. This enables generating datasets where k factors share realizations while (n-k) factors vary independently. Key features: - Per-factor independent emission sampling (not from joint distribution) - Frozen factors specified via frozen_factor_indices and frozen_key - Dual key stream approach: frozen factors use shared key, unfrozen use per-sample keys Co-Authored-By: Claude Opus 4.5 * Simplify IndependentFactoredGenerativeProcess implementation - Remove _generate_with_frozen method, merge logic into single generate method - Move None handling to edges (emit_observation and generate) so _emit_observation_per_factor always receives valid arrays - Remove unnecessary super().generate() delegation - Cleaner code structure with fewer methods and one code path Co-Authored-By: Claude Opus 4.5 * Fix formatting * Fix pylint warnings in IndependentFactoredGenerativeProcess tests Replace unnecessary lambdas with direct method references and add pylint disable for too-few-public-methods on TestStateTransitions. 
Co-Authored-By: Claude Opus 4.5 --------- Co-authored-by: Claude Opus 4.5 --- ...independent_factored_generative_process.py | 189 ++++++++++ ...independent_factored_generative_process.py | 352 ++++++++++++++++++ 2 files changed, 541 insertions(+) create mode 100644 simplexity/generative_processes/independent_factored_generative_process.py create mode 100644 tests/generative_processes/test_independent_factored_generative_process.py diff --git a/simplexity/generative_processes/independent_factored_generative_process.py b/simplexity/generative_processes/independent_factored_generative_process.py new file mode 100644 index 00000000..acb83710 --- /dev/null +++ b/simplexity/generative_processes/independent_factored_generative_process.py @@ -0,0 +1,189 @@ +"""Independent factored generative process with per-factor sampling and frozen factors.""" + +from __future__ import annotations + +from collections.abc import Sequence + +import chex +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.factored_generative_process import ( + ComponentType, + FactoredGenerativeProcess, + FactoredState, +) +from simplexity.generative_processes.structures import ConditionalStructure +from simplexity.generative_processes.structures.independent import IndependentStructure +from simplexity.logger import SIMPLEXITY_LOGGER +from simplexity.utils.factoring_utils import compute_obs_dist_for_variant + + +class IndependentFactoredGenerativeProcess(FactoredGenerativeProcess): + """Factored generative process with independent per-factor sampling and frozen factors. + + This variant samples emissions from each factor independently (not from the joint + distribution), then combines them using TokenEncoder.tuple_to_token. It also supports + "frozen" factors whose entire emission sequences are identical across batch samples. + + Frozen factors use keys derived from a stored `frozen_key`, while unfrozen factors + use keys derived from the per-sample key. 
Since the same `frozen_key` produces the + same derived keys across all batch samples, frozen factors naturally produce + identical sequences. + + Attributes: + frozen_factor_indices: frozenset of factor indices that are frozen + frozen_key: JAX random key used for generating frozen sequences + """ + + frozen_factor_indices: frozenset[int] + frozen_key: jax.Array | None + + def __init__( + self, + *, + component_types: Sequence[ComponentType], + transition_matrices: Sequence[jax.Array], + normalizing_eigenvectors: Sequence[jax.Array], + initial_states: Sequence[jax.Array], + structure: ConditionalStructure, + device: str | None = None, + frozen_factor_indices: frozenset[int] = frozenset(), + frozen_key: jax.Array | None = None, + ) -> None: + """Initialize independent factored generative process. + + Args: + component_types: Type of each factor ("hmm" or "ghmm") + transition_matrices: Per-factor transition tensors. + transition_matrices[i] has shape [K_i, V_i, S_i, S_i] + normalizing_eigenvectors: Per-factor eigenvectors for GHMM. + normalizing_eigenvectors[i] has shape [K_i, S_i] + initial_states: Initial state per factor (shape [S_i]) + structure: Conditional structure defining factor interactions + device: Device to place arrays on (e.g., "cpu", "gpu") + frozen_factor_indices: Indices of factors whose sequences are frozen across batch + frozen_key: JAX random key for frozen sequence generation. Required if + frozen_factor_indices is non-empty. 
+ + Raises: + ValueError: If frozen_factor_indices is non-empty but frozen_key is None + ValueError: If frozen_factor_indices contains invalid indices + """ + super().__init__( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + device=device, + ) + + num_factors = len(component_types) + for idx in frozen_factor_indices: + if idx < 0 or idx >= num_factors: + raise ValueError(f"Invalid frozen factor index {idx}. Must be in [0, {num_factors})") + + if frozen_factor_indices and frozen_key is None: + raise ValueError("frozen_key is required when frozen_factor_indices is non-empty") + + if not isinstance(structure, IndependentStructure): + SIMPLEXITY_LOGGER.warning( + "IndependentFactoredGenerativeProcess is designed for IndependentStructure. " + "Using %s may produce unexpected results.", + type(structure).__name__, + ) + + self.frozen_factor_indices = frozen_factor_indices + self.frozen_key = frozen_key + + def _emit_observation_per_factor(self, state: FactoredState, key: jax.Array, frozen_key: jax.Array) -> jax.Array: + """Sample each factor independently, choosing key based on frozen status. 
+ + Args: + state: Tuple of state vectors (one per factor) + key: JAX random key for unfrozen factors + frozen_key: JAX random key for frozen factors + + Returns: + Composite observation (scalar token) + """ + num_factors = len(self.component_types) + + factor_keys = jax.random.split(key, num_factors) + frozen_factor_keys = jax.random.split(frozen_key, num_factors) + + per_factor_tokens = [] + for i in range(num_factors): + if i in self.frozen_factor_indices: + factor_key = frozen_factor_keys[i] + else: + factor_key = factor_keys[i] + + T_i = self.transition_matrices[i][0] + norm_i = self.normalizing_eigenvectors[i][0] if self.component_types[i] == "ghmm" else None + p_i = compute_obs_dist_for_variant(self.component_types[i], state[i], T_i, norm_i) + + token_i = jax.random.categorical(factor_key, jnp.log(p_i)) + per_factor_tokens.append(token_i) + + return self.encoder.tuple_to_token(tuple(per_factor_tokens)) + + @eqx.filter_jit + def emit_observation(self, state: FactoredState, key: jax.Array) -> jax.Array: + """Sample composite observation by independently sampling each factor. + + Args: + state: Tuple of state vectors (one per factor) + key: JAX random key + + Returns: + Composite observation (scalar token) + """ + frozen_key = self.frozen_key if self.frozen_key is not None else key + return self._emit_observation_per_factor(state, key, frozen_key) + + @eqx.filter_vmap(in_axes=(None, 0, 0, None, None)) + def generate( + self, state: FactoredState, key: chex.PRNGKey, sequence_len: int, return_all_states: bool + ) -> tuple[FactoredState, chex.Array]: + """Generate sequences with frozen factor support. + + For frozen factors, the same key stream is used across all batch samples, + producing identical emission sequences. For unfrozen factors, each batch + sample uses its own key stream, producing varying sequences. 
+ + Args: + state: Initial states, one per factor + key: Random key for this batch sample + sequence_len: Number of timesteps to generate + return_all_states: Whether to return all intermediate states + + Returns: + Tuple of (final_states or all_states, observations) + """ + keys = jax.random.split(key, sequence_len) + frozen_keys = jax.random.split(self.frozen_key, sequence_len) if self.frozen_key is not None else keys + + def gen_obs( + carry_state: FactoredState, inputs: tuple[jax.Array, jax.Array] + ) -> tuple[FactoredState, chex.Array]: + key_t, frozen_key_t = inputs + obs = self._emit_observation_per_factor(carry_state, key_t, frozen_key_t) + new_state = self.transition_states(carry_state, obs) + return new_state, obs + + def gen_states_and_obs( + carry_state: FactoredState, inputs: tuple[jax.Array, jax.Array] + ) -> tuple[FactoredState, tuple[FactoredState, chex.Array]]: + key_t, frozen_key_t = inputs + obs = self._emit_observation_per_factor(carry_state, key_t, frozen_key_t) + new_state = self.transition_states(carry_state, obs) + return new_state, (carry_state, obs) + + if return_all_states: + _, (states, obs) = jax.lax.scan(gen_states_and_obs, state, (keys, frozen_keys)) + return states, obs + + return jax.lax.scan(gen_obs, state, (keys, frozen_keys)) diff --git a/tests/generative_processes/test_independent_factored_generative_process.py b/tests/generative_processes/test_independent_factored_generative_process.py new file mode 100644 index 00000000..03673eee --- /dev/null +++ b/tests/generative_processes/test_independent_factored_generative_process.py @@ -0,0 +1,352 @@ +"""Tests for IndependentFactoredGenerativeProcess.""" + +import chex +import jax +import jax.numpy as jnp +import pytest + +from simplexity.generative_processes.independent_factored_generative_process import ( + IndependentFactoredGenerativeProcess, +) +from simplexity.generative_processes.structures import IndependentStructure, SequentialConditional + + +def 
_tensor_from_probs(variant_probs): + arr = jnp.asarray(variant_probs, dtype=jnp.float32) + return arr[..., None, None] + + +@pytest.fixture +def two_factor_independent_process(): + """Simple two-factor process with IndependentStructure.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + return IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + +@pytest.fixture +def three_factor_process_with_frozen(): + """Three-factor process with factor 1 frozen.""" + component_types = ("hmm", "hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + _tensor_from_probs([[0.8, 0.2]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + return IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + frozen_factor_indices=frozenset({1}), + frozen_key=jax.random.PRNGKey(42), + ) + + +class TestEmitObservation: + """Tests for emit_observation method.""" + + def test_emit_observation_returns_valid_token(self, two_factor_independent_process): + """emit_observation should 
return a valid composite token.""" + process = two_factor_independent_process + token = process.emit_observation(process.initial_state, jax.random.PRNGKey(0)) + assert token.shape == () + assert 0 <= int(token) < process.vocab_size + + def test_emit_observation_samples_independently(self, two_factor_independent_process): + """Samples should match product of marginal distributions.""" + process = two_factor_independent_process + n_samples = 10000 + keys = jax.random.split(jax.random.PRNGKey(0), n_samples) + + tokens = jax.vmap(lambda k: process.emit_observation(process.initial_state, k))(keys) + factor_tokens = process.encoder.extract_factors_vectorized(tokens) + + # Expected marginals: factor 0 is [0.6, 0.4], factor 1 is [0.7, 0.3] + empirical_0 = jnp.bincount(factor_tokens[:, 0], length=2) / n_samples + empirical_1 = jnp.bincount(factor_tokens[:, 1], length=2) / n_samples + + chex.assert_trees_all_close(empirical_0, jnp.array([0.6, 0.4]), atol=0.05) + chex.assert_trees_all_close(empirical_1, jnp.array([0.7, 0.3]), atol=0.05) + + +class TestGenerate: + """Tests for generate method.""" + + def test_generate_produces_correct_shapes(self, two_factor_independent_process): + """generate should produce correctly shaped outputs.""" + process = two_factor_independent_process + batch_size = 4 + seq_len = 10 + + batch_states = tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + final_states, observations = process.generate(batch_states, keys, seq_len, False) + + assert observations.shape == (batch_size, seq_len) + assert final_states[0].shape == (batch_size, 1) + assert final_states[1].shape == (batch_size, 1) + + def test_generate_returns_all_states_when_requested(self, two_factor_independent_process): + """generate with return_all_states=True should return state sequences.""" + process = two_factor_independent_process + batch_size = 4 + seq_len = 10 + + batch_states = 
tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + all_states, observations = process.generate(batch_states, keys, seq_len, True) + + assert observations.shape == (batch_size, seq_len) + assert all_states[0].shape == (batch_size, seq_len, 1) + assert all_states[1].shape == (batch_size, seq_len, 1) + + +class TestFrozenFactors: + """Tests for frozen factor behavior.""" + + def test_frozen_factor_same_across_batch(self, three_factor_process_with_frozen): + """Frozen factor should produce identical sequences across batch samples.""" + process = three_factor_process_with_frozen + batch_size = 8 + seq_len = 20 + + batch_states = tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(123), batch_size) + + _, observations = process.generate(batch_states, keys, seq_len, False) + factor_tokens = jax.vmap(process.encoder.extract_factors_vectorized)(observations) + + # Factor 1 (index 1) is frozen - should be identical across batch + frozen_factor_sequences = factor_tokens[:, :, 1] + for i in range(1, batch_size): + chex.assert_trees_all_equal(frozen_factor_sequences[0], frozen_factor_sequences[i]) + + def test_unfrozen_factors_vary_across_batch(self, three_factor_process_with_frozen): + """Unfrozen factors should vary across batch samples.""" + process = three_factor_process_with_frozen + batch_size = 8 + seq_len = 20 + + batch_states = tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(456), batch_size) + + _, observations = process.generate(batch_states, keys, seq_len, False) + factor_tokens = jax.vmap(process.encoder.extract_factors_vectorized)(observations) + + # Factors 0 and 2 are unfrozen - should differ across batch + unfrozen_0_sequences = factor_tokens[:, :, 0] + unfrozen_2_sequences = factor_tokens[:, :, 2] + + # Check that not all samples 
are identical + assert not jnp.all(unfrozen_0_sequences[0] == unfrozen_0_sequences[1]) + assert not jnp.all(unfrozen_2_sequences[0] == unfrozen_2_sequences[1]) + + def test_frozen_sequences_reproducible(self, three_factor_process_with_frozen): + """Frozen factor sequences should be reproducible across generate() calls.""" + process = three_factor_process_with_frozen + batch_size = 4 + seq_len = 15 + + batch_states = tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + + # First generation + keys1 = jax.random.split(jax.random.PRNGKey(100), batch_size) + _, obs1 = process.generate(batch_states, keys1, seq_len, False) + factor_tokens1 = jax.vmap(process.encoder.extract_factors_vectorized)(obs1) + + # Second generation with different sample keys + keys2 = jax.random.split(jax.random.PRNGKey(200), batch_size) + _, obs2 = process.generate(batch_states, keys2, seq_len, False) + factor_tokens2 = jax.vmap(process.encoder.extract_factors_vectorized)(obs2) + + # Frozen factor should be the same in both calls + chex.assert_trees_all_equal(factor_tokens1[:, :, 1], factor_tokens2[:, :, 1]) + + def test_all_factors_frozen(self): + """With all factors frozen, all batch samples should be identical.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + process = IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + frozen_factor_indices=frozenset({0, 1}), + frozen_key=jax.random.PRNGKey(999), + ) + + batch_size = 4 + seq_len = 10 + batch_states = 
tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + _, observations = process.generate(batch_states, keys, seq_len, False) + + # All batch samples should be identical + for i in range(1, batch_size): + chex.assert_trees_all_equal(observations[0], observations[i]) + + def test_no_frozen_factors_matches_normal_behavior(self, two_factor_independent_process): + """With no frozen factors, behavior should match normal generation.""" + process = two_factor_independent_process + batch_size = 4 + seq_len = 10 + + batch_states = tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + _, observations = process.generate(batch_states, keys, seq_len, False) + + # All tokens should be valid + assert jnp.all(observations >= 0) + assert jnp.all(observations < process.vocab_size) + + # Batch samples should differ (with high probability) + assert not jnp.all(observations[0] == observations[1]) + + +class TestValidation: + """Tests for constructor validation.""" + + def test_requires_frozen_key_when_frozen_indices_nonempty(self): + """Should raise ValueError if frozen_factor_indices is non-empty but frozen_key is None.""" + component_types = ("hmm",) + transition_matrices = (_tensor_from_probs([[0.6, 0.4]]),) + normalizing_eigenvectors = (jnp.ones((1, 1), dtype=jnp.float32),) + initial_states = (jnp.array([1.0], dtype=jnp.float32),) + structure = IndependentStructure() + + with pytest.raises(ValueError, match="frozen_key is required"): + IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + frozen_factor_indices=frozenset({0}), + frozen_key=None, + ) + + def test_rejects_invalid_frozen_factor_index(self): + """Should raise ValueError for 
out-of-range frozen factor indices.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + + with pytest.raises(ValueError, match="Invalid frozen factor index 5"): + IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + frozen_factor_indices=frozenset({0, 5}), + frozen_key=jax.random.PRNGKey(0), + ) + + def test_warns_for_non_independent_structure(self, caplog): + """Should log warning when structure is not IndependentStructure.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3], [0.2, 0.8]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((2, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = SequentialConditional( + control_maps=(None, jnp.array([0, 1], dtype=jnp.int32)), + vocab_sizes=jnp.array([2, 2], dtype=jnp.int32), + ) + + IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + + assert "IndependentFactoredGenerativeProcess is designed for IndependentStructure" in caplog.text + + +class TestStateTransitions: # pylint: disable=too-few-public-methods + """Tests for state transitions with frozen factors.""" + + def test_frozen_factor_states_match_across_batch(self, 
three_factor_process_with_frozen): + """Frozen factor states should be identical across batch samples.""" + process = three_factor_process_with_frozen + batch_size = 4 + seq_len = 10 + + batch_states = tuple(jnp.tile(s[None, :], (batch_size, 1)) for s in process.initial_state) + keys = jax.random.split(jax.random.PRNGKey(789), batch_size) + + all_states, _ = process.generate(batch_states, keys, seq_len, True) + + # Factor 1 states should be identical across batch + frozen_factor_states = all_states[1] + for i in range(1, batch_size): + chex.assert_trees_all_close(frozen_factor_states[0], frozen_factor_states[i]) From c982f4410a69cbf18ba909f008f52e4c88f954e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Casper=20L=C3=BCtzh=C3=B8ft=20Christensen?= <61698286+casperlchristensen@users.noreply.github.com> Date: Fri, 16 Jan 2026 14:59:46 -0800 Subject: [PATCH 19/35] noises process option (#165) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * return targets (#163) * Apply Eric's review suggestions - Use tuple(map(int, vocab_sizes)) in factored_generative_process.py - Use math.prod(vocab_sizes) in noisy_channel.py for cleaner code Co-authored-by: Casper Lützhøft Christensen --------- Co-authored-by: claude[bot] <41898282+claude[bot]@users.noreply.github.com> Co-authored-by: Casper Lützhøft Christensen --- simplexity/generative_processes/builder.py | 22 +++-- .../factored_generative_process.py | 22 ++++- .../generalized_hidden_markov_model.py | 5 + .../hidden_markov_model.py | 5 + .../generative_processes/noisy_channel.py | 69 ++++++++++++++ .../test_factored_generative_process.py | 73 ++++++++++++++ .../test_hidden_markov_model.py | 28 ++++++ .../test_noisy_channel.py | 94 +++++++++++++++++++ 8 files changed, 308 insertions(+), 10 deletions(-) create mode 100644 simplexity/generative_processes/noisy_channel.py create mode 100644 tests/generative_processes/test_noisy_channel.py diff --git a/simplexity/generative_processes/builder.py 
b/simplexity/generative_processes/builder.py index 9f0fc929..8331c9c1 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -75,12 +75,13 @@ def build_hidden_markov_model( process_params: Mapping[str, Any] | None = None, initial_state: jax.Array | Sequence[float] | None = None, device: str | None = None, + noise_epsilon: float = 0.0, ) -> HiddenMarkovModel: """Build a hidden Markov model.""" process_params = process_params or {} initial_state = jnp.array(initial_state) if initial_state is not None else None transition_matrices = build_transition_matrices(HMM_MATRIX_FUNCTIONS, process_name, process_params, device=device) - return HiddenMarkovModel(transition_matrices, initial_state, device=device) + return HiddenMarkovModel(transition_matrices, initial_state, device=device, noise_epsilon=noise_epsilon) def build_generalized_hidden_markov_model( @@ -88,12 +89,13 @@ def build_generalized_hidden_markov_model( process_params: Mapping[str, Any] | None = None, initial_state: jax.Array | Sequence[float] | None = None, device: str | None = None, + noise_epsilon: float = 0.0, ) -> GeneralizedHiddenMarkovModel: """Build a generalized hidden Markov model.""" process_params = process_params or {} initial_state = jnp.array(initial_state) if initial_state is not None else None transition_matrices = build_transition_matrices(GHMM_MATRIX_FUNCTIONS, process_name, process_params, device=device) - return GeneralizedHiddenMarkovModel(transition_matrices, initial_state, device=device) + return GeneralizedHiddenMarkovModel(transition_matrices, initial_state, device=device, noise_epsilon=noise_epsilon) def build_nonergodic_transition_matrices( @@ -161,6 +163,7 @@ def build_factored_process( transition_matrices: Sequence[jax.Array], normalizing_eigenvectors: Sequence[jax.Array], initial_states: Sequence[jax.Array], + noise_epsilon: float = 0.0, **structure_kwargs, ) -> FactoredGenerativeProcess: """Factory function for building factored 
processes with different conditional structures. @@ -171,6 +174,7 @@ def build_factored_process( transition_matrices: Per-factor transition tensors (shape [K_i, V_i, S_i, S_i]) normalizing_eigenvectors: Per-factor eigenvectors (shape [K_i, S_i]) initial_states: Initial state per factor (shape [S_i]) + noise_epsilon: Noisy channel epsilon value **structure_kwargs: Structure-specific keyword arguments: - For "independent": (none) - For "chain": control_maps @@ -218,12 +222,14 @@ def build_factored_process( normalizing_eigenvectors=normalizing_eigenvectors, initial_states=initial_states, structure=structure, + noise_epsilon=noise_epsilon, ) def build_factored_process_from_spec( structure_type: Literal["independent", "chain", "symmetric", "transition_coupled"], spec: Sequence[dict[str, Any]], + noise_epsilon: float = 0.0, **structure_params, ) -> FactoredGenerativeProcess: """Unified builder for factored processes from specification. @@ -235,6 +241,7 @@ def build_factored_process_from_spec( - For "chain": List of component dicts with control_maps - For "symmetric": List of component dicts - For "transition_coupled": List of component dicts + noise_epsilon: Noisy channel epsilon value **structure_params: Additional structure-specific parameters: - For "independent": (none) - For "chain": (none, uses spec's control_map fields) @@ -255,13 +262,6 @@ def build_factored_process_from_spec( {"component_type": "hmm", "variants": [{"process_name": "mess3", "x": 0.5, "a": 0.6}]}, ], ) - - # Symmetric - process = build_factored_process_from_spec( - structure_type="symmetric", - spec=[...], - control_maps=[[0, 1, 0, 1], [1, 0, 1, 0]], - ) ``` """ if structure_type == "independent": @@ -272,6 +272,7 @@ def build_factored_process_from_spec( transition_matrices=transition_matrices, normalizing_eigenvectors=normalizing_eigenvectors, initial_states=initial_states, + noise_epsilon=noise_epsilon, ) elif structure_type == "chain": component_types, transition_matrices, 
normalizing_eigenvectors, initial_states, control_maps = ( @@ -283,6 +284,7 @@ def build_factored_process_from_spec( transition_matrices=transition_matrices, normalizing_eigenvectors=normalizing_eigenvectors, initial_states=initial_states, + noise_epsilon=noise_epsilon, control_maps=control_maps, ) elif structure_type == "symmetric": @@ -301,6 +303,7 @@ def build_factored_process_from_spec( transition_matrices=transition_matrices, normalizing_eigenvectors=normalizing_eigenvectors, initial_states=initial_states, + noise_epsilon=noise_epsilon, control_maps=control_maps_arrays, ) elif structure_type == "transition_coupled": @@ -328,6 +331,7 @@ def build_factored_process_from_spec( transition_matrices=transition_matrices, normalizing_eigenvectors=normalizing_eigenvectors, initial_states=initial_states, + noise_epsilon=noise_epsilon, control_maps_transition=control_maps_arrays, emission_variant_indices=emission_variant_indices_array, emission_control_maps=emission_control_maps_arrays, diff --git a/simplexity/generative_processes/factored_generative_process.py b/simplexity/generative_processes/factored_generative_process.py index 2a842b51..90384b33 100644 --- a/simplexity/generative_processes/factored_generative_process.py +++ b/simplexity/generative_processes/factored_generative_process.py @@ -11,6 +11,7 @@ import jax.numpy as jnp from simplexity.generative_processes.generative_process import GenerativeProcess +from simplexity.generative_processes.noisy_channel import compute_joint_blur_matrix from simplexity.generative_processes.structures import ConditionalContext, ConditionalStructure from simplexity.logger import SIMPLEXITY_LOGGER from simplexity.utils.factoring_utils import TokenEncoder, transition_with_obs @@ -80,6 +81,10 @@ class FactoredGenerativeProcess(GenerativeProcess[FactoredState]): structure: ConditionalStructure encoder: TokenEncoder + # Noise parameters + noise_epsilon: float + _blur_matrix: jax.Array | None + def __init__( self, *, @@ -89,6 +94,7 @@ 
def __init__( initial_states: Sequence[jax.Array], structure: ConditionalStructure, device: str | None = None, + noise_epsilon: float = 0.0, ) -> None: """Initialize factored generative process. @@ -101,6 +107,7 @@ def __init__( initial_states: Initial state per factor (shape [S_i]) structure: Conditional structure defining factor interactions device: Device to place arrays on (e.g., "cpu", "gpu") + noise_epsilon: Noisy channel epsilon value """ if len(component_types) == 0: raise ValueError("Must provide at least one component") @@ -133,6 +140,14 @@ def __init__( self.num_variants = tuple(int(k) for k in num_variants) self.encoder = TokenEncoder(jnp.array(vocab_sizes)) + # Store noise parameters + self.noise_epsilon = noise_epsilon + if noise_epsilon > 0.0: + vocab_sizes_tuple = tuple(map(int, vocab_sizes)) + self._blur_matrix = compute_joint_blur_matrix(vocab_sizes_tuple, noise_epsilon) + else: + self._blur_matrix = None + def _make_context(self, state: FactoredState) -> ConditionalContext: """Create conditional context for structure methods.""" return ConditionalContext( @@ -166,7 +181,12 @@ def observation_probability_distribution(self, state: FactoredState) -> jax.Arra Distribution over composite tokens, shape [prod(V_i)] """ context = self._make_context(state) - return self.structure.compute_joint_distribution(context) + joint_dist = self.structure.compute_joint_distribution(context) + + if self._blur_matrix is not None: + joint_dist = self._blur_matrix @ joint_dist + + return joint_dist @eqx.filter_jit def log_observation_probability_distribution(self, log_belief_state: FactoredState) -> jax.Array: diff --git a/simplexity/generative_processes/generalized_hidden_markov_model.py b/simplexity/generative_processes/generalized_hidden_markov_model.py index c3c60807..68d4dd53 100644 --- a/simplexity/generative_processes/generalized_hidden_markov_model.py +++ b/simplexity/generative_processes/generalized_hidden_markov_model.py @@ -17,6 +17,7 @@ import jax.numpy as 
jnp from simplexity.generative_processes.generative_process import GenerativeProcess +from simplexity.generative_processes.noisy_channel import apply_noisy_channel from simplexity.generative_processes.transition_matrices import get_stationary_state from simplexity.logger import SIMPLEXITY_LOGGER from simplexity.utils.jnp_utils import resolve_jax_device @@ -42,7 +43,11 @@ def __init__( transition_matrices: jax.Array, initial_state: jax.Array | None = None, device: str | None = None, + noise_epsilon: float = 0.0, ): + if noise_epsilon > 0.0: + transition_matrices = apply_noisy_channel(transition_matrices, noise_epsilon) + self.device = resolve_jax_device(device) self.validate_transition_matrices(transition_matrices) diff --git a/simplexity/generative_processes/hidden_markov_model.py b/simplexity/generative_processes/hidden_markov_model.py index f068ef1a..ea5fc9f7 100644 --- a/simplexity/generative_processes/hidden_markov_model.py +++ b/simplexity/generative_processes/hidden_markov_model.py @@ -17,6 +17,7 @@ import jax.numpy as jnp from simplexity.generative_processes.generalized_hidden_markov_model import GeneralizedHiddenMarkovModel, State +from simplexity.generative_processes.noisy_channel import apply_noisy_channel from simplexity.generative_processes.transition_matrices import get_stationary_state from simplexity.logger import SIMPLEXITY_LOGGER from simplexity.utils.jnp_utils import resolve_jax_device @@ -32,7 +33,11 @@ def __init__( transition_matrices: jax.Array, initial_state: jax.Array | None = None, device: str | None = None, + noise_epsilon: float = 0.0, ): + if noise_epsilon > 0.0: + transition_matrices = apply_noisy_channel(transition_matrices, noise_epsilon) + self.device = resolve_jax_device(device) self.validate_transition_matrices(transition_matrices) diff --git a/simplexity/generative_processes/noisy_channel.py b/simplexity/generative_processes/noisy_channel.py new file mode 100644 index 00000000..bbdf05c2 --- /dev/null +++ 
b/simplexity/generative_processes/noisy_channel.py @@ -0,0 +1,69 @@ +"""Noisy channel utilities for generative processes. + +Implements observation-level noise by blurring transition matrices: +with probability epsilon, each output symbol is replaced by a symbol +drawn uniformly from the vocabulary (possibly the same symbol). +""" + +import math + +import jax +import jax.numpy as jnp + + +def apply_noisy_channel(transition_matrices: jax.Array, noise_epsilon: float) -> jax.Array: + """Apply noisy channel blur to transition matrices. + + With probability noise_epsilon, replaces each output symbol by a symbol + drawn uniformly from the vocabulary (possibly the same symbol). + + Args: + transition_matrices: Transition matrices of shape [V, S, S] where V is vocab_size. + noise_epsilon: Noise probability in [0, 1]. 0 means no noise, 1 means uniform noise. + + Returns: + Blurred transition matrices of shape [V, S, S]. + + Raises: + ValueError: If noise_epsilon is not in [0, 1]. + """ + if not 0.0 <= noise_epsilon <= 1.0: + raise ValueError(f"noise_epsilon must be in [0, 1], got {noise_epsilon}") + + if noise_epsilon == 0.0: + return transition_matrices + + vocab_size = transition_matrices.shape[0] + blur_matrix = (1.0 - noise_epsilon) * jnp.eye(vocab_size) + noise_epsilon * ( + jnp.ones((vocab_size, vocab_size)) / vocab_size + ) + return jnp.einsum("kij, kn -> nij", transition_matrices, blur_matrix) + + +def compute_joint_blur_matrix(vocab_sizes: tuple[int, ...], noise_epsilon: float) -> jax.Array: + """Compute blur matrix for joint observation space. + + For factored processes with joint noise, creates a blur matrix + over the composite vocabulary. + + Args: + vocab_sizes: Tuple of vocab sizes per factor. + noise_epsilon: Noise probability in [0, 1]. + + Returns: + Blur matrix of shape [joint_vocab, joint_vocab] where joint_vocab = prod(vocab_sizes). + + Raises: + ValueError: If noise_epsilon is not in [0, 1]. 
+ """ + if not 0.0 <= noise_epsilon <= 1.0: + raise ValueError(f"noise_epsilon must be in [0, 1], got {noise_epsilon}") + + joint_vocab = math.prod(vocab_sizes) + + if noise_epsilon == 0.0: + return jnp.eye(joint_vocab) + + return (1.0 - noise_epsilon) * jnp.eye(joint_vocab) + noise_epsilon * ( + jnp.ones((joint_vocab, joint_vocab)) / joint_vocab + ) diff --git a/tests/generative_processes/test_factored_generative_process.py b/tests/generative_processes/test_factored_generative_process.py index 444bd48b..1e2c6cd0 100644 --- a/tests/generative_processes/test_factored_generative_process.py +++ b/tests/generative_processes/test_factored_generative_process.py @@ -627,3 +627,76 @@ def test_factored_process_device_placement(): assert all(tm.device == process.device for tm in process.transition_matrices) assert all(ev.device == process.device for ev in process.normalizing_eigenvectors) assert all(s.device == process.device for s in process.initial_states) + + +def test_factored_process_with_joint_noise(): + """Test factored process with joint noise modifies the distribution.""" + component_types = ("hmm", "hmm") + transition_matrices = ( + _tensor_from_probs([[0.6, 0.4]]), + _tensor_from_probs([[0.7, 0.3]]), + ) + normalizing_eigenvectors = ( + jnp.ones((1, 1), dtype=jnp.float32), + jnp.ones((1, 1), dtype=jnp.float32), + ) + initial_states = ( + jnp.array([1.0], dtype=jnp.float32), + jnp.array([1.0], dtype=jnp.float32), + ) + structure = IndependentStructure() + + process_clean = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + process_noisy = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + noise_epsilon=0.2, + ) + + state = 
process_clean.initial_state + clean_dist = process_clean.observation_probability_distribution(state) + noisy_dist = process_noisy.observation_probability_distribution(state) + + assert not jnp.allclose(clean_dist, noisy_dist) + chex.assert_trees_all_close(jnp.sum(clean_dist), 1.0) + chex.assert_trees_all_close(jnp.sum(noisy_dist), 1.0) + + +def test_factored_process_with_zero_noise_unchanged(): + """Test factored process with noise_epsilon=0 is identical to no noise.""" + component_types = ("hmm",) + transition_matrices = (_tensor_from_probs([[0.6, 0.4]]),) + normalizing_eigenvectors = (jnp.ones((1, 1), dtype=jnp.float32),) + initial_states = (jnp.array([1.0], dtype=jnp.float32),) + structure = IndependentStructure() + + process_clean = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + ) + process_zero_noise = FactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + noise_epsilon=0.0, + ) + + state = process_clean.initial_state + clean_dist = process_clean.observation_probability_distribution(state) + zero_noise_dist = process_zero_noise.observation_probability_distribution(state) + + chex.assert_trees_all_close(clean_dist, zero_noise_dist) diff --git a/tests/generative_processes/test_hidden_markov_model.py b/tests/generative_processes/test_hidden_markov_model.py index 1426336c..c36e7f09 100644 --- a/tests/generative_processes/test_hidden_markov_model.py +++ b/tests/generative_processes/test_hidden_markov_model.py @@ -219,3 +219,31 @@ def test_log_probability(z1r: HiddenMarkovModel): log_probability = z1r.log_probability(observations) assert jnp.isclose(log_probability, jnp.log(expected_probability)) + + +def test_hmm_with_noise(): + """Test that 
HMM with noise has modified observation distributions.""" + hmm_clean = build_hidden_markov_model(process_name="zero_one_random", process_params={"p": 0.5}) + hmm_noisy = build_hidden_markov_model(process_name="zero_one_random", process_params={"p": 0.5}, noise_epsilon=0.2) + + state = jnp.array([1.0, 0.0, 0.0]) + clean_dist = hmm_clean.observation_probability_distribution(state) + noisy_dist = hmm_noisy.observation_probability_distribution(state) + + assert not jnp.allclose(clean_dist, noisy_dist) + chex.assert_trees_all_close(jnp.sum(clean_dist), 1.0) + chex.assert_trees_all_close(jnp.sum(noisy_dist), 1.0) + + +def test_hmm_with_zero_noise_unchanged(): + """Test that HMM with noise_epsilon=0 is identical to no noise.""" + hmm_clean = build_hidden_markov_model(process_name="zero_one_random", process_params={"p": 0.5}) + hmm_zero_noise = build_hidden_markov_model( + process_name="zero_one_random", process_params={"p": 0.5}, noise_epsilon=0.0 + ) + + state = hmm_clean.initial_state + clean_dist = hmm_clean.observation_probability_distribution(state) + zero_noise_dist = hmm_zero_noise.observation_probability_distribution(state) + + chex.assert_trees_all_close(clean_dist, zero_noise_dist) diff --git a/tests/generative_processes/test_noisy_channel.py b/tests/generative_processes/test_noisy_channel.py new file mode 100644 index 00000000..c2057082 --- /dev/null +++ b/tests/generative_processes/test_noisy_channel.py @@ -0,0 +1,94 @@ +"""Tests for noisy channel functionality.""" + +import chex +import jax.numpy as jnp +import pytest + +from simplexity.generative_processes.noisy_channel import ( + apply_noisy_channel, + compute_joint_blur_matrix, +) + + +class TestApplyNoisyChannel: + """Tests for apply_noisy_channel function.""" + + def test_zero_epsilon_returns_unchanged(self): + """Test that zero noise epsilon returns the original matrices.""" + matrices = jnp.array([[[0.5, 0.5], [0.5, 0.5]], [[0.3, 0.7], [0.6, 0.4]]]) + result = apply_noisy_channel(matrices, 
noise_epsilon=0.0) + chex.assert_trees_all_close(result, matrices) + + def test_one_epsilon_returns_uniform_blur(self): + """Test that noise epsilon of 1.0 produces uniform blur.""" + matrices = jnp.array([[[1.0, 0.0], [0.0, 1.0]], [[0.0, 1.0], [1.0, 0.0]]]) + result = apply_noisy_channel(matrices, noise_epsilon=1.0) + chex.assert_trees_all_close(result[0], result[1]) + + def test_intermediate_epsilon(self): + """Test intermediate noise epsilon values produce expected blur.""" + matrices = jnp.array([[[1.0, 0.0]], [[0.0, 1.0]]]) + result = apply_noisy_channel(matrices, noise_epsilon=0.5) + expected = jnp.array([[[0.75, 0.25]], [[0.25, 0.75]]]) + chex.assert_trees_all_close(result, expected) + + def test_invalid_epsilon_negative_raises(self): + """Test that negative noise epsilon raises ValueError.""" + matrices = jnp.array([[[1.0]]]) + with pytest.raises(ValueError, match="noise_epsilon must be in"): + apply_noisy_channel(matrices, noise_epsilon=-0.1) + + def test_invalid_epsilon_greater_than_one_raises(self): + """Test that noise epsilon greater than 1 raises ValueError.""" + matrices = jnp.array([[[1.0]]]) + with pytest.raises(ValueError, match="noise_epsilon must be in"): + apply_noisy_channel(matrices, noise_epsilon=1.1) + + def test_preserves_shape(self): + """Test that output shape matches input shape.""" + matrices = jnp.ones((3, 4, 4)) + result = apply_noisy_channel(matrices, noise_epsilon=0.2) + assert result.shape == matrices.shape + + +class TestComputeJointBlurMatrix: + """Tests for compute_joint_blur_matrix function.""" + + def test_shape_single_factor(self): + """Test blur matrix shape for single factor.""" + blur = compute_joint_blur_matrix((4,), noise_epsilon=0.1) + assert blur.shape == (4, 4) + + def test_shape_two_factors(self): + """Test blur matrix shape for two factors.""" + blur = compute_joint_blur_matrix((2, 3), noise_epsilon=0.1) + assert blur.shape == (6, 6) + + def test_shape_three_factors(self): + """Test blur matrix shape for three 
factors.""" + blur = compute_joint_blur_matrix((2, 3, 4), noise_epsilon=0.2) + assert blur.shape == (24, 24) + + def test_row_stochastic(self): + """Test that blur matrix is row stochastic.""" + blur = compute_joint_blur_matrix((2, 3), noise_epsilon=0.2) + row_sums = jnp.sum(blur, axis=1) + chex.assert_trees_all_close(row_sums, jnp.ones(6)) + + def test_zero_epsilon_is_identity(self): + """Test that zero noise epsilon produces identity matrix.""" + blur = compute_joint_blur_matrix((2, 3), noise_epsilon=0.0) + chex.assert_trees_all_close(blur, jnp.eye(6)) + + def test_one_epsilon_is_uniform(self): + """Test that noise epsilon of 1.0 produces uniform matrix.""" + blur = compute_joint_blur_matrix((2, 3), noise_epsilon=1.0) + expected = jnp.ones((6, 6)) / 6 + chex.assert_trees_all_close(blur, expected) + + def test_invalid_epsilon_raises(self): + """Test that invalid noise epsilon values raise ValueError.""" + with pytest.raises(ValueError, match="noise_epsilon must be in"): + compute_joint_blur_matrix((2, 3), noise_epsilon=-0.1) + with pytest.raises(ValueError, match="noise_epsilon must be in"): + compute_joint_blur_matrix((2, 3), noise_epsilon=1.5) From cb7b0345a49a3d8266cffe10526e96a357f3cca9 Mon Sep 17 00:00:00 2001 From: Kyle J Ray Date: Wed, 4 Feb 2026 15:42:59 -0800 Subject: [PATCH 20/35] Add NonErgodicGenerativeProcess for block diagonal mixture models Implements a truly nonergodic generative process that composes multiple GenerativeProcess components with weighted mixture probabilities. 
Key features: - Block diagonal structure where each component owns disjoint state space - Wraps each component's generate() to avoid exponential joint state space - Vocab mapping to handle different component vocabularies - Support for HMM, GHMM, and FactoredGenerativeProcess components - Builder function for config-driven instantiation - Full test coverage with 21 unit tests Co-Authored-By: Claude Opus 4.5 --- simplexity/generative_processes/builder.py | 95 +++ simplexity/generative_processes/generator.py | 35 +- .../nonergodic_generative_process.py | 540 ++++++++++++++++++ .../nonergodic_example.yaml | 39 ++ .../test_nonergodic_generative_process.py | 332 +++++++++++ 5 files changed, 1033 insertions(+), 8 deletions(-) create mode 100644 simplexity/generative_processes/nonergodic_generative_process.py create mode 100644 tests/end_to_end/configs/generative_process/nonergodic_example.yaml create mode 100644 tests/generative_processes/test_nonergodic_generative_process.py diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 8331c9c1..4975acd3 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -19,6 +19,7 @@ from simplexity.generative_processes.factored_generative_process import ComponentType, FactoredGenerativeProcess from simplexity.generative_processes.generalized_hidden_markov_model import GeneralizedHiddenMarkovModel from simplexity.generative_processes.hidden_markov_model import HiddenMarkovModel +from simplexity.generative_processes.nonergodic_generative_process import NonErgodicGenerativeProcess from simplexity.generative_processes.structures import ( ConditionalTransitions, FullyConditional, @@ -634,3 +635,97 @@ def build_transition_coupled_from_spec( emission_variant_indices_array, emission_control_maps_arrays, ) + + +def build_nonergodic_process_from_spec( + components: Sequence[dict[str, Any]], + component_weights: Sequence[float], + vocab_maps: 
Sequence[Sequence[int]] | None = None, + device: str | None = None, +) -> NonErgodicGenerativeProcess: + """Build a nonergodic process from component specifications. + + Creates a NonErgodicGenerativeProcess that composes multiple GenerativeProcess + instances into a truly nonergodic mixture with block diagonal structure. + + Args: + components: List of component specs. Each spec has: + - component_type: "hmm", "ghmm", or "factored" + - For hmm/ghmm: process_name, process_params + - For factored: structure_type, spec, and structure-specific params + - vocab_map: Optional per-component vocab mapping + component_weights: Mixture weights for components (will be normalized). + vocab_maps: Optional global vocab maps (overrides per-component). + device: Device placement. + + Returns: + NonErgodicGenerativeProcess instance. + + Example: + ```yaml + instance: + _target_: simplexity.generative_processes.builder.build_nonergodic_process_from_spec + components: + - component_type: hmm + process_name: mess3 + process_params: {x: 0.15, a: 0.6} + - component_type: ghmm + process_name: tom_quantum + process_params: {alpha: 1.0, beta: 4.0} + - component_type: factored + structure_type: independent + spec: + - component_type: hmm + variants: + - process_name: coin + process_params: {p: 0.5} + component_weights: [0.5, 0.3, 0.2] + vocab_maps: + - [0, 1, 2] + - [0, 1, 2] + - [0, 1] + ``` + + Raises: + ValueError: If component_type is unknown. 
+ """ + built_components = [] + inferred_vocab_maps = [] + + for comp_spec in components: + comp_type = comp_spec.get("component_type", "hmm") + + if comp_type == "hmm": + process = build_hidden_markov_model( + process_name=comp_spec["process_name"], + process_params=comp_spec.get("process_params", {}), + device=device, + ) + elif comp_type == "ghmm": + process = build_generalized_hidden_markov_model( + process_name=comp_spec["process_name"], + process_params=comp_spec.get("process_params", {}), + device=device, + ) + elif comp_type == "factored": + # Extract factored-specific params + factored_kwargs = {k: v for k, v in comp_spec.items() if k not in ("component_type", "vocab_map")} + process = build_factored_process_from_spec(**factored_kwargs) + else: + raise ValueError(f"Unknown component_type: {comp_type}") + + built_components.append(process) + + # Infer vocab map if not provided globally + if vocab_maps is None: + comp_vocab_map = comp_spec.get("vocab_map", list(range(process.vocab_size))) + inferred_vocab_maps.append(comp_vocab_map) + + final_vocab_maps = vocab_maps if vocab_maps is not None else inferred_vocab_maps + + return NonErgodicGenerativeProcess( + components=built_components, + component_weights=component_weights, + vocab_maps=final_vocab_maps, + device=device, + ) diff --git a/simplexity/generative_processes/generator.py b/simplexity/generative_processes/generator.py index 15c0ddf1..6a7fcc14 100644 --- a/simplexity/generative_processes/generator.py +++ b/simplexity/generative_processes/generator.py @@ -16,6 +16,7 @@ import jax.numpy as jnp from simplexity.generative_processes.generative_process import GenerativeProcess +from simplexity.generative_processes.nonergodic_generative_process import NonErgodicState @eqx.filter_jit @@ -77,16 +78,10 @@ def generate_data_batch_with_full_history( if bos_token is None: # Drop first belief state since it's the initial state before any token - if isinstance(belief_states, tuple): - belief_states = tuple(b[:, 
1:, ...] for b in belief_states) - else: - belief_states = belief_states[:, 1:, ...] + belief_states = _slice_belief_states(belief_states, slice(1, None)) input_len = inputs.shape[1] - if isinstance(belief_states, tuple): - belief_states = tuple(b[:, :input_len, ...] for b in belief_states) - else: - belief_states = belief_states[:, :input_len, ...] + belief_states = _slice_belief_states(belief_states, slice(None, input_len)) result = { "belief_states": belief_states, @@ -98,6 +93,30 @@ def generate_data_batch_with_full_history( return result +def _slice_belief_states( + belief_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, + seq_slice: slice, +) -> jax.Array | tuple[jax.Array, ...] | NonErgodicState: + """Slice belief states along the sequence dimension (axis 1). + + Handles different state representations: + - Plain array: slice directly + - Tuple of arrays: slice each element + - NonErgodicState: slice component_beliefs, keep component_states as-is + """ + if isinstance(belief_states, NonErgodicState): + # For NonErgodicState, slice component_beliefs trajectory + # component_states are final states, not trajectories, so don't slice + return NonErgodicState( + component_beliefs=belief_states.component_beliefs[:, seq_slice, ...], + component_states=belief_states.component_states, + ) + elif isinstance(belief_states, tuple): + return tuple(b[:, seq_slice, ...] for b in belief_states) + else: + return belief_states[:, seq_slice, ...] 
+ + def _compute_prefix_probabilities( data_generator: GenerativeProcess, initial_states: jax.Array | tuple[jax.Array, ...], diff --git a/simplexity/generative_processes/nonergodic_generative_process.py b/simplexity/generative_processes/nonergodic_generative_process.py new file mode 100644 index 00000000..8fa8a03a --- /dev/null +++ b/simplexity/generative_processes/nonergodic_generative_process.py @@ -0,0 +1,540 @@ +"""Nonergodic generative process that composes multiple GenerativeProcess components.""" + +from __future__ import annotations + +from collections.abc import Sequence +from functools import partial +from typing import Any, NamedTuple + +import chex +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.generative_process import GenerativeProcess +from simplexity.utils.jnp_utils import resolve_jax_device + +# Type alias for component states: either a flat array or tuple of arrays (FactoredState) +ComponentState = jax.Array | tuple[jax.Array, ...] + + +# ----------------------------------------------------------------------------- +# Helper functions for handling heterogeneous component state types. +# +# The generate() method uses jax.lax.switch to select which component to use. +# This requires all branches to return the same shape, but components may have +# different state types: +# - HMM/GHMM: flat jax.Array of shape [state_dim] +# - FactoredGenerativeProcess: tuple of arrays (FactoredState) +# +# To handle this uniformly, we: +# 1. Flatten each state to a 1D array (preserving total element count) +# 2. Pad to a common max size for switch compatibility +# 3. After processing, unpad and unflatten back to original structure +# +# This lets us work with uniform arrays inside jax.lax.switch while components +# still receive their native state structures. 
+# ----------------------------------------------------------------------------- + + +def _get_flat_size(state: ComponentState) -> int: + """Get total number of elements in a component state. + + Args: + state: Either a flat jax.Array or a tuple of arrays (FactoredState) + + Returns: + Total element count across all arrays in the state + """ + if isinstance(state, tuple): + return sum(arr.size for arr in state) + return state.size + + +def _flatten_state(state: ComponentState) -> jax.Array: + """Flatten a component state to a 1D array. + + Args: + state: Either a flat jax.Array or a tuple of arrays (FactoredState) + + Returns: + 1D array containing all elements from the state + """ + if isinstance(state, tuple): + return jnp.concatenate([arr.ravel() for arr in state]) + return state.ravel() + + +def _unflatten_state(flat: jax.Array, template: ComponentState) -> ComponentState: + """Restore original state structure from a flattened 1D array. + + Uses the template to determine: + - For flat arrays: the target shape + - For tuples: the number of arrays, each array's shape, and split points + + Args: + flat: 1D array containing state data + template: Original state (used only for shape/structure, not values) + + Returns: + State with same structure as template, populated with data from flat + + Note: + Uses dynamic_slice instead of split to avoid ConcretizationTypeError + inside jax.lax.switch. The template shapes are concrete (known at trace + time), so we can compute offsets as Python ints. + """ + if isinstance(template, tuple): + # Extract parts using dynamic_slice with concrete offsets and sizes + # This avoids jnp.split which requires concrete split indices + offset = 0 + parts = [] + for t in template: + part = jax.lax.dynamic_slice(flat, (offset,), (t.size,)) + parts.append(part.reshape(t.shape)) + offset += t.size + return tuple(parts) + return flat.reshape(template.shape) + + +class NonErgodicState(NamedTuple): + """State for nonergodic generative process. 
+ + Attributes: + component_beliefs: P(component_i | observations_so_far), shape [num_components]. + Sums to 1. For generation, becomes one-hot after first emission. + component_states: Tuple of per-component state arrays. Each element has the + shape expected by that component's GenerativeProcess. + """ + + component_beliefs: jax.Array + component_states: tuple[Any, ...] + + +class NonErgodicGenerativeProcess(GenerativeProcess[NonErgodicState]): + """A nonergodic mixture of generative processes. + + Composes multiple GenerativeProcess instances into a block diagonal structure + where no transitions occur between components. The process maintains belief + over which component generated the sequence, updated via Bayes rule. + + Key efficiency: Does NOT materialize a full block diagonal matrix. Instead, + it stores component processes directly and updates only the relevant beliefs. + + For generation: A single component is sampled at the start of each sequence + based on component_weights, and all observations come from that component. + + For inference: Beliefs are tracked across all components via Bayesian filtering. + + Attributes: + components: Tuple of component GenerativeProcess instances. + component_weights: Initial mixture weights (normalized to sum to 1). + vocab_maps: Per-component mapping from local vocab to global vocab. + _vocab_size: Unified vocabulary size across all components. + _inverse_vocab_maps: Per-component mapping from global vocab to local vocab. + device: JAX device for arrays. + """ + + components: tuple[GenerativeProcess, ...] + component_weights: jax.Array + vocab_maps: tuple[jax.Array, ...] + _vocab_size: int + _inverse_vocab_maps: tuple[jax.Array, ...] 
+ device: jax.Device # type: ignore[valid-type] + + def __init__( + self, + components: Sequence[GenerativeProcess], + component_weights: jax.Array | Sequence[float], + vocab_maps: Sequence[Sequence[int]] | None = None, + device: str | None = None, + ) -> None: + """Initialize nonergodic generative process. + + Args: + components: Sequence of GenerativeProcess instances to compose. + component_weights: Initial mixture weights. Will be normalized to sum to 1. + vocab_maps: Optional per-component vocab mappings. vocab_maps[i] maps + component i's local token indices to global token indices. + If None, assumes all components share the same vocab [0, 1, ..., V-1]. + device: Device to place arrays on (e.g., "cpu", "gpu"). + + Raises: + ValueError: If components is empty or weights don't match component count. + """ + if len(components) == 0: + raise ValueError("Must provide at least one component") + + self.device = resolve_jax_device(device) + self.components = tuple(components) + + # Normalize weights + weights = jnp.array(component_weights) + if weights.shape[0] != len(components): + raise ValueError( + f"Number of weights ({weights.shape[0]}) must match number of components ({len(components)})" + ) + if jnp.any(weights < 0): + raise ValueError("Component weights must be non-negative") + self.component_weights = weights / jnp.sum(weights) + self.component_weights = jax.device_put(self.component_weights, self.device) + + # Set up vocab maps + if vocab_maps is None: + # Default: each component uses its natural vocab [0, 1, ..., V_i-1] + vocab_maps = [list(range(c.vocab_size)) for c in components] + + self.vocab_maps = tuple(jax.device_put(jnp.array(vm, dtype=jnp.int32), self.device) for vm in vocab_maps) + + # Compute global vocab size + self._vocab_size = max(max(vm) for vm in vocab_maps) + 1 + + # Build inverse vocab maps (global -> local, -1 if not mapped) + inverse_maps = [] + for vm in vocab_maps: + inv = jnp.full((self._vocab_size,), -1, dtype=jnp.int32) + for 
local_idx, global_idx in enumerate(vm): + inv = inv.at[global_idx].set(local_idx) + inverse_maps.append(jax.device_put(inv, self.device)) + self._inverse_vocab_maps = tuple(inverse_maps) + + @property + def vocab_size(self) -> int: + """Unified vocabulary size across all components.""" + return self._vocab_size + + @property + def initial_state(self) -> NonErgodicState: + """Initial state with component weights and per-component initial states.""" + return NonErgodicState( + component_beliefs=self.component_weights, + component_states=tuple(c.initial_state for c in self.components), + ) + + @eqx.filter_jit + def observation_probability_distribution(self, state: NonErgodicState) -> jax.Array: + """Compute P(global_obs | state) as weighted sum over components. + + For each global observation token: + P(obs | state) = sum_i P(component_i | state) * P(obs | component_i, state_i) + + Where P(obs | component_i, state_i) is computed by: + 1. Getting the probability from component i's distribution + 2. Mapping to global vocab via vocab_map + 3. Returning 0 if the global obs is not in component i's vocab + """ + global_dist = jnp.zeros(self._vocab_size) + + for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=False)): + comp_state = state.component_states[i] + # Get component's distribution over its local vocab + local_dist = component.observation_probability_distribution(comp_state) + # Scatter to global vocab positions + component_contrib = jnp.zeros(self._vocab_size).at[vm].add(local_dist) + global_dist = global_dist + state.component_beliefs[i] * component_contrib + + return global_dist + + @eqx.filter_jit + def log_observation_probability_distribution(self, log_belief_state: NonErgodicState) -> jax.Array: + """Compute log P(global_obs | state). + + Note: This expects log_belief_state with log-space component_beliefs and component_states. 
+ """ + log_probs = [] + + for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=False)): + comp_log_state = log_belief_state.component_states[i] + comp_log_belief = log_belief_state.component_beliefs[i] + + local_log_dist = component.log_observation_probability_distribution(comp_log_state) + # Create global distribution with -inf for unmapped tokens + global_log_dist = jnp.full(self._vocab_size, -jnp.inf) + global_log_dist = global_log_dist.at[vm].set(local_log_dist) + # Weight by component belief (in log space: add) + log_probs.append(comp_log_belief + global_log_dist) + + # Combine via logsumexp across components + log_probs_stacked = jnp.stack(log_probs, axis=0) + return jax.nn.logsumexp(log_probs_stacked, axis=0) + + @eqx.filter_jit + def emit_observation(self, state: NonErgodicState, key: chex.PRNGKey) -> chex.Array: + """Emit an observation by sampling from the mixture distribution. + + First samples a component based on component_beliefs, then emits from + that component and maps to global vocab. + """ + key1, key2 = jax.random.split(key) + + # Sample component based on current beliefs + component_idx = jax.random.categorical(key1, jnp.log(state.component_beliefs)) + + # Emit from chosen component using switch for JIT compatibility + def emit_from_component(i: int, k: chex.PRNGKey) -> chex.Array: + comp_state = state.component_states[i] + local_obs = self.components[i].emit_observation(comp_state, k) + return self.vocab_maps[i][local_obs] + + global_obs = jax.lax.switch( + component_idx, + [partial(emit_from_component, i) for i in range(len(self.components))], + key2, + ) + + return global_obs + + @eqx.filter_jit + def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgodicState: + """Update state given observation using Bayesian filtering. + + 1. Compute P(obs | component_i) for each component using current states + 2. Update component_beliefs using Bayes rule + 3. 
Update each component's internal state independently + """ + new_component_states = [] + likelihoods = [] + + for i, (component, inv_map) in enumerate(zip(self.components, self._inverse_vocab_maps, strict=False)): + comp_state = state.component_states[i] + local_obs = inv_map[obs] + + # Get observation probability from this component + local_dist = component.observation_probability_distribution(comp_state) + + # Likelihood is 0 if obs not in this component's vocab + likelihood = jnp.where( + local_obs >= 0, + local_dist[jnp.clip(local_obs, 0, local_dist.shape[0] - 1)], + 0.0, + ) + likelihoods.append(likelihood) + + # Update component's internal state + # Only update if the observation was possible for this component + # (likelihood > 0 checks both vocab membership AND state feasibility) + # Bind component via default arg to avoid B023 (late binding in loop) + new_comp_state = jax.lax.cond( + likelihood > 0, + lambda s, lo, c=component: c.transition_states(s, lo), + lambda s, lo, c=None: s, + comp_state, + local_obs, + ) + new_component_states.append(new_comp_state) + + # Bayes update for component beliefs + likelihoods_arr = jnp.array(likelihoods) + unnorm_beliefs = state.component_beliefs * likelihoods_arr + # Handle case where all likelihoods are 0 (shouldn't happen with valid obs) + normalizer = jnp.sum(unnorm_beliefs) + new_beliefs = jnp.where( + normalizer > 0, + unnorm_beliefs / normalizer, + state.component_beliefs, # Keep old beliefs if normalizer is 0 + ) + + return NonErgodicState( + component_beliefs=new_beliefs, + component_states=tuple(new_component_states), + ) + + @eqx.filter_jit + def probability(self, observations: jax.Array) -> jax.Array: + """Compute P(observations) by marginalizing over components. 
+ + P(obs_1:T) = sum_i P(component_i) * P(obs_1:T | component_i) + """ + + def compute_component_prob(i: int) -> jax.Array: + component = self.components[i] + inv_map = self._inverse_vocab_maps[i] + # Map global observations to local + local_obs = inv_map[observations] + # Check if all observations are valid for this component + all_valid = jnp.all(local_obs >= 0) + # Compute probability (0 if any obs invalid) + prob = jax.lax.cond( + all_valid, + lambda lo: component.probability(lo), + lambda lo: jnp.array(0.0), + local_obs, + ) + return self.component_weights[i] * prob + + # Sum over components + total_prob = jnp.array(0.0) + for i in range(len(self.components)): + total_prob = total_prob + compute_component_prob(i) + + return total_prob + + @eqx.filter_jit + def log_probability(self, observations: jax.Array) -> jax.Array: + """Compute log P(observations) using logsumexp for numerical stability.""" + + def compute_component_log_prob(i: int) -> jax.Array: + component = self.components[i] + inv_map = self._inverse_vocab_maps[i] + # Map global observations to local + local_obs = inv_map[observations] + # Check if all observations are valid for this component + all_valid = jnp.all(local_obs >= 0) + # Compute log probability (-inf if any obs invalid) + log_prob = jax.lax.cond( + all_valid, + lambda lo: component.log_probability(lo), + lambda lo: jnp.array(-jnp.inf), + local_obs, + ) + return jnp.log(self.component_weights[i]) + log_prob + + # Combine via logsumexp + log_probs = jnp.array([compute_component_log_prob(i) for i in range(len(self.components))]) + return jax.nn.logsumexp(log_probs) + + @eqx.filter_vmap(in_axes=(None, 0, 0, None, None)) + def generate( + self, + state: NonErgodicState, + key: chex.PRNGKey, + sequence_len: int, + return_all_states: bool, + ) -> tuple[NonErgodicState, chex.Array]: + """Generate a sequence from a single sampled component. 
+ + Unlike inference (which tracks beliefs across all components), generation + samples ONE component at the start and generates entirely from that component. + + This method is vmapped, so inside the function body we work with unbatched + (single-element) states and keys. We cannot call component.generate() here + because that method is also vmapped and expects batched inputs. Instead, + we implement generation directly using jax.lax.scan over emit_observation + and transition_states. + + Args: + state: Initial NonErgodicState with component_beliefs and component_states. + The batch dimension is handled by vmap. + key: Random key for this sequence. + sequence_len: Length of sequence to generate. + return_all_states: If True, return state trajectory at each timestep. + + Returns: + Tuple of (final_state or state_trajectory, observations). + States are NonErgodicState. Observations are in global vocab space. + """ + key1, key2 = jax.random.split(key) + keys = jax.random.split(key2, sequence_len) + + # 1. Sample which component to use for this entire sequence + component_idx = jax.random.categorical(key1, jnp.log(state.component_beliefs)) + + # 2. Flatten and pad component states for jax.lax.switch compatibility + # + # jax.lax.switch requires all branches to return arrays of identical shape. + # But components can have different state structures: + # - HMM/GHMM: flat array of shape [state_dim] + # - FactoredGenerativeProcess: tuple of arrays (FactoredState) + # + # Our approach: + # a) Save original state structures as templates (for unflattening later) + # b) Flatten each state to 1D (handles both arrays and tuples) + # c) Pad to a common max size + # d) Inside switch: unpad + unflatten → call component → flatten + repad + # e) After scan: unpad + unflatten to restore native structures + # + # Components never see the padding - they work with their native shapes. 
+ num_components = len(self.components) + + # Store original structures as templates for restoring after processing + state_templates = state.component_states + flat_sizes = [_get_flat_size(s) for s in state_templates] + max_flat_size = max(flat_sizes) + + def flatten_and_pad(s: ComponentState) -> jax.Array: + """Convert any state to padded 1D array for uniform shape in switch.""" + flat = _flatten_state(s) + return jnp.pad(flat, (0, max_flat_size - flat.size)) + + def unpad_and_unflatten(padded: jax.Array, original_size: int, template: ComponentState) -> ComponentState: + """Restore original state structure from padded 1D array.""" + flat = padded[:original_size] + return _unflatten_state(flat, template) + + padded_states = tuple(flatten_and_pad(s) for s in state.component_states) + + # 3. Generate sequence using scan, selecting component via switch at each step + def gen_step_for_component( + i: int, padded_state: jax.Array, step_key: chex.PRNGKey + ) -> tuple[jax.Array, chex.Array]: + """Generate one observation from component i and update its state.""" + # Unpad and unflatten to get native state structure + real_state = unpad_and_unflatten(padded_state, flat_sizes[i], state_templates[i]) + # Call component's methods with native state shapes + local_obs = self.components[i].emit_observation(real_state, step_key) + new_real_state = self.components[i].transition_states(real_state, local_obs) + # Flatten and repad for uniform shape in switch + new_padded_state = flatten_and_pad(new_real_state) + global_obs = self.vocab_maps[i][local_obs] + return new_padded_state, global_obs + + def scan_step( + carry: tuple[jax.Array, tuple[jax.Array, ...]], step_key: chex.PRNGKey + ) -> tuple[tuple[jax.Array, tuple[jax.Array, ...]], chex.Array]: + """One generation step: emit from active component, update its state.""" + idx, padded_comp_states = carry + + # Use switch to call the correct component's emit/transition + def gen_from_i(i: int) -> tuple[jax.Array, chex.Array]: + 
return gen_step_for_component(i, padded_comp_states[i], step_key) + + new_padded_state, global_obs = jax.lax.switch( + idx, + [partial(gen_from_i, i) for i in range(num_components)], + ) + + # Update only the active component's state, keep others unchanged + # Now all states have uniform shape, so cond works + new_padded_comp_states = tuple( + jax.lax.cond( + idx == i, + lambda ns=new_padded_state: ns, + lambda ps=padded_comp_states[i]: ps, + ) + for i in range(num_components) + ) + + return (idx, new_padded_comp_states), global_obs + + # Run the scan over all timesteps with padded states + init_carry = (component_idx, padded_states) + (_, final_padded_states), observations = jax.lax.scan(scan_step, init_carry, keys) + + # 4. Unpad and unflatten final states back to native structures + final_comp_states = tuple( + unpad_and_unflatten(final_padded_states[i], flat_sizes[i], state_templates[i]) + for i in range(num_components) + ) + + # 5. Construct final state with one-hot beliefs + one_hot_beliefs = jax.nn.one_hot(component_idx, len(self.components), dtype=self.component_weights.dtype) + + if return_all_states: + # Run inference pass to compute state trajectory + # This simulates what an observer would compute via Bayesian filtering + # on the generated sequence, starting from the original initial state. 
+ def inference_step( + carry_state: NonErgodicState, obs: chex.Array + ) -> tuple[NonErgodicState, NonErgodicState]: + new_state = self.transition_states(carry_state, obs) + return new_state, new_state + + # Use original initial state for inference (not modified by generation) + _, state_trajectory = jax.lax.scan(inference_step, state, observations) + + return state_trajectory, observations + else: + return NonErgodicState( + component_beliefs=one_hot_beliefs, + component_states=final_comp_states, + ), observations diff --git a/tests/end_to_end/configs/generative_process/nonergodic_example.yaml b/tests/end_to_end/configs/generative_process/nonergodic_example.yaml new file mode 100644 index 00000000..2d050fa7 --- /dev/null +++ b/tests/end_to_end/configs/generative_process/nonergodic_example.yaml @@ -0,0 +1,39 @@ +# Nonergodic Generative Process Example +# A mixture of independent generative processes with block diagonal structure. +# No transitions occur between components - the process "picks" a component +# at the start and stays with it forever. + +name: nonergodic_example +base_vocab_size: ??? # Will auto-infer as 3 +vocab_size: ??? + +instance: + _target_: simplexity.generative_processes.builder.build_nonergodic_process_from_spec + + components: + # Component 0: mess3 HMM with specific parameters + - component_type: hmm + process_name: mess3 + process_params: + x: 0.15 + a: 0.6 + + # Component 1: mess3 HMM with different parameters + - component_type: hmm + process_name: mess3 + process_params: + x: 0.5 + a: 0.6 + + # Initial mixture weights (will be normalized) + # 60% chance of starting in component 0, 40% in component 1 + component_weights: [0.6, 0.4] + +bos_token: ??? 
+eos_token: null + +# Interpretation: +# - With probability 0.6, sequences come from mess3(x=0.15, a=0.6) +# - With probability 0.4, sequences come from mess3(x=0.5, a=0.6) +# - Once a component is "chosen" by the initial state, all future observations +# come from that component (truly nonergodic - no mixing/switching) diff --git a/tests/generative_processes/test_nonergodic_generative_process.py b/tests/generative_processes/test_nonergodic_generative_process.py new file mode 100644 index 00000000..747662d5 --- /dev/null +++ b/tests/generative_processes/test_nonergodic_generative_process.py @@ -0,0 +1,332 @@ +"""Tests for NonErgodicGenerativeProcess.""" + +# pylint: disable-all +# Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all +# Re-enable all pylint checkers for the checking phase. This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. 
+ +import chex +import jax +import jax.numpy as jnp +import pytest + +from simplexity.generative_processes.builder import ( + build_generalized_hidden_markov_model, + build_hidden_markov_model, + build_nonergodic_process_from_spec, +) +from simplexity.generative_processes.nonergodic_generative_process import ( + NonErgodicGenerativeProcess, + NonErgodicState, +) + + +class TestNonErgodicState: + """Tests for NonErgodicState structure.""" + + def test_state_is_named_tuple(self): + """NonErgodicState should be a NamedTuple with named fields.""" + state = NonErgodicState( + component_beliefs=jnp.array([0.5, 0.5]), + component_states=(jnp.array([1.0, 0.0]), jnp.array([0.5, 0.5])), + ) + assert hasattr(state, "component_beliefs") + assert hasattr(state, "component_states") + assert isinstance(state, tuple) + + def test_state_is_pytree_compatible(self): + """NonErgodicState should be compatible with JAX pytree operations.""" + state = NonErgodicState( + component_beliefs=jnp.array([0.5, 0.5]), + component_states=(jnp.array([1.0, 0.0]), jnp.array([0.5, 0.5])), + ) + # Should work with tree_map + doubled = jax.tree_util.tree_map(lambda x: x * 2, state) + chex.assert_trees_all_close(doubled.component_beliefs, jnp.array([1.0, 1.0])) + + +class TestNonErgodicGenerativeProcess: + """Tests for NonErgodicGenerativeProcess class.""" + + @pytest.fixture + def two_coin_process(self): + """Two biased coins as a nonergodic mixture.""" + coin1 = build_hidden_markov_model("coin", {"p": 0.7}) + coin2 = build_hidden_markov_model("coin", {"p": 0.3}) + return NonErgodicGenerativeProcess( + components=[coin1, coin2], + component_weights=[0.6, 0.4], + ) + + def test_vocab_size_inferred_correctly(self, two_coin_process): + """Vocab size should be max of component vocab sizes.""" + assert two_coin_process.vocab_size == 2 + + def test_initial_state_has_correct_structure(self, two_coin_process): + """Initial state should have component beliefs and per-component states.""" + state = 
two_coin_process.initial_state + assert isinstance(state, NonErgodicState) + chex.assert_trees_all_close(state.component_beliefs, jnp.array([0.6, 0.4])) + assert len(state.component_states) == 2 + + def test_observation_distribution_is_mixture(self, two_coin_process): + """Observation dist should be weighted mixture of component dists.""" + state = two_coin_process.initial_state + dist = two_coin_process.observation_probability_distribution(state) + + # Expected: 0.6 * [0.7, 0.3] + 0.4 * [0.3, 0.7] = [0.54, 0.46] + expected = jnp.array([0.54, 0.46]) + chex.assert_trees_all_close(dist, expected, atol=1e-6) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + def test_transition_updates_beliefs_correctly(self, two_coin_process): + """Observing a token should update component beliefs via Bayes rule.""" + state = two_coin_process.initial_state + + # Observe token 0 (heads) + new_state = two_coin_process.transition_states(state, jnp.array(0)) + + # Bayes update: P(comp | obs=0) proportional to P(obs=0 | comp) * P(comp) + # P(comp0 | obs=0) proportional to 0.7 * 0.6 = 0.42 + # P(comp1 | obs=0) proportional to 0.3 * 0.4 = 0.12 + # Normalized: [0.42, 0.12] / 0.54 = [0.778, 0.222] + expected_beliefs = jnp.array([0.42, 0.12]) + expected_beliefs = expected_beliefs / jnp.sum(expected_beliefs) + chex.assert_trees_all_close(new_state.component_beliefs, expected_beliefs, atol=1e-5) + + def test_probability_equals_mixture_probability(self, two_coin_process): + """P(sequence) should equal weighted sum of component probabilities.""" + observations = jnp.array([0, 0, 1]) # HHT + + prob = two_coin_process.probability(observations) + + # Manual calculation: + # P(HHT | coin1) = 0.7 * 0.7 * 0.3 = 0.147 + # P(HHT | coin2) = 0.3 * 0.3 * 0.7 = 0.063 + # P(HHT) = 0.6 * 0.147 + 0.4 * 0.063 = 0.0882 + 0.0252 = 0.1134 + expected = 0.6 * 0.147 + 0.4 * 0.063 + chex.assert_trees_all_close(prob, expected, atol=1e-6) + + def test_log_probability_consistent_with_probability(self, 
two_coin_process): + """log_probability should equal log of probability.""" + observations = jnp.array([0, 1, 0, 1]) + + prob = two_coin_process.probability(observations) + log_prob = two_coin_process.log_probability(observations) + + chex.assert_trees_all_close(log_prob, jnp.log(prob), atol=1e-5) + + def test_generate_produces_valid_sequences(self, two_coin_process): + """generate should produce sequences within vocab range.""" + state = two_coin_process.initial_state + # Batch the state + batch_size = 4 + batch_states = NonErgodicState( + component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), + component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), + ) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + final_states, observations = two_coin_process.generate(batch_states, keys, 10, False) + + assert observations.shape == (batch_size, 10) + assert jnp.all(observations >= 0) + assert jnp.all(observations < two_coin_process.vocab_size) + + def test_emit_observation_within_vocab(self, two_coin_process): + """emit_observation should return valid tokens.""" + state = two_coin_process.initial_state + key = jax.random.PRNGKey(42) + + obs = two_coin_process.emit_observation(state, key) + + assert obs.shape == () + assert 0 <= int(obs) < two_coin_process.vocab_size + + +class TestVocabMaps: + """Tests for vocabulary mapping functionality.""" + + def test_different_vocab_maps_work(self): + """Components with different vocab maps should be handled correctly.""" + coin1 = build_hidden_markov_model("coin", {"p": 0.7}) + coin2 = build_hidden_markov_model("coin", {"p": 0.3}) + + process = NonErgodicGenerativeProcess( + components=[coin1, coin2], + component_weights=[0.5, 0.5], + vocab_maps=[[0, 1], [0, 2]], # coin2 maps to tokens 0, 2 + ) + + assert process.vocab_size == 3 # tokens 0, 1, 2 + + state = process.initial_state + dist = 
process.observation_probability_distribution(state) + + # Token 0: both components can emit (0.5 * 0.7 + 0.5 * 0.3 = 0.5) + # Token 1: only component 0 (0.5 * 0.3 = 0.15) + # Token 2: only component 1 (0.5 * 0.7 = 0.35) + expected = jnp.array([0.5, 0.15, 0.35]) + chex.assert_trees_all_close(dist, expected, atol=1e-6) + + def test_unmapped_tokens_have_zero_probability(self): + """Tokens not in a component's vocab should contribute zero from that component.""" + coin = build_hidden_markov_model("coin", {"p": 0.5}) + + process = NonErgodicGenerativeProcess( + components=[coin], + component_weights=[1.0], + vocab_maps=[[0, 2]], # Component uses tokens 0, 2; token 1 is unmapped + ) + + state = process.initial_state + dist = process.observation_probability_distribution(state) + + assert process.vocab_size == 3 + assert dist[1] == 0.0 # Token 1 has zero probability + + +class TestMixedComponentTypes: + """Tests for mixing different GenerativeProcess types.""" + + def test_hmm_and_ghmm_mixture(self): + """Should handle mixing HMM and GHMM components.""" + hmm = build_hidden_markov_model("even_ones", {"p": 0.5}) + ghmm = build_generalized_hidden_markov_model("tom_quantum", {"alpha": 1.0, "beta": 1.0}) + + process = NonErgodicGenerativeProcess( + components=[hmm, ghmm], + component_weights=[0.7, 0.3], + ) + + state = process.initial_state + dist = process.observation_probability_distribution(state) + + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + assert jnp.all(dist >= 0) + + +class TestBuilder: + """Tests for build_nonergodic_process_from_spec.""" + + def test_build_from_hmm_specs(self): + """Should build process from HMM specifications.""" + process = build_nonergodic_process_from_spec( + components=[ + { + "component_type": "hmm", + "process_name": "coin", + "process_params": {"p": 0.6}, + }, + { + "component_type": "hmm", + "process_name": "coin", + "process_params": {"p": 0.4}, + }, + ], + component_weights=[0.5, 0.5], + ) + + assert isinstance(process, 
NonErgodicGenerativeProcess) + assert len(process.components) == 2 + assert process.vocab_size == 2 + + def test_build_from_ghmm_specs(self): + """Should build process from GHMM specifications.""" + process = build_nonergodic_process_from_spec( + components=[ + { + "component_type": "ghmm", + "process_name": "tom_quantum", + "process_params": {"alpha": 1.0, "beta": 1.0}, + }, + ], + component_weights=[1.0], + ) + + assert isinstance(process, NonErgodicGenerativeProcess) + assert len(process.components) == 1 + + def test_build_with_vocab_maps(self): + """Should respect vocab_maps in spec.""" + process = build_nonergodic_process_from_spec( + components=[ + { + "component_type": "hmm", + "process_name": "coin", + "process_params": {"p": 0.5}, + }, + { + "component_type": "hmm", + "process_name": "coin", + "process_params": {"p": 0.5}, + }, + ], + component_weights=[0.5, 0.5], + vocab_maps=[[0, 1], [0, 2]], + ) + + assert process.vocab_size == 3 + + def test_invalid_component_type_raises(self): + """Should raise for unknown component type.""" + with pytest.raises(ValueError, match="Unknown component_type"): + build_nonergodic_process_from_spec( + components=[{"component_type": "invalid", "process_name": "coin"}], + component_weights=[1.0], + ) + + +class TestEdgeCases: + """Tests for edge cases and error handling.""" + + def test_single_component_degenerates_to_component(self): + """Single-component process should behave like the component.""" + coin = build_hidden_markov_model("coin", {"p": 0.7}) + + process = NonErgodicGenerativeProcess( + components=[coin], + component_weights=[1.0], + ) + + observations = jnp.array([0, 1, 0]) + + process_prob = process.probability(observations) + coin_prob = coin.probability(observations) + + chex.assert_trees_all_close(process_prob, coin_prob, atol=1e-6) + + def test_weights_are_normalized(self): + """Component weights should be normalized to sum to 1.""" + coin1 = build_hidden_markov_model("coin", {"p": 0.7}) + coin2 = 
build_hidden_markov_model("coin", {"p": 0.3}) + + # Provide unnormalized weights + process = NonErgodicGenerativeProcess( + components=[coin1, coin2], + component_weights=[2.0, 3.0], # Sum to 5, not 1 + ) + + chex.assert_trees_all_close(process.component_weights, jnp.array([0.4, 0.6]), atol=1e-6) + + def test_empty_components_raises(self): + """Should raise for empty component list.""" + with pytest.raises(ValueError, match="at least one component"): + NonErgodicGenerativeProcess( + components=[], + component_weights=[], + ) + + def test_mismatched_weights_raises(self): + """Should raise if weights don't match component count.""" + coin = build_hidden_markov_model("coin", {"p": 0.5}) + + with pytest.raises(ValueError, match="must match"): + NonErgodicGenerativeProcess( + components=[coin, coin], + component_weights=[1.0], # Only 1 weight for 2 components + ) From b4abda3d9faec8f77f391d1ed4c046a3daf8bf9e Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Fri, 13 Feb 2026 19:40:59 +0000 Subject: [PATCH 21/35] Fix off-by-one in NonErgodic generate inference pass The inference scan returned posterior states (after seeing each observation) instead of prior states (before seeing each observation), misaligning with the base GenerativeProcess.generate contract. 
Co-Authored-By: Claude Opus 4.6 --- .../generative_processes/nonergodic_generative_process.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simplexity/generative_processes/nonergodic_generative_process.py b/simplexity/generative_processes/nonergodic_generative_process.py index 8fa8a03a..7a601522 100644 --- a/simplexity/generative_processes/nonergodic_generative_process.py +++ b/simplexity/generative_processes/nonergodic_generative_process.py @@ -527,7 +527,7 @@ def inference_step( carry_state: NonErgodicState, obs: chex.Array ) -> tuple[NonErgodicState, NonErgodicState]: new_state = self.transition_states(carry_state, obs) - return new_state, new_state + return new_state, carry_state # Use original initial state for inference (not modified by generation) _, state_trajectory = jax.lax.scan(inference_step, state, observations) From e8918edaae6f26ac02df990586f0bb81ad40a57e Mon Sep 17 00:00:00 2001 From: Kyle J Ray Date: Tue, 17 Feb 2026 15:39:54 -0800 Subject: [PATCH 22/35] Add InflatedVocabularyProcess for vocabulary inflation with uniform noise Wraps any GenerativeProcess by adding a stateless uniform noise dimension, multiplying vocab size by K. Each base token t becomes K inflated tokens, testing whether models can discover which part of the token carries state info. Optimal loss increases by exactly log(K) nats. 
Co-Authored-By: Claude Opus 4.6 --- simplexity/generative_processes/builder.py | 64 ++++ .../inflated_vocabulary_process.py | 103 ++++++ .../test_inflated_vocabulary_process.py | 349 ++++++++++++++++++ 3 files changed, 516 insertions(+) create mode 100644 simplexity/generative_processes/inflated_vocabulary_process.py create mode 100644 tests/generative_processes/test_inflated_vocabulary_process.py diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 4975acd3..c24d92ca 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -18,7 +18,9 @@ from simplexity.generative_processes.factored_generative_process import ComponentType, FactoredGenerativeProcess from simplexity.generative_processes.generalized_hidden_markov_model import GeneralizedHiddenMarkovModel +from simplexity.generative_processes.generative_process import GenerativeProcess from simplexity.generative_processes.hidden_markov_model import HiddenMarkovModel +from simplexity.generative_processes.inflated_vocabulary_process import InflatedVocabularyProcess from simplexity.generative_processes.nonergodic_generative_process import NonErgodicGenerativeProcess from simplexity.generative_processes.structures import ( ConditionalTransitions, @@ -729,3 +731,65 @@ def build_nonergodic_process_from_spec( vocab_maps=final_vocab_maps, device=device, ) + + +def build_inflated_process( + base_process: GenerativeProcess, + inflation_factor: int, +) -> InflatedVocabularyProcess: + """Build an inflated vocabulary process wrapping a base process. + + Args: + base_process: Any GenerativeProcess to wrap. + inflation_factor: Number of noise variants per base token (K >= 2). + + Returns: + InflatedVocabularyProcess with vocab_size = K * base_process.vocab_size. 
+ """ + return InflatedVocabularyProcess(base_process, inflation_factor) + + +def build_inflated_process_from_spec( + base_spec: dict[str, Any], + inflation_factor: int, + device: str | None = None, +) -> InflatedVocabularyProcess: + """Build an inflated vocabulary process from a base process specification. + + Args: + base_spec: Specification for the base process. Must include: + - component_type: "hmm", "ghmm", or "factored" + - For hmm/ghmm: process_name, process_params + - For factored: structure_type, spec, and structure-specific params + inflation_factor: Number of noise variants per base token (K >= 2). + device: Device placement. + + Returns: + InflatedVocabularyProcess wrapping the built base process. + + Raises: + ValueError: If component_type is unknown. + """ + comp_type = base_spec.get("component_type", "hmm") + + if comp_type == "hmm": + base_process: GenerativeProcess = build_hidden_markov_model( + process_name=base_spec["process_name"], + process_params=base_spec.get("process_params", {}), + device=device, + noise_epsilon=base_spec.get("noise_epsilon", 0.0), + ) + elif comp_type == "ghmm": + base_process = build_generalized_hidden_markov_model( + process_name=base_spec["process_name"], + process_params=base_spec.get("process_params", {}), + device=device, + noise_epsilon=base_spec.get("noise_epsilon", 0.0), + ) + elif comp_type == "factored": + factored_kwargs = {k: v for k, v in base_spec.items() if k not in ("component_type",)} + base_process = build_factored_process_from_spec(**factored_kwargs) + else: + raise ValueError(f"Unknown base component_type: {comp_type}") + + return InflatedVocabularyProcess(base_process, inflation_factor) diff --git a/simplexity/generative_processes/inflated_vocabulary_process.py b/simplexity/generative_processes/inflated_vocabulary_process.py new file mode 100644 index 00000000..abe54ade --- /dev/null +++ b/simplexity/generative_processes/inflated_vocabulary_process.py @@ -0,0 +1,103 @@ +"""Inflated vocabulary 
generative process wrapper. + +Wraps any GenerativeProcess by adding a uniform noise dimension to the vocabulary, +increasing vocab size by a multiplicative factor K. The noise dimension is stateless: +state dynamics depend only on the base token. + +Token encoding: inflated_token = noise_prefix * V_base + base_token +- base_token extraction: inflated_token % V_base +- noise_prefix extraction: inflated_token // V_base +""" + +from __future__ import annotations + +import chex +import equinox as eqx +import jax +import jax.numpy as jnp + +from simplexity.generative_processes.generative_process import GenerativeProcess + + +class InflatedVocabularyProcess[State](GenerativeProcess[State]): + """Wraps a GenerativeProcess by adding a uniform noise dimension to inflate the vocabulary. + + For a base process with vocab size V and inflation factor K: + - New vocab size is K * V + - inflated_token = noise_prefix * V + base_token + - P(inflated_token | state) = P(base_token | state) / K + - State dynamics only depend on base_token (noise is stateless) + + This increases optimal per-token loss by exactly log(K) nats. + + Args: + base_process: The generative process to wrap. + inflation_factor: Number of noise variants per base token (K >= 2). 
+ """ + + base_process: GenerativeProcess[State] + inflation_factor: int + _base_vocab_size: int + _inflated_vocab_size: int + + def __init__( + self, + base_process: GenerativeProcess[State], + inflation_factor: int, + ) -> None: + if inflation_factor < 2: + raise ValueError(f"inflation_factor must be >= 2, got {inflation_factor}") + self.base_process = base_process + self.inflation_factor = inflation_factor + self._base_vocab_size = base_process.vocab_size + self._inflated_vocab_size = inflation_factor * base_process.vocab_size + + @property + def vocab_size(self) -> int: + """The number of inflated observations: K * base vocab size.""" + return self._inflated_vocab_size + + @property + def initial_state(self) -> State: + """The initial state, identical to the base process.""" + return self.base_process.initial_state + + @eqx.filter_jit + def emit_observation(self, state: State, key: chex.PRNGKey) -> chex.Array: + """Emit an inflated observation: sample base token then add uniform noise prefix.""" + k1, k2 = jax.random.split(key) + base_obs = self.base_process.emit_observation(state, k1) + noise_prefix = jax.random.randint(k2, (), 0, self.inflation_factor) + return noise_prefix * self._base_vocab_size + base_obs + + @eqx.filter_jit + def transition_states(self, state: State, obs: chex.Array) -> State: + """Update state using only the base token (noise prefix is discarded).""" + base_obs = jnp.mod(obs, self._base_vocab_size) + return self.base_process.transition_states(state, base_obs) + + @eqx.filter_jit + def observation_probability_distribution(self, state: State) -> jax.Array: + """Compute P(inflated_obs | state) = P(base_obs | state) / K for each noise variant.""" + base_dist = self.base_process.observation_probability_distribution(state) + return jnp.tile(base_dist / self.inflation_factor, self.inflation_factor) + + @eqx.filter_jit + def log_observation_probability_distribution(self, log_belief_state: State) -> jax.Array: + """Compute log P(inflated_obs | 
state) = log P(base_obs | state) - log(K).""" + base_log_dist = self.base_process.log_observation_probability_distribution(log_belief_state) + return jnp.tile(base_log_dist - jnp.log(self.inflation_factor), self.inflation_factor) + + @eqx.filter_jit + def probability(self, observations: jax.Array) -> jax.Array: + """Compute P(inflated_seq) = P(base_seq) * (1/K)^T.""" + base_obs = jnp.mod(observations, self._base_vocab_size) + base_prob = self.base_process.probability(base_obs) + return base_prob * (1.0 / self.inflation_factor) ** observations.shape[0] + + @eqx.filter_jit + def log_probability(self, observations: jax.Array) -> jax.Array: + """Compute log P(inflated_seq) = log P(base_seq) - T * log(K).""" + base_obs = jnp.mod(observations, self._base_vocab_size) + base_log_prob = self.base_process.log_probability(base_obs) + return base_log_prob - observations.shape[0] * jnp.log(self.inflation_factor) diff --git a/tests/generative_processes/test_inflated_vocabulary_process.py b/tests/generative_processes/test_inflated_vocabulary_process.py new file mode 100644 index 00000000..793aba79 --- /dev/null +++ b/tests/generative_processes/test_inflated_vocabulary_process.py @@ -0,0 +1,349 @@ +"""Tests for InflatedVocabularyProcess.""" + +# pylint: disable-all +# Temporarily disable all pylint checkers during AST traversal to prevent crash. +# The imports checker crashes when resolving simplexity package imports due to a bug +# in pylint/astroid: https://github.com/pylint-dev/pylint/issues/10185 +# pylint: enable=all +# Re-enable all pylint checkers for the checking phase. This allows other checks +# (code quality, style, undefined names, etc.) to run normally while bypassing +# the problematic imports checker that would crash during AST traversal. 
+ +import chex +import jax +import jax.numpy as jnp +import pytest + +from simplexity.generative_processes.builder import ( + build_generalized_hidden_markov_model, + build_hidden_markov_model, + build_inflated_process, + build_inflated_process_from_spec, +) +from simplexity.generative_processes.inflated_vocabulary_process import InflatedVocabularyProcess +from simplexity.generative_processes.nonergodic_generative_process import ( + NonErgodicGenerativeProcess, + NonErgodicState, +) + + +class TestBasicProperties: + """Tests for basic properties of InflatedVocabularyProcess.""" + + @pytest.fixture + def coin_k3(self): + """Coin process with K=3 inflation.""" + coin = build_hidden_markov_model("coin", {"p": 0.7}) + return InflatedVocabularyProcess(coin, inflation_factor=3) + + @pytest.fixture + def mess3_k3(self): + """Mess3 process with K=3 inflation.""" + mess3 = build_hidden_markov_model("mess3", {"x": 0.15, "a": 0.6}) + return InflatedVocabularyProcess(mess3, inflation_factor=3) + + def test_vocab_size_coin(self, coin_k3: InflatedVocabularyProcess): + assert coin_k3.vocab_size == 6 + + def test_vocab_size_mess3(self, mess3_k3: InflatedVocabularyProcess): + assert mess3_k3.vocab_size == 9 + + def test_initial_state_matches_base(self, coin_k3: InflatedVocabularyProcess): + chex.assert_trees_all_close(coin_k3.initial_state, coin_k3.base_process.initial_state) + + def test_inflation_factor_stored(self, coin_k3: InflatedVocabularyProcess): + assert coin_k3.inflation_factor == 3 + + def test_invalid_inflation_factor_raises(self): + coin = build_hidden_markov_model("coin", {"p": 0.7}) + with pytest.raises(ValueError, match="inflation_factor must be >= 2"): + InflatedVocabularyProcess(coin, inflation_factor=1) + + def test_invalid_inflation_factor_zero_raises(self): + coin = build_hidden_markov_model("coin", {"p": 0.7}) + with pytest.raises(ValueError, match="inflation_factor must be >= 2"): + InflatedVocabularyProcess(coin, inflation_factor=0) + + +class 
TestObservationDistribution: + """Tests for observation probability distribution.""" + + @pytest.fixture + def coin_k3(self): + coin = build_hidden_markov_model("coin", {"p": 0.7}) + return InflatedVocabularyProcess(coin, inflation_factor=3) + + def test_distribution_sums_to_one(self, coin_k3: InflatedVocabularyProcess): + state = coin_k3.initial_state + dist = coin_k3.observation_probability_distribution(state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + def test_distribution_has_correct_size(self, coin_k3: InflatedVocabularyProcess): + state = coin_k3.initial_state + dist = coin_k3.observation_probability_distribution(state) + assert dist.shape == (6,) + + def test_distribution_spreads_uniformly(self, coin_k3: InflatedVocabularyProcess): + """Each base token's prob is split equally among K noise variants.""" + state = coin_k3.initial_state + dist = coin_k3.observation_probability_distribution(state) + expected = jnp.array([0.7 / 3, 0.3 / 3, 0.7 / 3, 0.3 / 3, 0.7 / 3, 0.3 / 3]) + chex.assert_trees_all_close(dist, expected, atol=1e-6) + + def test_noise_variants_have_equal_probability(self): + """All K noise variants of the same base token should have identical probability.""" + mess3 = build_hidden_markov_model("mess3", {"x": 0.15, "a": 0.6}) + inflated = InflatedVocabularyProcess(mess3, inflation_factor=4) + state = inflated.initial_state + dist = inflated.observation_probability_distribution(state) + v_base = mess3.vocab_size + for base_tok in range(v_base): + probs = [float(dist[n * v_base + base_tok]) for n in range(4)] + for p in probs[1:]: + chex.assert_trees_all_close(p, probs[0], atol=1e-6) + + def test_log_distribution_consistent(self, coin_k3: InflatedVocabularyProcess): + state = coin_k3.initial_state + log_state = jnp.log(state) + dist = coin_k3.observation_probability_distribution(state) + log_dist = coin_k3.log_observation_probability_distribution(log_state) + chex.assert_trees_all_close(log_dist, jnp.log(dist), atol=1e-5) + + 
+class TestTransitionStates: + """Tests for state transitions.""" + + def test_noise_prefix_does_not_affect_state(self): + """All K noise variants of the same base token should produce identical states.""" + even_ones = build_hidden_markov_model("even_ones", {"p": 0.5}) + inflated = InflatedVocabularyProcess(even_ones, inflation_factor=3) + state = inflated.initial_state + v_base = even_ones.vocab_size + + for base_tok in range(v_base): + states = [inflated.transition_states(state, jnp.array(n * v_base + base_tok)) for n in range(3)] + for s in states[1:]: + chex.assert_trees_all_close(s, states[0], atol=1e-6) + + def test_transition_matches_base_process(self): + """Transitioning with an inflated token should match transitioning with the base token.""" + mess3 = build_hidden_markov_model("mess3", {"x": 0.15, "a": 0.6}) + inflated = InflatedVocabularyProcess(mess3, inflation_factor=3) + state = mess3.initial_state + + for base_tok in range(mess3.vocab_size): + base_state = mess3.transition_states(state, jnp.array(base_tok)) + inflated_state = inflated.transition_states(state, jnp.array(base_tok)) + chex.assert_trees_all_close(inflated_state, base_state, atol=1e-6) + + inflated_state_noisy = inflated.transition_states(state, jnp.array(2 * mess3.vocab_size + base_tok)) + chex.assert_trees_all_close(inflated_state_noisy, base_state, atol=1e-6) + + +class TestProbability: + """Tests for sequence probability computation.""" + + def test_probability_scales_by_inflation_penalty(self): + """P(inflated_seq) = P(base_seq) / K^T.""" + coin = build_hidden_markov_model("coin", {"p": 0.7}) + k = 3 + inflated = InflatedVocabularyProcess(coin, inflation_factor=k) + + base_seq = jnp.array([0, 1, 0]) + base_prob = coin.probability(base_seq) + inflated_prob = inflated.probability(base_seq) + + expected = base_prob / (k**3) + chex.assert_trees_all_close(inflated_prob, expected, atol=1e-6) + + def test_probability_same_base_different_noise(self): + """Different noise prefixes with same 
base sequence should have same probability.""" + coin = build_hidden_markov_model("coin", {"p": 0.7}) + inflated = InflatedVocabularyProcess(coin, inflation_factor=3) + + seq_noise0 = jnp.array([0, 1, 0]) + seq_noise1 = jnp.array([2, 3, 2]) + seq_noise2 = jnp.array([4, 5, 4]) + + p0 = inflated.probability(seq_noise0) + p1 = inflated.probability(seq_noise1) + p2 = inflated.probability(seq_noise2) + + chex.assert_trees_all_close(p0, p1, atol=1e-6) + chex.assert_trees_all_close(p1, p2, atol=1e-6) + + def test_log_probability_consistent(self): + coin = build_hidden_markov_model("coin", {"p": 0.7}) + inflated = InflatedVocabularyProcess(coin, inflation_factor=3) + + seq = jnp.array([0, 3, 1, 4]) + prob = inflated.probability(seq) + log_prob = inflated.log_probability(seq) + chex.assert_trees_all_close(log_prob, jnp.log(prob), atol=1e-5) + + def test_optimal_loss_increases_by_log_k(self): + """Average per-token loss should increase by exactly log(K).""" + mess3 = build_hidden_markov_model("mess3", {"x": 0.15, "a": 0.6}) + k = 4 + inflated = InflatedVocabularyProcess(mess3, inflation_factor=k) + + key = jax.random.PRNGKey(42) + state = mess3.initial_state + batch_state = jnp.broadcast_to(state, (100,) + state.shape) + keys = jax.random.split(key, 100) + + _, base_seqs = mess3.generate(batch_state, keys, 200, False) + base_log_probs = jax.vmap(mess3.log_probability)(base_seqs) + base_avg_loss = -jnp.mean(base_log_probs) / 200 + + inflated_log_probs = jax.vmap(inflated.log_probability)(base_seqs) + inflated_avg_loss = -jnp.mean(inflated_log_probs) / 200 + + expected_increase = jnp.log(jnp.array(k, dtype=jnp.float32)) + chex.assert_trees_all_close(inflated_avg_loss - base_avg_loss, expected_increase, atol=0.01) + + +class TestGeneration: + """Tests for sequence generation.""" + + def test_generate_valid_tokens(self): + coin = build_hidden_markov_model("coin", {"p": 0.7}) + inflated = InflatedVocabularyProcess(coin, inflation_factor=3) + + state = inflated.initial_state + 
batch_state = jnp.broadcast_to(state, (4,) + state.shape) + keys = jax.random.split(jax.random.PRNGKey(0), 4) + _, observations = inflated.generate(batch_state, keys, 20, False) + + assert observations.shape == (4, 20) + assert jnp.all(observations >= 0) + assert jnp.all(observations < inflated.vocab_size) + + def test_generate_covers_noise_variants(self): + """Generated tokens should use all noise variants over many samples.""" + coin = build_hidden_markov_model("coin", {"p": 0.5}) + inflated = InflatedVocabularyProcess(coin, inflation_factor=3) + + state = inflated.initial_state + batch_state = jnp.broadcast_to(state, (50,) + state.shape) + keys = jax.random.split(jax.random.PRNGKey(123), 50) + _, observations = inflated.generate(batch_state, keys, 100, False) + + unique_tokens = jnp.unique(observations.ravel()) + assert unique_tokens.shape[0] == 6 + + def test_generate_with_return_all_states(self): + """Generation with return_all_states=True should return state trajectory.""" + mess3 = build_hidden_markov_model("mess3", {"x": 0.15, "a": 0.6}) + inflated = InflatedVocabularyProcess(mess3, inflation_factor=2) + + state = inflated.initial_state + batch_state = jnp.broadcast_to(state, (4,) + state.shape) + keys = jax.random.split(jax.random.PRNGKey(0), 4) + states, observations = inflated.generate(batch_state, keys, 10, True) + + assert observations.shape == (4, 10) + assert states.shape == (4, 10) + state.shape + + def test_base_token_distribution_matches(self): + """Extracting base tokens from inflated generation should match base distribution.""" + coin = build_hidden_markov_model("coin", {"p": 0.8}) + inflated = InflatedVocabularyProcess(coin, inflation_factor=5) + + state = inflated.initial_state + batch_state = jnp.broadcast_to(state, (200,) + state.shape) + keys = jax.random.split(jax.random.PRNGKey(42), 200) + _, observations = inflated.generate(batch_state, keys, 500, False) + + base_tokens = observations % coin.vocab_size + base_freq = jnp.mean(base_tokens 
== 0) + chex.assert_trees_all_close(base_freq, 0.8, atol=0.03) + + +class TestWithDifferentBaseProcesses: + """Tests for wrapping different process types.""" + + def test_wrap_ghmm(self): + ghmm = build_generalized_hidden_markov_model("tom_quantum", {"alpha": 1.0, "beta": 1.0}) + inflated = InflatedVocabularyProcess(ghmm, inflation_factor=2) + assert inflated.vocab_size == 2 * ghmm.vocab_size + + state = inflated.initial_state + dist = inflated.observation_probability_distribution(state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + batch_state = jnp.broadcast_to(state, (4,) + state.shape) + keys = jax.random.split(jax.random.PRNGKey(0), 4) + _, observations = inflated.generate(batch_state, keys, 10, False) + assert jnp.all(observations >= 0) + assert jnp.all(observations < inflated.vocab_size) + + def test_wrap_nonergodic(self): + coin1 = build_hidden_markov_model("coin", {"p": 0.7}) + coin2 = build_hidden_markov_model("coin", {"p": 0.3}) + nonergodic = NonErgodicGenerativeProcess( + components=[coin1, coin2], + component_weights=[0.5, 0.5], + ) + inflated = InflatedVocabularyProcess(nonergodic, inflation_factor=3) + assert inflated.vocab_size == 6 + + state = inflated.initial_state + assert isinstance(state, NonErgodicState) + dist = inflated.observation_probability_distribution(state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + def test_double_inflation(self): + """Stacking inflation: K1 * K2 total inflation.""" + coin = build_hidden_markov_model("coin", {"p": 0.5}) + inflated1 = InflatedVocabularyProcess(coin, inflation_factor=2) + inflated2 = InflatedVocabularyProcess(inflated1, inflation_factor=3) + assert inflated2.vocab_size == 12 + + state = inflated2.initial_state + dist = inflated2.observation_probability_distribution(state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + chex.assert_trees_all_close(dist, jnp.ones(12) / 12, atol=1e-6) + + +class TestBuilder: + """Tests for builder functions.""" + + 
def test_build_inflated_process(self): + coin = build_hidden_markov_model("coin", {"p": 0.7}) + inflated = build_inflated_process(coin, inflation_factor=3) + assert isinstance(inflated, InflatedVocabularyProcess) + assert inflated.vocab_size == 6 + + def test_build_inflated_process_from_spec_hmm(self): + inflated = build_inflated_process_from_spec( + base_spec={ + "component_type": "hmm", + "process_name": "mess3", + "process_params": {"x": 0.15, "a": 0.6}, + }, + inflation_factor=3, + ) + assert isinstance(inflated, InflatedVocabularyProcess) + assert inflated.vocab_size == 9 + + def test_build_inflated_process_from_spec_ghmm(self): + inflated = build_inflated_process_from_spec( + base_spec={ + "component_type": "ghmm", + "process_name": "tom_quantum", + "process_params": {"alpha": 1.0, "beta": 1.0}, + }, + inflation_factor=2, + ) + assert isinstance(inflated, InflatedVocabularyProcess) + state = inflated.initial_state + dist = inflated.observation_probability_distribution(state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + def test_build_inflated_process_from_spec_unknown_type_raises(self): + with pytest.raises(ValueError, match="Unknown base component_type"): + build_inflated_process_from_spec( + base_spec={"component_type": "unknown", "process_name": "coin"}, + inflation_factor=2, + ) From 44364135a4b5e391df2ab7630e3c2d3778811505 Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Tue, 24 Feb 2026 23:34:11 +0000 Subject: [PATCH 23/35] Add disjoint/partial-overlap vocab builders and use IndependentFactoredGenerativeProcess for independent structures - Add build_nonergodic_disjoint_vocab and build_nonergodic_partial_overlap builders with prefix, sliding, and random vocab map modes - Modify build_factored_process to return IndependentFactoredGenerativeProcess for independent structures (O(sum V_i) sampling vs O(prod V_i)) - Add noise_epsilon parameter to IndependentFactoredGenerativeProcess - Add 18 new tests covering all new functionality 
Co-Authored-By: Claude Opus 4.6 --- simplexity/generative_processes/builder.py | 127 +++++++++- ...independent_factored_generative_process.py | 3 + tests/generative_processes/test_builder.py | 226 ++++++++++++++++++ 3 files changed, 355 insertions(+), 1 deletion(-) diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index c24d92ca..a998beb1 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -10,6 +10,7 @@ # the problematic imports checker that would crash during AST traversal. import inspect +import random from collections.abc import Callable, Mapping, Sequence from typing import Any, Literal @@ -20,6 +21,7 @@ from simplexity.generative_processes.generalized_hidden_markov_model import GeneralizedHiddenMarkovModel from simplexity.generative_processes.generative_process import GenerativeProcess from simplexity.generative_processes.hidden_markov_model import HiddenMarkovModel +from simplexity.generative_processes.independent_factored_generative_process import IndependentFactoredGenerativeProcess from simplexity.generative_processes.inflated_vocabulary_process import InflatedVocabularyProcess from simplexity.generative_processes.nonergodic_generative_process import NonErgodicGenerativeProcess from simplexity.generative_processes.structures import ( @@ -195,7 +197,16 @@ def build_factored_process( if structure_type == "independent": structure = IndependentStructure() - elif structure_type == "chain": + return IndependentFactoredGenerativeProcess( + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + structure=structure, + noise_epsilon=noise_epsilon, + ) + + if structure_type == "chain": if "control_maps" not in structure_kwargs: raise ValueError("Missing required argument 'control_maps' for chain structure") structure = 
SequentialConditional(control_maps=tuple(structure_kwargs["control_maps"]), vocab_sizes=vocab_sizes) @@ -733,6 +744,120 @@ def build_nonergodic_process_from_spec( ) +def build_nonergodic_disjoint_vocab( + components: Sequence[dict[str, Any]], + component_weights: Sequence[float], + device: str | None = None, +) -> NonErgodicGenerativeProcess: + """Build a nonergodic process where each component has a fully disjoint alphabet. + + First builds each component to discover its vocab_size, then assigns + non-overlapping vocab_maps: C0 -> [0..V0-1], C1 -> [V0..V0+V1-1], etc. + + Args: + components: List of component specs (same format as build_nonergodic_process_from_spec). + component_weights: Mixture weights for components. + device: Device placement. + + Returns: + NonErgodicGenerativeProcess with disjoint per-component vocabularies. + """ + temp = build_nonergodic_process_from_spec(components, component_weights, device=device) + comp_vocab_sizes = [c.vocab_size for c in temp.components] + + vocab_maps: list[list[int]] = [] + offset = 0 + for v in comp_vocab_sizes: + vocab_maps.append(list(range(offset, offset + v))) + offset += v + + return build_nonergodic_process_from_spec(components, component_weights, vocab_maps=vocab_maps, device=device) + + +def _build_prefix_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: int) -> list[list[int]]: + """Build vocab maps using the prefix strategy. + + C0 gets [0..V-1]. Ci>0 gets shared [0..n_shared-1] + unique tokens above V. + """ + vocab_maps: list[list[int]] = [] + for i in range(n_components): + if i == 0: + vocab_maps.append(list(range(v))) + else: + unique_start = v + (i - 1) * n_unique + vocab_maps.append(list(range(n_shared)) + list(range(unique_start, unique_start + n_unique))) + return vocab_maps + + +def _build_sliding_vocab_maps(n_components: int, v: int, n_unique: int) -> list[list[int]]: + """Build vocab maps using the sliding/offset strategy. 
+ + Ci gets [i*offset..i*offset+V-1] where offset = max(1, n_unique). + """ + offset = max(1, n_unique) + return [list(range(i * offset, i * offset + v)) for i in range(n_components)] + + +def _build_random_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: int, seed: int) -> list[list[int]]: + """Build vocab maps using the prefix strategy, then randomly permute global token indices.""" + prefix_maps = _build_prefix_vocab_maps(n_components, v, n_shared, n_unique) + global_vocab_size = max(max(vm) for vm in prefix_maps) + 1 + rng = random.Random(seed) + perm = list(range(global_vocab_size)) + rng.shuffle(perm) + return [[perm[tok] for tok in vm] for vm in prefix_maps] + + +def build_nonergodic_partial_overlap( + components: Sequence[dict[str, Any]], + component_weights: Sequence[float], + overlap_frac: float = 0.7, + mode: Literal["prefix", "sliding", "random"] = "prefix", + seed: int | None = None, + device: str | None = None, +) -> NonErgodicGenerativeProcess: + """Build a nonergodic process with partially overlapping alphabets. + + Args: + components: List of component specs (same format as build_nonergodic_process_from_spec). + component_weights: Mixture weights for components. + overlap_frac: Fraction of tokens shared between components (0.0 = disjoint, 1.0 = full overlap). + mode: Strategy for assigning vocab maps: + - "prefix": C0 gets [0..V-1], Ci>0 gets shared prefix + unique suffix above V. + - "sliding": Each component's vocab is offset by V * (1 - overlap_frac) from the previous. + - "random": Same overlap structure as prefix, but with a random permutation + of global token indices. Requires the ``seed`` parameter. + seed: Random seed for reproducibility. Required when mode="random". + device: Device placement. + + Returns: + NonErgodicGenerativeProcess with partially overlapping vocabularies. + + Raises: + ValueError: If mode is unknown or seed is missing for random mode. 
+ """ + if mode == "random" and seed is None: + raise ValueError("seed is required when mode='random'") + + temp = build_nonergodic_process_from_spec(components, component_weights, device=device) + v = temp.components[0].vocab_size + n_shared = int(v * overlap_frac) + n_unique = v - n_shared + n_components = len(components) + + if mode == "prefix": + vocab_maps = _build_prefix_vocab_maps(n_components, v, n_shared, n_unique) + elif mode == "sliding": + vocab_maps = _build_sliding_vocab_maps(n_components, v, n_unique) + elif mode == "random": + assert seed is not None + vocab_maps = _build_random_vocab_maps(n_components, v, n_shared, n_unique, seed) + else: + raise ValueError(f"Unknown mode '{mode}'. Must be 'prefix', 'sliding', or 'random'.") + + return build_nonergodic_process_from_spec(components, component_weights, vocab_maps=vocab_maps, device=device) + + def build_inflated_process( base_process: GenerativeProcess, inflation_factor: int, diff --git a/simplexity/generative_processes/independent_factored_generative_process.py b/simplexity/generative_processes/independent_factored_generative_process.py index acb83710..adeca737 100644 --- a/simplexity/generative_processes/independent_factored_generative_process.py +++ b/simplexity/generative_processes/independent_factored_generative_process.py @@ -49,6 +49,7 @@ def __init__( initial_states: Sequence[jax.Array], structure: ConditionalStructure, device: str | None = None, + noise_epsilon: float = 0.0, frozen_factor_indices: frozenset[int] = frozenset(), frozen_key: jax.Array | None = None, ) -> None: @@ -63,6 +64,7 @@ def __init__( initial_states: Initial state per factor (shape [S_i]) structure: Conditional structure defining factor interactions device: Device to place arrays on (e.g., "cpu", "gpu") + noise_epsilon: Noisy channel epsilon value frozen_factor_indices: Indices of factors whose sequences are frozen across batch frozen_key: JAX random key for frozen sequence generation. 
Required if frozen_factor_indices is non-empty. @@ -78,6 +80,7 @@ def __init__( initial_states=initial_states, structure=structure, device=device, + noise_epsilon=noise_epsilon, ) num_factors = len(component_types) diff --git a/tests/generative_processes/test_builder.py b/tests/generative_processes/test_builder.py index bd80643b..38df3eb6 100644 --- a/tests/generative_processes/test_builder.py +++ b/tests/generative_processes/test_builder.py @@ -23,8 +23,10 @@ build_generalized_hidden_markov_model, build_hidden_markov_model, build_matrices_from_spec, + build_nonergodic_disjoint_vocab, build_nonergodic_hidden_markov_model, build_nonergodic_initial_state, + build_nonergodic_partial_overlap, build_nonergodic_transition_matrices, build_symmetric_from_spec, build_transition_coupled_from_spec, @@ -603,3 +605,227 @@ def test_build_chain_from_spec_empty_chain_raises(): """Empty chain should raise ValueError.""" with pytest.raises(ValueError, match="chain must contain at least one node"): build_chain_from_spec([]) + + +# --- Tests for IndependentFactoredGenerativeProcess in build_factored_process --- + + +def test_build_factored_process_independent_returns_independent_subclass(components_spec): + """build_factored_process with independent structure should return IndependentFactoredGenerativeProcess.""" + from simplexity.generative_processes.independent_factored_generative_process import ( + IndependentFactoredGenerativeProcess, + ) + + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components_spec + ) + process = build_factored_process( + structure_type="independent", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + ) + assert isinstance(process, IndependentFactoredGenerativeProcess) + assert isinstance(process.structure, IndependentStructure) + + +def 
test_build_factored_process_independent_passes_noise_epsilon(components_spec): + """noise_epsilon should be propagated to IndependentFactoredGenerativeProcess.""" + component_types, transition_matrices, normalizing_eigenvectors, initial_states = build_matrices_from_spec( + components_spec + ) + process = build_factored_process( + structure_type="independent", + component_types=component_types, + transition_matrices=transition_matrices, + normalizing_eigenvectors=normalizing_eigenvectors, + initial_states=initial_states, + noise_epsilon=0.05, + ) + assert process.noise_epsilon == 0.05 + + +def test_build_factored_process_from_spec_independent_returns_independent_subclass(components_spec): + """build_factored_process_from_spec with independent structure returns IndependentFactoredGenerativeProcess.""" + from simplexity.generative_processes.independent_factored_generative_process import ( + IndependentFactoredGenerativeProcess, + ) + + process = build_factored_process_from_spec(structure_type="independent", spec=components_spec) + assert isinstance(process, IndependentFactoredGenerativeProcess) + + +# --- Tests for build_nonergodic_disjoint_vocab --- + + +TWO_COINS = [ + {"component_type": "hmm", "process_name": "coin", "process_params": {"p": 0.6}}, + {"component_type": "hmm", "process_name": "coin", "process_params": {"p": 0.4}}, +] + + +class TestBuildNonErgodicDisjointVocab: + """Tests for build_nonergodic_disjoint_vocab.""" + + def test_vocab_maps_are_non_overlapping(self): + """Each component should get a unique, non-overlapping vocab range.""" + process = build_nonergodic_disjoint_vocab(components=TWO_COINS, component_weights=[0.5, 0.5]) + vm0 = set(process.vocab_maps[0].tolist()) + vm1 = set(process.vocab_maps[1].tolist()) + assert vm0 == {0, 1} + assert vm1 == {2, 3} + assert vm0.isdisjoint(vm1) + + def test_vocab_size_is_sum_of_components(self): + """Total vocab size should be sum of all component vocab sizes.""" + process = 
build_nonergodic_disjoint_vocab(components=TWO_COINS, component_weights=[0.5, 0.5]) + assert process.vocab_size == 4 + + def test_distribution_sums_to_one(self): + """Observation distribution should be a valid probability distribution.""" + process = build_nonergodic_disjoint_vocab(components=TWO_COINS, component_weights=[0.5, 0.5]) + dist = process.observation_probability_distribution(process.initial_state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + def test_three_components_disjoint(self): + """Three-component disjoint should produce three non-overlapping ranges.""" + components = [ + {"component_type": "hmm", "process_name": "coin", "process_params": {"p": 0.5}}, + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.15, "a": 0.6}}, + {"component_type": "hmm", "process_name": "coin", "process_params": {"p": 0.3}}, + ] + process = build_nonergodic_disjoint_vocab(components=components, component_weights=[0.4, 0.3, 0.3]) + assert process.vocab_size == 7 # 2 + 3 + 2 + all_tokens = set() + for vm in process.vocab_maps: + tokens = set(vm.tolist()) + assert all_tokens.isdisjoint(tokens) + all_tokens.update(tokens) + + +# --- Tests for build_nonergodic_partial_overlap --- + + +class TestBuildNonErgodicPartialOverlap: + """Tests for build_nonergodic_partial_overlap.""" + + def test_prefix_mode_shared_and_unique_tokens(self): + """Prefix mode: components should share some tokens and have unique tokens.""" + process = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="prefix" + ) + vm0 = process.vocab_maps[0].tolist() + vm1 = process.vocab_maps[1].tolist() + assert vm0 == [0, 1] + assert vm1 == [0, 2] + + def test_prefix_mode_full_overlap(self): + """overlap_frac=1.0 should give fully shared vocabularies.""" + process = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=1.0, mode="prefix" + ) + vm0 = 
process.vocab_maps[0].tolist() + vm1 = process.vocab_maps[1].tolist() + assert vm0 == vm1 + + def test_prefix_mode_zero_overlap_is_disjoint(self): + """overlap_frac=0.0 with prefix mode should produce disjoint vocabs.""" + process = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.0, mode="prefix" + ) + vm0 = set(process.vocab_maps[0].tolist()) + vm1 = set(process.vocab_maps[1].tolist()) + assert vm0.isdisjoint(vm1) + + def test_sliding_mode_produces_offset_maps(self): + """Sliding mode should produce overlapping ranges offset by V * (1 - overlap_frac).""" + components = [ + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.15, "a": 0.6}}, + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.35, "a": 0.6}}, + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.5, "a": 0.6}}, + ] + # V=3, overlap_frac=2/3 => n_unique=1, offset=1 + process = build_nonergodic_partial_overlap( + components=components, component_weights=[0.333, 0.333, 0.334], overlap_frac=2.0 / 3.0, mode="sliding" + ) + assert process.vocab_maps[0].tolist() == [0, 1, 2] + assert process.vocab_maps[1].tolist() == [1, 2, 3] + assert process.vocab_maps[2].tolist() == [2, 3, 4] + + def test_sliding_mode_full_overlap(self): + """Sliding with overlap_frac=1.0 should still offset by at least 1.""" + process = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=1.0, mode="sliding" + ) + vm0 = process.vocab_maps[0].tolist() + vm1 = process.vocab_maps[1].tolist() + # offset = max(1, 0) = 1, so C0=[0,1], C1=[1,2] + assert vm0 == [0, 1] + assert vm1 == [1, 2] + + def test_random_mode_preserves_overlap_structure(self): + """Random mode should have the same overlap counts as prefix mode.""" + process_prefix = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="prefix" + ) + 
process_random = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="random", seed=42 + ) + prefix_shared = len(set(process_prefix.vocab_maps[0].tolist()) & set(process_prefix.vocab_maps[1].tolist())) + random_shared = len(set(process_random.vocab_maps[0].tolist()) & set(process_random.vocab_maps[1].tolist())) + assert prefix_shared == random_shared + + def test_random_mode_is_deterministic_with_seed(self): + """Same seed should produce identical vocab maps.""" + p1 = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="random", seed=123 + ) + p2 = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="random", seed=123 + ) + for vm1, vm2 in zip(p1.vocab_maps, p2.vocab_maps, strict=True): + assert vm1.tolist() == vm2.tolist() + + def test_random_mode_different_seeds_differ(self): + """Different seeds should produce different vocab maps.""" + three_mess3 = [ + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.15, "a": 0.6}}, + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.35, "a": 0.6}}, + {"component_type": "hmm", "process_name": "mess3", "process_params": {"x": 0.5, "a": 0.6}}, + ] + weights = [0.333, 0.333, 0.334] + p1 = build_nonergodic_partial_overlap( + components=three_mess3, component_weights=weights, overlap_frac=0.5, mode="random", seed=1 + ) + p2 = build_nonergodic_partial_overlap( + components=three_mess3, component_weights=weights, overlap_frac=0.5, mode="random", seed=2 + ) + maps_differ = any(vm1.tolist() != vm2.tolist() for vm1, vm2 in zip(p1.vocab_maps, p2.vocab_maps, strict=True)) + assert maps_differ + + def test_random_mode_requires_seed(self): + """Random mode without seed should raise ValueError.""" + with pytest.raises(ValueError, match="seed is required"): + build_nonergodic_partial_overlap( + 
components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="random" + ) + + def test_distribution_sums_to_one_all_modes(self): + """Observation distribution should be valid for all modes.""" + for mode, kwargs in [("prefix", {}), ("sliding", {}), ("random", {"seed": 42})]: + process = build_nonergodic_partial_overlap( + components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode=mode, **kwargs + ) + dist = process.observation_probability_distribution(process.initial_state) + chex.assert_trees_all_close(jnp.sum(dist), 1.0, atol=1e-6) + + def test_unknown_mode_raises(self): + """Unknown mode should raise ValueError.""" + with pytest.raises(ValueError, match="Unknown mode"): + build_nonergodic_partial_overlap( + components=TWO_COINS, + component_weights=[0.5, 0.5], + overlap_frac=0.5, + mode="bogus", # type: ignore[arg-type] + ) From e13bd711d47f590d558cdcedfe2ae2497e6fd77c Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Wed, 25 Feb 2026 01:28:04 +0000 Subject: [PATCH 24/35] Fix random vocab mode to independently sample tokens per component Previously, random mode applied a global permutation to the prefix maps, making the overlap structure identical to prefix. Now each component independently samples V tokens from the global pool, producing genuinely random overlap patterns. 
Co-Authored-By: Claude Opus 4.6 --- simplexity/generative_processes/builder.py | 15 +++++++++------ tests/generative_processes/test_builder.py | 16 +++++++--------- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index a998beb1..3e8eb4a1 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -799,13 +799,16 @@ def _build_sliding_vocab_maps(n_components: int, v: int, n_unique: int) -> list[ def _build_random_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: int, seed: int) -> list[list[int]]: - """Build vocab maps using the prefix strategy, then randomly permute global token indices.""" + """Build vocab maps by having each component randomly sample V tokens from the global pool. + + The global vocab size is the same as in prefix mode (V + (n_components - 1) * n_unique), + and each component independently samples V tokens without replacement. + """ prefix_maps = _build_prefix_vocab_maps(n_components, v, n_shared, n_unique) global_vocab_size = max(max(vm) for vm in prefix_maps) + 1 rng = random.Random(seed) - perm = list(range(global_vocab_size)) - rng.shuffle(perm) - return [[perm[tok] for tok in vm] for vm in prefix_maps] + global_tokens = list(range(global_vocab_size)) + return [sorted(rng.sample(global_tokens, v)) for _ in range(n_components)] def build_nonergodic_partial_overlap( @@ -825,8 +828,8 @@ def build_nonergodic_partial_overlap( mode: Strategy for assigning vocab maps: - "prefix": C0 gets [0..V-1], Ci>0 gets shared prefix + unique suffix above V. - "sliding": Each component's vocab is offset by V * (1 - overlap_frac) from the previous. - - "random": Same overlap structure as prefix, but with a random permutation - of global token indices. Requires the ``seed`` parameter. + - "random": Each component independently samples V tokens from the global pool. 
+ Global pool size matches prefix mode. Requires the ``seed`` parameter. seed: Random seed for reproducibility. Required when mode="random". device: Device placement. diff --git a/tests/generative_processes/test_builder.py b/tests/generative_processes/test_builder.py index 38df3eb6..03b4386e 100644 --- a/tests/generative_processes/test_builder.py +++ b/tests/generative_processes/test_builder.py @@ -764,17 +764,15 @@ def test_sliding_mode_full_overlap(self): assert vm0 == [0, 1] assert vm1 == [1, 2] - def test_random_mode_preserves_overlap_structure(self): - """Random mode should have the same overlap counts as prefix mode.""" - process_prefix = build_nonergodic_partial_overlap( - components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="prefix" - ) - process_random = build_nonergodic_partial_overlap( + def test_random_mode_independent_sampling(self): + """Random mode should independently sample V tokens per component from the global pool.""" + process = build_nonergodic_partial_overlap( components=TWO_COINS, component_weights=[0.5, 0.5], overlap_frac=0.5, mode="random", seed=42 ) - prefix_shared = len(set(process_prefix.vocab_maps[0].tolist()) & set(process_prefix.vocab_maps[1].tolist())) - random_shared = len(set(process_random.vocab_maps[0].tolist()) & set(process_random.vocab_maps[1].tolist())) - assert prefix_shared == random_shared + v = process.components[0].vocab_size + for vm in process.vocab_maps: + assert len(vm.tolist()) == v + assert len(set(vm.tolist())) == v # no duplicates within a component def test_random_mode_is_deterministic_with_seed(self): """Same seed should produce identical vocab maps.""" From 23925a1d8a1115444167c759df9d0aeafe9fb3fd Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Thu, 26 Feb 2026 02:17:35 +0000 Subject: [PATCH 25/35] Address PR #172 review feedback - Fix _slice_belief_states to slice component_states for NonErgodicState - Add vocab size validation in build_nonergodic_partial_overlap - Extract 
_build_components_from_spec to eliminate double-building - Move inline comments to docstrings in nonergodic_generative_process.py - Tighten NonErgodicState.component_states type to tuple[ComponentState, ...] - Add tests for return_all_states, factored components, and full history Co-Authored-By: Claude Opus 4.6 --- simplexity/generative_processes/builder.py | 116 +++++++---- simplexity/generative_processes/generator.py | 9 +- .../nonergodic_generative_process.py | 116 ++--------- .../test_nonergodic_generative_process.py | 188 ++++++++++++++++++ 4 files changed, 286 insertions(+), 143 deletions(-) diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 3e8eb4a1..0dbb8f3a 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -650,6 +650,53 @@ def build_transition_coupled_from_spec( ) +def _build_components_from_spec( + components: Sequence[dict[str, Any]], + device: str | None = None, +) -> list[GenerativeProcess]: + """Build component GenerativeProcess instances from specifications. + + Args: + components: List of component specs. Each spec has: + - component_type: "hmm", "ghmm", or "factored" + - For hmm/ghmm: process_name, process_params + - For factored: structure_type, spec, and structure-specific params + device: Device placement. + + Returns: + List of built GenerativeProcess instances. + + Raises: + ValueError: If component_type is unknown. 
+ """ + built_components = [] + + for comp_spec in components: + comp_type = comp_spec.get("component_type", "hmm") + + if comp_type == "hmm": + process: GenerativeProcess = build_hidden_markov_model( + process_name=comp_spec["process_name"], + process_params=comp_spec.get("process_params", {}), + device=device, + ) + elif comp_type == "ghmm": + process = build_generalized_hidden_markov_model( + process_name=comp_spec["process_name"], + process_params=comp_spec.get("process_params", {}), + device=device, + ) + elif comp_type == "factored": + factored_kwargs = {k: v for k, v in comp_spec.items() if k not in ("component_type", "vocab_map")} + process = build_factored_process_from_spec(**factored_kwargs) + else: + raise ValueError(f"Unknown component_type: {comp_type}") + + built_components.append(process) + + return built_components + + def build_nonergodic_process_from_spec( components: Sequence[dict[str, Any]], component_weights: Sequence[float], @@ -702,39 +749,16 @@ def build_nonergodic_process_from_spec( Raises: ValueError: If component_type is unknown. 
""" - built_components = [] - inferred_vocab_maps = [] - - for comp_spec in components: - comp_type = comp_spec.get("component_type", "hmm") - - if comp_type == "hmm": - process = build_hidden_markov_model( - process_name=comp_spec["process_name"], - process_params=comp_spec.get("process_params", {}), - device=device, - ) - elif comp_type == "ghmm": - process = build_generalized_hidden_markov_model( - process_name=comp_spec["process_name"], - process_params=comp_spec.get("process_params", {}), - device=device, - ) - elif comp_type == "factored": - # Extract factored-specific params - factored_kwargs = {k: v for k, v in comp_spec.items() if k not in ("component_type", "vocab_map")} - process = build_factored_process_from_spec(**factored_kwargs) - else: - raise ValueError(f"Unknown component_type: {comp_type}") - - built_components.append(process) + built_components = _build_components_from_spec(components, device=device) - # Infer vocab map if not provided globally - if vocab_maps is None: + if vocab_maps is None: + inferred_vocab_maps = [] + for comp_spec, process in zip(components, built_components, strict=True): comp_vocab_map = comp_spec.get("vocab_map", list(range(process.vocab_size))) inferred_vocab_maps.append(comp_vocab_map) - - final_vocab_maps = vocab_maps if vocab_maps is not None else inferred_vocab_maps + final_vocab_maps: Sequence[Sequence[int]] = inferred_vocab_maps + else: + final_vocab_maps = vocab_maps return NonErgodicGenerativeProcess( components=built_components, @@ -751,7 +775,7 @@ def build_nonergodic_disjoint_vocab( ) -> NonErgodicGenerativeProcess: """Build a nonergodic process where each component has a fully disjoint alphabet. - First builds each component to discover its vocab_size, then assigns + Builds each component once to discover its vocab_size, then assigns non-overlapping vocab_maps: C0 -> [0..V0-1], C1 -> [V0..V0+V1-1], etc. 
Args: @@ -762,16 +786,20 @@ def build_nonergodic_disjoint_vocab( Returns: NonErgodicGenerativeProcess with disjoint per-component vocabularies. """ - temp = build_nonergodic_process_from_spec(components, component_weights, device=device) - comp_vocab_sizes = [c.vocab_size for c in temp.components] + built_components = _build_components_from_spec(components, device=device) vocab_maps: list[list[int]] = [] offset = 0 - for v in comp_vocab_sizes: - vocab_maps.append(list(range(offset, offset + v))) - offset += v + for c in built_components: + vocab_maps.append(list(range(offset, offset + c.vocab_size))) + offset += c.vocab_size - return build_nonergodic_process_from_spec(components, component_weights, vocab_maps=vocab_maps, device=device) + return NonErgodicGenerativeProcess( + components=built_components, + component_weights=component_weights, + vocab_maps=vocab_maps, + device=device, + ) def _build_prefix_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: int) -> list[list[int]]: @@ -842,8 +870,11 @@ def build_nonergodic_partial_overlap( if mode == "random" and seed is None: raise ValueError("seed is required when mode='random'") - temp = build_nonergodic_process_from_spec(components, component_weights, device=device) - v = temp.components[0].vocab_size + built_components = _build_components_from_spec(components, device=device) + comp_vocab_sizes = [c.vocab_size for c in built_components] + if len(set(comp_vocab_sizes)) != 1: + raise ValueError(f"All components must have equal vocab_size for partial_overlap, got {comp_vocab_sizes}") + v = comp_vocab_sizes[0] n_shared = int(v * overlap_frac) n_unique = v - n_shared n_components = len(components) @@ -858,7 +889,12 @@ def build_nonergodic_partial_overlap( else: raise ValueError(f"Unknown mode '{mode}'. 
Must be 'prefix', 'sliding', or 'random'.") - return build_nonergodic_process_from_spec(components, component_weights, vocab_maps=vocab_maps, device=device) + return NonErgodicGenerativeProcess( + components=built_components, + component_weights=component_weights, + vocab_maps=vocab_maps, + device=device, + ) def build_inflated_process( diff --git a/simplexity/generative_processes/generator.py b/simplexity/generative_processes/generator.py index 6a7fcc14..32dcd766 100644 --- a/simplexity/generative_processes/generator.py +++ b/simplexity/generative_processes/generator.py @@ -102,14 +102,15 @@ def _slice_belief_states( Handles different state representations: - Plain array: slice directly - Tuple of arrays: slice each element - - NonErgodicState: slice component_beliefs, keep component_states as-is + - NonErgodicState: slice both component_beliefs and component_states """ if isinstance(belief_states, NonErgodicState): - # For NonErgodicState, slice component_beliefs trajectory - # component_states are final states, not trajectories, so don't slice return NonErgodicState( component_beliefs=belief_states.component_beliefs[:, seq_slice, ...], - component_states=belief_states.component_states, + component_states=tuple( + tuple(s[:, seq_slice, ...] for s in cs) if isinstance(cs, tuple) else cs[:, seq_slice, ...] + for cs in belief_states.component_states + ), ) elif isinstance(belief_states, tuple): return tuple(b[:, seq_slice, ...] 
for b in belief_states) diff --git a/simplexity/generative_processes/nonergodic_generative_process.py b/simplexity/generative_processes/nonergodic_generative_process.py index 7a601522..45f6e7ed 100644 --- a/simplexity/generative_processes/nonergodic_generative_process.py +++ b/simplexity/generative_processes/nonergodic_generative_process.py @@ -4,7 +4,7 @@ from collections.abc import Sequence from functools import partial -from typing import Any, NamedTuple +from typing import NamedTuple import chex import equinox as eqx @@ -14,29 +14,9 @@ from simplexity.generative_processes.generative_process import GenerativeProcess from simplexity.utils.jnp_utils import resolve_jax_device -# Type alias for component states: either a flat array or tuple of arrays (FactoredState) ComponentState = jax.Array | tuple[jax.Array, ...] -# ----------------------------------------------------------------------------- -# Helper functions for handling heterogeneous component state types. -# -# The generate() method uses jax.lax.switch to select which component to use. -# This requires all branches to return the same shape, but components may have -# different state types: -# - HMM/GHMM: flat jax.Array of shape [state_dim] -# - FactoredGenerativeProcess: tuple of arrays (FactoredState) -# -# To handle this uniformly, we: -# 1. Flatten each state to a 1D array (preserving total element count) -# 2. Pad to a common max size for switch compatibility -# 3. After processing, unpad and unflatten back to original structure -# -# This lets us work with uniform arrays inside jax.lax.switch while components -# still receive their native state structures. -# ----------------------------------------------------------------------------- - - def _get_flat_size(state: ComponentState) -> int: """Get total number of elements in a component state. @@ -85,8 +65,6 @@ def _unflatten_state(flat: jax.Array, template: ComponentState) -> ComponentStat time), so we can compute offsets as Python ints. 
""" if isinstance(template, tuple): - # Extract parts using dynamic_slice with concrete offsets and sizes - # This avoids jnp.split which requires concrete split indices offset = 0 parts = [] for t in template: @@ -108,7 +86,7 @@ class NonErgodicState(NamedTuple): """ component_beliefs: jax.Array - component_states: tuple[Any, ...] + component_states: tuple[ComponentState, ...] class NonErgodicGenerativeProcess(GenerativeProcess[NonErgodicState]): @@ -168,7 +146,6 @@ def __init__( self.device = resolve_jax_device(device) self.components = tuple(components) - # Normalize weights weights = jnp.array(component_weights) if weights.shape[0] != len(components): raise ValueError( @@ -179,17 +156,12 @@ def __init__( self.component_weights = weights / jnp.sum(weights) self.component_weights = jax.device_put(self.component_weights, self.device) - # Set up vocab maps if vocab_maps is None: - # Default: each component uses its natural vocab [0, 1, ..., V_i-1] vocab_maps = [list(range(c.vocab_size)) for c in components] self.vocab_maps = tuple(jax.device_put(jnp.array(vm, dtype=jnp.int32), self.device) for vm in vocab_maps) - - # Compute global vocab size self._vocab_size = max(max(vm) for vm in vocab_maps) + 1 - # Build inverse vocab maps (global -> local, -1 if not mapped) inverse_maps = [] for vm in vocab_maps: inv = jnp.full((self._vocab_size,), -1, dtype=jnp.int32) @@ -227,9 +199,7 @@ def observation_probability_distribution(self, state: NonErgodicState) -> jax.Ar for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=False)): comp_state = state.component_states[i] - # Get component's distribution over its local vocab local_dist = component.observation_probability_distribution(comp_state) - # Scatter to global vocab positions component_contrib = jnp.zeros(self._vocab_size).at[vm].add(local_dist) global_dist = global_dist + state.component_beliefs[i] * component_contrib @@ -239,7 +209,9 @@ def observation_probability_distribution(self, state: 
NonErgodicState) -> jax.Ar def log_observation_probability_distribution(self, log_belief_state: NonErgodicState) -> jax.Array: """Compute log P(global_obs | state). - Note: This expects log_belief_state with log-space component_beliefs and component_states. + Expects log-space component_beliefs and component_states. Unmapped tokens + get -inf. Component beliefs weight via addition in log space, then combined + via logsumexp across components. """ log_probs = [] @@ -248,13 +220,10 @@ def log_observation_probability_distribution(self, log_belief_state: NonErgodicS comp_log_belief = log_belief_state.component_beliefs[i] local_log_dist = component.log_observation_probability_distribution(comp_log_state) - # Create global distribution with -inf for unmapped tokens global_log_dist = jnp.full(self._vocab_size, -jnp.inf) global_log_dist = global_log_dist.at[vm].set(local_log_dist) - # Weight by component belief (in log space: add) log_probs.append(comp_log_belief + global_log_dist) - # Combine via logsumexp across components log_probs_stacked = jnp.stack(log_probs, axis=0) return jax.nn.logsumexp(log_probs_stacked, axis=0) @@ -266,11 +235,8 @@ def emit_observation(self, state: NonErgodicState, key: chex.PRNGKey) -> chex.Ar that component and maps to global vocab. """ key1, key2 = jax.random.split(key) - - # Sample component based on current beliefs component_idx = jax.random.categorical(key1, jnp.log(state.component_beliefs)) - # Emit from chosen component using switch for JIT compatibility def emit_from_component(i: int, k: chex.PRNGKey) -> chex.Array: comp_state = state.component_states[i] local_obs = self.components[i].emit_observation(comp_state, k) @@ -288,9 +254,11 @@ def emit_from_component(i: int, k: chex.PRNGKey) -> chex.Array: def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgodicState: """Update state given observation using Bayesian filtering. - 1. Compute P(obs | component_i) for each component using current states - 2. 
Update component_beliefs using Bayes rule - 3. Update each component's internal state independently + For each component: computes P(obs | component_i) as the likelihood + (0 if obs not in that component's vocab), conditionally updates the + component's internal state only when likelihood > 0, then applies + Bayes rule to update component_beliefs. Falls back to prior beliefs + if all likelihoods are 0. """ new_component_states = [] likelihoods = [] @@ -299,10 +267,7 @@ def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgod comp_state = state.component_states[i] local_obs = inv_map[obs] - # Get observation probability from this component local_dist = component.observation_probability_distribution(comp_state) - - # Likelihood is 0 if obs not in this component's vocab likelihood = jnp.where( local_obs >= 0, local_dist[jnp.clip(local_obs, 0, local_dist.shape[0] - 1)], @@ -310,10 +275,6 @@ def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgod ) likelihoods.append(likelihood) - # Update component's internal state - # Only update if the observation was possible for this component - # (likelihood > 0 checks both vocab membership AND state feasibility) - # Bind component via default arg to avoid B023 (late binding in loop) new_comp_state = jax.lax.cond( likelihood > 0, lambda s, lo, c=component: c.transition_states(s, lo), @@ -323,15 +284,13 @@ def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgod ) new_component_states.append(new_comp_state) - # Bayes update for component beliefs likelihoods_arr = jnp.array(likelihoods) unnorm_beliefs = state.component_beliefs * likelihoods_arr - # Handle case where all likelihoods are 0 (shouldn't happen with valid obs) normalizer = jnp.sum(unnorm_beliefs) new_beliefs = jnp.where( normalizer > 0, unnorm_beliefs / normalizer, - state.component_beliefs, # Keep old beliefs if normalizer is 0 + state.component_beliefs, ) return NonErgodicState( @@ -349,11 +308,8 
@@ def probability(self, observations: jax.Array) -> jax.Array: def compute_component_prob(i: int) -> jax.Array: component = self.components[i] inv_map = self._inverse_vocab_maps[i] - # Map global observations to local local_obs = inv_map[observations] - # Check if all observations are valid for this component all_valid = jnp.all(local_obs >= 0) - # Compute probability (0 if any obs invalid) prob = jax.lax.cond( all_valid, lambda lo: component.probability(lo), @@ -362,7 +318,6 @@ def compute_component_prob(i: int) -> jax.Array: ) return self.component_weights[i] * prob - # Sum over components total_prob = jnp.array(0.0) for i in range(len(self.components)): total_prob = total_prob + compute_component_prob(i) @@ -376,11 +331,8 @@ def log_probability(self, observations: jax.Array) -> jax.Array: def compute_component_log_prob(i: int) -> jax.Array: component = self.components[i] inv_map = self._inverse_vocab_maps[i] - # Map global observations to local local_obs = inv_map[observations] - # Check if all observations are valid for this component all_valid = jnp.all(local_obs >= 0) - # Compute log probability (-inf if any obs invalid) log_prob = jax.lax.cond( all_valid, lambda lo: component.log_probability(lo), @@ -389,7 +341,6 @@ def compute_component_log_prob(i: int) -> jax.Array: ) return jnp.log(self.component_weights[i]) + log_prob - # Combine via logsumexp log_probs = jnp.array([compute_component_log_prob(i) for i in range(len(self.components))]) return jax.nn.logsumexp(log_probs) @@ -412,6 +363,11 @@ def generate( we implement generation directly using jax.lax.scan over emit_observation and transition_states. + Because jax.lax.switch requires all branches to return the same shape, and + components may have different state types (HMM: flat array vs Factored: tuple + of arrays), we flatten each state to 1D, pad to a common max size for switch + compatibility, and unflatten back to native structures after processing. 
+ Args: state: Initial NonErgodicState with component_beliefs and component_states. The batch dimension is handled by vmap. @@ -426,54 +382,28 @@ def generate( key1, key2 = jax.random.split(key) keys = jax.random.split(key2, sequence_len) - # 1. Sample which component to use for this entire sequence component_idx = jax.random.categorical(key1, jnp.log(state.component_beliefs)) - # 2. Flatten and pad component states for jax.lax.switch compatibility - # - # jax.lax.switch requires all branches to return arrays of identical shape. - # But components can have different state structures: - # - HMM/GHMM: flat array of shape [state_dim] - # - FactoredGenerativeProcess: tuple of arrays (FactoredState) - # - # Our approach: - # a) Save original state structures as templates (for unflattening later) - # b) Flatten each state to 1D (handles both arrays and tuples) - # c) Pad to a common max size - # d) Inside switch: unpad + unflatten → call component → flatten + repad - # e) After scan: unpad + unflatten to restore native structures - # - # Components never see the padding - they work with their native shapes. num_components = len(self.components) - - # Store original structures as templates for restoring after processing state_templates = state.component_states flat_sizes = [_get_flat_size(s) for s in state_templates] max_flat_size = max(flat_sizes) def flatten_and_pad(s: ComponentState) -> jax.Array: - """Convert any state to padded 1D array for uniform shape in switch.""" flat = _flatten_state(s) return jnp.pad(flat, (0, max_flat_size - flat.size)) def unpad_and_unflatten(padded: jax.Array, original_size: int, template: ComponentState) -> ComponentState: - """Restore original state structure from padded 1D array.""" - flat = padded[:original_size] - return _unflatten_state(flat, template) + return _unflatten_state(padded[:original_size], template) padded_states = tuple(flatten_and_pad(s) for s in state.component_states) - # 3. 
Generate sequence using scan, selecting component via switch at each step def gen_step_for_component( i: int, padded_state: jax.Array, step_key: chex.PRNGKey ) -> tuple[jax.Array, chex.Array]: - """Generate one observation from component i and update its state.""" - # Unpad and unflatten to get native state structure real_state = unpad_and_unflatten(padded_state, flat_sizes[i], state_templates[i]) - # Call component's methods with native state shapes local_obs = self.components[i].emit_observation(real_state, step_key) new_real_state = self.components[i].transition_states(real_state, local_obs) - # Flatten and repad for uniform shape in switch new_padded_state = flatten_and_pad(new_real_state) global_obs = self.vocab_maps[i][local_obs] return new_padded_state, global_obs @@ -481,10 +411,8 @@ def gen_step_for_component( def scan_step( carry: tuple[jax.Array, tuple[jax.Array, ...]], step_key: chex.PRNGKey ) -> tuple[tuple[jax.Array, tuple[jax.Array, ...]], chex.Array]: - """One generation step: emit from active component, update its state.""" idx, padded_comp_states = carry - # Use switch to call the correct component's emit/transition def gen_from_i(i: int) -> tuple[jax.Array, chex.Array]: return gen_step_for_component(i, padded_comp_states[i], step_key) @@ -493,8 +421,6 @@ def gen_from_i(i: int) -> tuple[jax.Array, chex.Array]: [partial(gen_from_i, i) for i in range(num_components)], ) - # Update only the active component's state, keep others unchanged - # Now all states have uniform shape, so cond works new_padded_comp_states = tuple( jax.lax.cond( idx == i, @@ -506,32 +432,24 @@ def gen_from_i(i: int) -> tuple[jax.Array, chex.Array]: return (idx, new_padded_comp_states), global_obs - # Run the scan over all timesteps with padded states init_carry = (component_idx, padded_states) (_, final_padded_states), observations = jax.lax.scan(scan_step, init_carry, keys) - # 4. 
Unpad and unflatten final states back to native structures final_comp_states = tuple( unpad_and_unflatten(final_padded_states[i], flat_sizes[i], state_templates[i]) for i in range(num_components) ) - # 5. Construct final state with one-hot beliefs one_hot_beliefs = jax.nn.one_hot(component_idx, len(self.components), dtype=self.component_weights.dtype) if return_all_states: - # Run inference pass to compute state trajectory - # This simulates what an observer would compute via Bayesian filtering - # on the generated sequence, starting from the original initial state. def inference_step( carry_state: NonErgodicState, obs: chex.Array ) -> tuple[NonErgodicState, NonErgodicState]: new_state = self.transition_states(carry_state, obs) return new_state, carry_state - # Use original initial state for inference (not modified by generation) _, state_trajectory = jax.lax.scan(inference_step, state, observations) - return state_trajectory, observations else: return NonErgodicState( diff --git a/tests/generative_processes/test_nonergodic_generative_process.py b/tests/generative_processes/test_nonergodic_generative_process.py index 747662d5..bc7820cb 100644 --- a/tests/generative_processes/test_nonergodic_generative_process.py +++ b/tests/generative_processes/test_nonergodic_generative_process.py @@ -15,16 +15,33 @@ import pytest from simplexity.generative_processes.builder import ( + build_factored_process_from_spec, build_generalized_hidden_markov_model, build_hidden_markov_model, build_nonergodic_process_from_spec, ) +from simplexity.generative_processes.generator import generate_data_batch_with_full_history from simplexity.generative_processes.nonergodic_generative_process import ( NonErgodicGenerativeProcess, NonErgodicState, ) +def _expand_state( + state: jax.Array | tuple[jax.Array, ...] | NonErgodicState, + batch_size: int, +) -> jax.Array | tuple[jax.Array, ...] 
| NonErgodicState: + """Expand a single state to a batch of identical states.""" + if isinstance(state, NonErgodicState): + return NonErgodicState( + component_beliefs=jnp.repeat(state.component_beliefs[None, :], batch_size, axis=0), + component_states=tuple(_expand_state(cs, batch_size) for cs in state.component_states), + ) + elif isinstance(state, tuple): + return tuple(jnp.repeat(s[None, :], batch_size, axis=0) for s in state) + return jnp.repeat(state[None, :], batch_size, axis=0) + + class TestNonErgodicState: """Tests for NonErgodicState structure.""" @@ -330,3 +347,174 @@ def test_mismatched_weights_raises(self): components=[coin, coin], component_weights=[1.0], # Only 1 weight for 2 components ) + + +class TestGenerateReturnAllStates: + """Tests for generate with return_all_states=True.""" + + @pytest.fixture + def two_mess3_process(self): + """Two mess3 HMMs as a nonergodic mixture.""" + hmm1 = build_hidden_markov_model("mess3", {"x": 0.15, "a": 0.6}) + hmm2 = build_hidden_markov_model("mess3", {"x": 0.5, "a": 0.6}) + return NonErgodicGenerativeProcess( + components=[hmm1, hmm2], + component_weights=[0.6, 0.4], + ) + + def test_return_all_states_shapes(self, two_mess3_process): + """Both component_beliefs and component_states should have time dimension.""" + batch_size = 4 + seq_len = 8 + state = two_mess3_process.initial_state + batch_states = NonErgodicState( + component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), + component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), + ) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + trajectory, observations = two_mess3_process.generate(batch_states, keys, seq_len, True) + + assert observations.shape == (batch_size, seq_len) + assert trajectory.component_beliefs.shape == (batch_size, seq_len, 2) + for i, comp in enumerate(two_mess3_process.components): + assert trajectory.component_states[i].shape == 
(batch_size, seq_len, comp.initial_state.shape[0]) + + def test_return_all_states_beliefs_are_valid_distributions(self, two_mess3_process): + """Component beliefs at each timestep should sum to 1.""" + batch_size = 4 + seq_len = 8 + state = two_mess3_process.initial_state + batch_states = NonErgodicState( + component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), + component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), + ) + keys = jax.random.split(jax.random.PRNGKey(0), batch_size) + + trajectory, _ = two_mess3_process.generate(batch_states, keys, seq_len, True) + + belief_sums = jnp.sum(trajectory.component_beliefs, axis=-1) + chex.assert_trees_all_close(belief_sums, jnp.ones_like(belief_sums), atol=1e-5) + + +class TestFactoredComponent: + """Tests for FactoredGenerativeProcess as a NonErgodic component.""" + + @pytest.fixture + def hmm_factored_process(self): + """NonErgodic process with one HMM and one factored component.""" + hmm = build_hidden_markov_model("coin", {"p": 0.7}) + factored = build_factored_process_from_spec( + structure_type="independent", + spec=[ + {"component_type": "hmm", "variants": [{"process_name": "coin", "process_params": {"p": 0.6}}]}, + {"component_type": "hmm", "variants": [{"process_name": "coin", "process_params": {"p": 0.4}}]}, + ], + ) + return NonErgodicGenerativeProcess( + components=[hmm, factored], + component_weights=[0.5, 0.5], + ) + + def test_factored_component_generate(self, hmm_factored_process): + """NonErgodic with a factored component should generate valid sequences.""" + process = hmm_factored_process + batch_size = 4 + seq_len = 6 + batch_states = _expand_state(process.initial_state, batch_size) + keys = jax.random.split(jax.random.PRNGKey(42), batch_size) + + final_states, observations = process.generate(batch_states, keys, seq_len, False) + + assert observations.shape == (batch_size, seq_len) + assert 
jnp.all(observations >= 0) + assert jnp.all(observations < process.vocab_size) + + def test_factored_component_return_all_states(self, hmm_factored_process): + """Factored component state trajectory should have correct shapes.""" + process = hmm_factored_process + + batch_size = 4 + seq_len = 6 + batch_states = _expand_state(process.initial_state, batch_size) + keys = jax.random.split(jax.random.PRNGKey(42), batch_size) + + trajectory, observations = process.generate(batch_states, keys, seq_len, True) + + assert observations.shape == (batch_size, seq_len) + assert trajectory.component_beliefs.shape == (batch_size, seq_len, 2) + # HMM component state: flat array + assert trajectory.component_states[0].ndim == 3 # [batch, seq, state_dim] + # Factored component state: tuple of arrays + assert isinstance(trajectory.component_states[1], tuple) + for factor_state in trajectory.component_states[1]: + assert factor_state.ndim == 3 # [batch, seq, factor_dim] + + +class TestGenerateDataBatchWithFullHistory: + """Tests for generate_data_batch_with_full_history with NonErgodicGenerativeProcess.""" + + def test_full_history_shapes(self): + """Belief states should have consistent shapes after slicing.""" + coin1 = build_hidden_markov_model("coin", {"p": 0.7}) + coin2 = build_hidden_markov_model("coin", {"p": 0.3}) + process = NonErgodicGenerativeProcess( + components=[coin1, coin2], + component_weights=[0.6, 0.4], + ) + + batch_size = 4 + seq_len = 8 + state = process.initial_state + batch_states = NonErgodicState( + component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), + component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), + ) + + result = generate_data_batch_with_full_history( + batch_states, process, batch_size, seq_len, jax.random.PRNGKey(0), + ) + + belief_states = result["belief_states"] + inputs = result["inputs"] + labels = result["labels"] + + assert 
isinstance(belief_states, NonErgodicState) + input_len = inputs.shape[1] + assert belief_states.component_beliefs.shape == (batch_size, input_len, 2) + for cs in belief_states.component_states: + assert cs.shape[0] == batch_size + assert cs.shape[1] == input_len + + def test_full_history_with_bos(self): + """Belief states should align with inputs when BOS token is used.""" + coin1 = build_hidden_markov_model("coin", {"p": 0.7}) + coin2 = build_hidden_markov_model("coin", {"p": 0.3}) + process = NonErgodicGenerativeProcess( + components=[coin1, coin2], + component_weights=[0.6, 0.4], + ) + + batch_size = 4 + seq_len = 8 + bos_token = process.vocab_size + state = process.initial_state + batch_states = NonErgodicState( + component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), + component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), + ) + + result = generate_data_batch_with_full_history( + batch_states, process, batch_size, seq_len, jax.random.PRNGKey(0), + bos_token=bos_token, + ) + + belief_states = result["belief_states"] + inputs = result["inputs"] + + assert isinstance(belief_states, NonErgodicState) + input_len = inputs.shape[1] + assert belief_states.component_beliefs.shape == (batch_size, input_len, 2) + for cs in belief_states.component_states: + assert cs.shape[0] == batch_size + assert cs.shape[1] == input_len From f089ed16d6c5d769e0f43a5d60d64939d011bf30 Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Thu, 26 Feb 2026 02:23:20 +0000 Subject: [PATCH 26/35] Apply ruff formatting Co-Authored-By: Claude Opus 4.6 --- .../nonergodic_generative_process.py | 1 + .../test_nonergodic_generative_process.py | 12 ++++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/simplexity/generative_processes/nonergodic_generative_process.py b/simplexity/generative_processes/nonergodic_generative_process.py index 45f6e7ed..0be9f1a4 100644 --- 
a/simplexity/generative_processes/nonergodic_generative_process.py +++ b/simplexity/generative_processes/nonergodic_generative_process.py @@ -443,6 +443,7 @@ def gen_from_i(i: int) -> tuple[jax.Array, chex.Array]: one_hot_beliefs = jax.nn.one_hot(component_idx, len(self.components), dtype=self.component_weights.dtype) if return_all_states: + def inference_step( carry_state: NonErgodicState, obs: chex.Array ) -> tuple[NonErgodicState, NonErgodicState]: diff --git a/tests/generative_processes/test_nonergodic_generative_process.py b/tests/generative_processes/test_nonergodic_generative_process.py index bc7820cb..5d13e1ca 100644 --- a/tests/generative_processes/test_nonergodic_generative_process.py +++ b/tests/generative_processes/test_nonergodic_generative_process.py @@ -472,7 +472,11 @@ def test_full_history_shapes(self): ) result = generate_data_batch_with_full_history( - batch_states, process, batch_size, seq_len, jax.random.PRNGKey(0), + batch_states, + process, + batch_size, + seq_len, + jax.random.PRNGKey(0), ) belief_states = result["belief_states"] @@ -505,7 +509,11 @@ def test_full_history_with_bos(self): ) result = generate_data_batch_with_full_history( - batch_states, process, batch_size, seq_len, jax.random.PRNGKey(0), + batch_states, + process, + batch_size, + seq_len, + jax.random.PRNGKey(0), bos_token=bos_token, ) From a7c5b3dfbbd4462bf085a7ee78d186ebfec7854e Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Thu, 26 Feb 2026 02:36:23 +0000 Subject: [PATCH 27/35] Fix pyright errors: widen generator state types for NonErgodicState Co-Authored-By: Claude Opus 4.6 --- simplexity/generative_processes/generator.py | 12 +++--- .../test_nonergodic_generative_process.py | 41 +++++++++---------- 2 files changed, 26 insertions(+), 27 deletions(-) diff --git a/simplexity/generative_processes/generator.py b/simplexity/generative_processes/generator.py index 32dcd766..50253354 100644 --- a/simplexity/generative_processes/generator.py +++ 
b/simplexity/generative_processes/generator.py @@ -21,14 +21,14 @@ @eqx.filter_jit def generate_data_batch( - gen_states: jax.Array | tuple[jax.Array, ...], + gen_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, data_generator: GenerativeProcess, batch_size: int, sequence_len: int, key: jax.Array, bos_token: int | None = None, eos_token: int | None = None, -) -> tuple[jax.Array | tuple[jax.Array, ...], jax.Array, jax.Array]: +) -> tuple[jax.Array | tuple[jax.Array, ...] | NonErgodicState, jax.Array, jax.Array]: """Generate a batch of data without tracking intermediate beliefs.""" batch_keys = jax.random.split(key, batch_size) gen_states, tokens = data_generator.generate(gen_states, batch_keys, sequence_len, False) @@ -45,14 +45,14 @@ def generate_data_batch( @eqx.filter_jit def generate_data_batch_with_full_history( - gen_states: jax.Array | tuple[jax.Array, ...], + gen_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, data_generator: GenerativeProcess, batch_size: int, sequence_len: int, key: jax.Array, bos_token: int | None = None, eos_token: int | None = None, -) -> dict[str, jax.Array | tuple[jax.Array, ...]]: +) -> dict[str, jax.Array | tuple[jax.Array, ...] | NonErgodicState]: """Generate sequences plus per-token belief states and prefix probabilities.""" batch_keys = jax.random.split(key, batch_size) belief_states, tokens = data_generator.generate(gen_states, batch_keys, sequence_len, True) @@ -120,10 +120,10 @@ def _slice_belief_states( def _compute_prefix_probabilities( data_generator: GenerativeProcess, - initial_states: jax.Array | tuple[jax.Array, ...], + initial_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, tokens: jax.Array, ) -> jax.Array: - def run_sequence(state: jax.Array | tuple[jax.Array, ...], seq: jax.Array) -> jax.Array: + def run_sequence(state: jax.Array | tuple[jax.Array, ...] 
| NonErgodicState, seq: jax.Array) -> jax.Array: def step(carry_state: Any, token: jax.Array) -> tuple[Any, jax.Array]: obs_probs = data_generator.observation_probability_distribution(carry_state) token_prob = obs_probs[token] diff --git a/tests/generative_processes/test_nonergodic_generative_process.py b/tests/generative_processes/test_nonergodic_generative_process.py index 5d13e1ca..22a04428 100644 --- a/tests/generative_processes/test_nonergodic_generative_process.py +++ b/tests/generative_processes/test_nonergodic_generative_process.py @@ -22,26 +22,30 @@ ) from simplexity.generative_processes.generator import generate_data_batch_with_full_history from simplexity.generative_processes.nonergodic_generative_process import ( + ComponentState, NonErgodicGenerativeProcess, NonErgodicState, ) -def _expand_state( - state: jax.Array | tuple[jax.Array, ...] | NonErgodicState, +def _expand_component_state( + state: ComponentState, batch_size: int, -) -> jax.Array | tuple[jax.Array, ...] | NonErgodicState: - """Expand a single state to a batch of identical states.""" - if isinstance(state, NonErgodicState): - return NonErgodicState( - component_beliefs=jnp.repeat(state.component_beliefs[None, :], batch_size, axis=0), - component_states=tuple(_expand_state(cs, batch_size) for cs in state.component_states), - ) - elif isinstance(state, tuple): +) -> ComponentState: + """Expand a single component state to a batch of identical states.""" + if isinstance(state, tuple): return tuple(jnp.repeat(s[None, :], batch_size, axis=0) for s in state) return jnp.repeat(state[None, :], batch_size, axis=0) +def _expand_state(state: NonErgodicState, batch_size: int) -> NonErgodicState: + """Expand a single NonErgodicState to a batch of identical states.""" + return NonErgodicState( + component_beliefs=jnp.repeat(state.component_beliefs[None, :], batch_size, axis=0), + component_states=tuple(_expand_component_state(cs, batch_size) for cs in state.component_states), + ) + + class 
TestNonErgodicState: """Tests for NonErgodicState structure.""" @@ -465,11 +469,7 @@ def test_full_history_shapes(self): batch_size = 4 seq_len = 8 - state = process.initial_state - batch_states = NonErgodicState( - component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), - component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), - ) + batch_states = _expand_state(process.initial_state, batch_size) result = generate_data_batch_with_full_history( batch_states, @@ -481,12 +481,13 @@ def test_full_history_shapes(self): belief_states = result["belief_states"] inputs = result["inputs"] - labels = result["labels"] + assert isinstance(inputs, jax.Array) assert isinstance(belief_states, NonErgodicState) input_len = inputs.shape[1] assert belief_states.component_beliefs.shape == (batch_size, input_len, 2) for cs in belief_states.component_states: + assert not isinstance(cs, tuple) assert cs.shape[0] == batch_size assert cs.shape[1] == input_len @@ -502,11 +503,7 @@ def test_full_history_with_bos(self): batch_size = 4 seq_len = 8 bos_token = process.vocab_size - state = process.initial_state - batch_states = NonErgodicState( - component_beliefs=jnp.broadcast_to(state.component_beliefs, (batch_size,) + state.component_beliefs.shape), - component_states=tuple(jnp.broadcast_to(s, (batch_size,) + s.shape) for s in state.component_states), - ) + batch_states = _expand_state(process.initial_state, batch_size) result = generate_data_batch_with_full_history( batch_states, @@ -519,10 +516,12 @@ def test_full_history_with_bos(self): belief_states = result["belief_states"] inputs = result["inputs"] + assert isinstance(inputs, jax.Array) assert isinstance(belief_states, NonErgodicState) input_len = inputs.shape[1] assert belief_states.component_beliefs.shape == (batch_size, input_len, 2) for cs in belief_states.component_states: + assert not isinstance(cs, tuple) assert cs.shape[0] == 
batch_size assert cs.shape[1] == input_len From 4f509652ee7db6ff9b101c0bb982190bfab29d9a Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Thu, 26 Feb 2026 05:22:11 +0000 Subject: [PATCH 28/35] Revert generator signature widening, use type: ignore in tests Keep NonErgodicState as an internal detail rather than leaking it into the public generator interface, which would cascade into torch_generator and other consumers. Co-Authored-By: Claude Opus 4.6 --- simplexity/generative_processes/generator.py | 12 ++++++------ .../test_nonergodic_generative_process.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/simplexity/generative_processes/generator.py b/simplexity/generative_processes/generator.py index 50253354..32dcd766 100644 --- a/simplexity/generative_processes/generator.py +++ b/simplexity/generative_processes/generator.py @@ -21,14 +21,14 @@ @eqx.filter_jit def generate_data_batch( - gen_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, + gen_states: jax.Array | tuple[jax.Array, ...], data_generator: GenerativeProcess, batch_size: int, sequence_len: int, key: jax.Array, bos_token: int | None = None, eos_token: int | None = None, -) -> tuple[jax.Array | tuple[jax.Array, ...] | NonErgodicState, jax.Array, jax.Array]: +) -> tuple[jax.Array | tuple[jax.Array, ...], jax.Array, jax.Array]: """Generate a batch of data without tracking intermediate beliefs.""" batch_keys = jax.random.split(key, batch_size) gen_states, tokens = data_generator.generate(gen_states, batch_keys, sequence_len, False) @@ -45,14 +45,14 @@ def generate_data_batch( @eqx.filter_jit def generate_data_batch_with_full_history( - gen_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, + gen_states: jax.Array | tuple[jax.Array, ...], data_generator: GenerativeProcess, batch_size: int, sequence_len: int, key: jax.Array, bos_token: int | None = None, eos_token: int | None = None, -) -> dict[str, jax.Array | tuple[jax.Array, ...] 
| NonErgodicState]: +) -> dict[str, jax.Array | tuple[jax.Array, ...]]: """Generate sequences plus per-token belief states and prefix probabilities.""" batch_keys = jax.random.split(key, batch_size) belief_states, tokens = data_generator.generate(gen_states, batch_keys, sequence_len, True) @@ -120,10 +120,10 @@ def _slice_belief_states( def _compute_prefix_probabilities( data_generator: GenerativeProcess, - initial_states: jax.Array | tuple[jax.Array, ...] | NonErgodicState, + initial_states: jax.Array | tuple[jax.Array, ...], tokens: jax.Array, ) -> jax.Array: - def run_sequence(state: jax.Array | tuple[jax.Array, ...] | NonErgodicState, seq: jax.Array) -> jax.Array: + def run_sequence(state: jax.Array | tuple[jax.Array, ...], seq: jax.Array) -> jax.Array: def step(carry_state: Any, token: jax.Array) -> tuple[Any, jax.Array]: obs_probs = data_generator.observation_probability_distribution(carry_state) token_prob = obs_probs[token] diff --git a/tests/generative_processes/test_nonergodic_generative_process.py b/tests/generative_processes/test_nonergodic_generative_process.py index 22a04428..51e93e47 100644 --- a/tests/generative_processes/test_nonergodic_generative_process.py +++ b/tests/generative_processes/test_nonergodic_generative_process.py @@ -472,7 +472,7 @@ def test_full_history_shapes(self): batch_states = _expand_state(process.initial_state, batch_size) result = generate_data_batch_with_full_history( - batch_states, + batch_states, # type: ignore[arg-type] process, batch_size, seq_len, @@ -506,7 +506,7 @@ def test_full_history_with_bos(self): batch_states = _expand_state(process.initial_state, batch_size) result = generate_data_batch_with_full_history( - batch_states, + batch_states, # type: ignore[arg-type] process, batch_size, seq_len, From 2520382ddad93025eb39278c0c549ca1a6791f57 Mon Sep 17 00:00:00 2001 From: Kyle Ray Date: Thu, 26 Feb 2026 06:19:02 +0000 Subject: [PATCH 29/35] Remove misleading auto-infer comment from YAML config Co-Authored-By: 
Claude Opus 4.6 --- .../configs/generative_process/nonergodic_example.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/end_to_end/configs/generative_process/nonergodic_example.yaml b/tests/end_to_end/configs/generative_process/nonergodic_example.yaml index 2d050fa7..199656d9 100644 --- a/tests/end_to_end/configs/generative_process/nonergodic_example.yaml +++ b/tests/end_to_end/configs/generative_process/nonergodic_example.yaml @@ -4,7 +4,7 @@ # at the start and stays with it forever. name: nonergodic_example -base_vocab_size: ??? # Will auto-infer as 3 +base_vocab_size: ??? vocab_size: ??? instance: From e67ab10db6fe756b5ffc70d873815210ab8daaf0 Mon Sep 17 00:00:00 2001 From: Casper Lutzhoft Christensen Date: Tue, 3 Mar 2026 16:06:18 -0800 Subject: [PATCH 30/35] re-delete visualization --- .../activations/activation_visualizations.py | 282 ---- .../activations/visualization/__init__.py | 49 - .../visualization/data_structures.py | 66 - .../visualization/dataframe_builders.py | 470 ------ .../visualization/field_resolution.py | 198 --- .../visualization/pattern_expansion.py | 616 -------- .../visualization/pattern_utils.py | 139 -- .../visualization/preprocessing.py | 223 --- .../activations/visualization_configs.py | 364 ----- .../activations/visualization_persistence.py | 104 -- simplexity/visualization/altair_renderer.py | 380 ----- simplexity/visualization/data_pipeline.py | 194 --- simplexity/visualization/data_registry.py | 39 - simplexity/visualization/history.py | 105 -- simplexity/visualization/plotly_renderer.py | 1338 ----------------- .../visualization/structured_configs.py | 238 --- .../test_activation_visualizations.py | 298 ---- .../activations/test_dataframe_integration.py | 349 ----- tests/activations/test_field_expansion.py | 980 ------------ .../test_scalar_wildcard_expansion.py | 182 --- .../activations/test_visualization_modules.py | 969 ------------ .../test_visualization_persistence.py | 89 -- 
.../rmse_over_time_example.yaml | 37 - .../with_factor_visuals.yaml | 162 -- .../activation_tracker/with_visuals.yaml | 207 --- .../configs/visualization/3d_scatter.yaml | 5 - .../configs/visualization/plot/scatter3d.yaml | 26 - tests/end_to_end/visualization_3d_demo.py | 201 --- tests/end_to_end/visualization_demo.py | 105 -- tests/visualization/test_altair_renderer.py | 330 ---- tests/visualization/test_data_pipeline.py | 317 ---- tests/visualization/test_history.py | 155 -- tests/visualization/test_plotly_renderer.py | 411 ----- tests/visualization/test_renderer_controls.py | 234 --- 34 files changed, 9862 deletions(-) delete mode 100644 simplexity/activations/activation_visualizations.py delete mode 100644 simplexity/activations/visualization/__init__.py delete mode 100644 simplexity/activations/visualization/data_structures.py delete mode 100644 simplexity/activations/visualization/dataframe_builders.py delete mode 100644 simplexity/activations/visualization/field_resolution.py delete mode 100644 simplexity/activations/visualization/pattern_expansion.py delete mode 100644 simplexity/activations/visualization/pattern_utils.py delete mode 100644 simplexity/activations/visualization/preprocessing.py delete mode 100644 simplexity/activations/visualization_configs.py delete mode 100644 simplexity/activations/visualization_persistence.py delete mode 100644 simplexity/visualization/altair_renderer.py delete mode 100644 simplexity/visualization/data_pipeline.py delete mode 100644 simplexity/visualization/data_registry.py delete mode 100644 simplexity/visualization/history.py delete mode 100644 simplexity/visualization/plotly_renderer.py delete mode 100644 simplexity/visualization/structured_configs.py delete mode 100644 tests/activations/test_activation_visualizations.py delete mode 100644 tests/activations/test_dataframe_integration.py delete mode 100644 tests/activations/test_field_expansion.py delete mode 100644 tests/activations/test_scalar_wildcard_expansion.py 
delete mode 100644 tests/activations/test_visualization_modules.py delete mode 100644 tests/activations/test_visualization_persistence.py delete mode 100644 tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml delete mode 100644 tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml delete mode 100644 tests/end_to_end/configs/activation_tracker/with_visuals.yaml delete mode 100644 tests/end_to_end/configs/visualization/3d_scatter.yaml delete mode 100644 tests/end_to_end/configs/visualization/plot/scatter3d.yaml delete mode 100644 tests/end_to_end/visualization_3d_demo.py delete mode 100644 tests/end_to_end/visualization_demo.py delete mode 100644 tests/visualization/test_altair_renderer.py delete mode 100644 tests/visualization/test_data_pipeline.py delete mode 100644 tests/visualization/test_history.py delete mode 100644 tests/visualization/test_plotly_renderer.py delete mode 100644 tests/visualization/test_renderer_controls.py diff --git a/simplexity/activations/activation_visualizations.py b/simplexity/activations/activation_visualizations.py deleted file mode 100644 index cd3d0e01..00000000 --- a/simplexity/activations/activation_visualizations.py +++ /dev/null @@ -1,282 +0,0 @@ -"""Helpers for building activation visualizations from analysis outputs.""" - -from __future__ import annotations - -import re -from collections.abc import Mapping -from typing import Any - -import altair -import numpy as np -import pandas as pd -import plotly.graph_objects as go - -from simplexity.activations.visualization.data_structures import ( - _SCALAR_INDEX_SENTINEL, - ActivationVisualizationPayload, - PreparedMetadata, - VisualizationControlDetail, - VisualizationControlsState, -) -from simplexity.activations.visualization.dataframe_builders import ( - _apply_sampling, - _build_dataframe, - _build_metadata_columns, -) -from simplexity.activations.visualization.preprocessing import _apply_preprocessing -from 
simplexity.activations.visualization_configs import ( - ActivationVisualizationConfig, - ActivationVisualizationControlsConfig, -) -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.altair_renderer import build_altair_chart -from simplexity.visualization.data_registry import DictDataRegistry -from simplexity.visualization.plotly_renderer import build_plotly_figure -from simplexity.visualization.structured_configs import PlotConfig - - -def _parse_scalar_expression(expr: str) -> tuple[str, str | None]: - """Parse a scalar expression that may contain an aggregation function. - - Args: - expr: Expression like "layer_0_rmse" or "min(layer_0_rmse)" - - Returns: - Tuple of (scalar_key, aggregation_function or None) - """ - expr = expr.strip() - agg_match = re.match(r"^(min|max|avg|mean|latest|first|last)\((.+)\)$", expr) - if agg_match: - agg_func = agg_match.group(1) - scalar_key = agg_match.group(2).strip() - return (scalar_key, agg_func) - return (expr, None) - - -def _compute_aggregation( - history: list[tuple[int, float]], - agg_func: str, -) -> float: - """Compute aggregation over scalar history. 
- - Args: - history: List of (step, value) tuples - agg_func: Aggregation function name (min, max, avg, mean, latest, first, last) - - Returns: - Aggregated value - """ - if not history: - raise ConfigValidationError(f"Cannot compute {agg_func} over empty history") - - values = [value for _, value in history] - - if agg_func == "min": - return float(np.min(values)) - elif agg_func == "max": - return float(np.max(values)) - elif agg_func in ("avg", "mean"): - return float(np.mean(values)) - elif agg_func in ("latest", "last"): - return history[-1][1] - elif agg_func == "first": - return history[0][1] - else: - raise ConfigValidationError(f"Unknown aggregation function: {agg_func}") - - -def _render_title_template( - title: str | None, - title_scalars: dict[str, str] | None, - scalars: Mapping[str, float], - scalar_history: Mapping[str, list[tuple[int, float]]], -) -> str | None: - """Render a title template by substituting scalar values and aggregations. - - Args: - title: Title string potentially containing format placeholders like {rmse:.3f} - title_scalars: Mapping from template variable names to scalar keys or expressions - scalars: Available current scalar values - scalar_history: Historical scalar values for aggregations - - Returns: - Rendered title string with scalar values substituted, or None if title is None - - Examples: - title_scalars: {"rmse": "layer_0_rmse", "best": "min(layer_0_rmse)"} - This will substitute {rmse} with current value and {best} with historical minimum. 
- """ - if title is None: - return None - - if title_scalars is None or not title_scalars: - return title - - scalar_values = {} - for var_name, scalar_expr in title_scalars.items(): - scalar_key, agg_func = _parse_scalar_expression(scalar_expr) - - if agg_func is None: - # No aggregation, use current value - if scalar_key in scalars: - scalar_values[var_name] = scalars[scalar_key] - else: - raise ConfigValidationError( - f"Title template references scalar '{scalar_key}' (var: '{var_name}') but it is not available. " - f"Available scalars: {list(scalars.keys())}" - ) - else: - # Aggregation requested, use history - if scalar_key not in scalar_history: - raise ConfigValidationError( - f"Title template requests {agg_func}({scalar_key}) but no history available for '{scalar_key}'. " - f"Available history keys: {list(scalar_history.keys())}" - ) - history = scalar_history[scalar_key] - scalar_values[var_name] = _compute_aggregation(history, agg_func) - - try: - return title.format(**scalar_values) - except (KeyError, ValueError, IndexError) as e: - raise ConfigValidationError( - f"Failed to render title template '{title}' with values {scalar_values}: {e}" - ) from e - - -def _get_facet_columns(viz_cfg: ActivationVisualizationConfig) -> list[str]: - """Get columns used for faceting/subplots. - - Returns columns that define subplot groups, used for per-subplot sampling. 
- """ - cols = ["layer", "factor", "data_type"] - if viz_cfg.plot and viz_cfg.plot.facet: - if viz_cfg.plot.facet.row: - cols.append(viz_cfg.plot.facet.row) - if viz_cfg.plot.facet.column: - cols.append(viz_cfg.plot.facet.column) - return list(dict.fromkeys(cols)) - - -def build_visualization_payloads( - analysis_name: str, - viz_cfgs: list[ActivationVisualizationConfig], - *, - default_backend: str, - prepared_metadata: PreparedMetadata, - weights: np.ndarray, - belief_states: np.ndarray | None, - arrays: Mapping[str, np.ndarray], - scalars: Mapping[str, float], - scalar_history: Mapping[str, list[tuple[int, float]]], - scalar_history_step: int | None, - analysis_concat_layers: bool, - layer_names: list[str], -) -> list[ActivationVisualizationPayload]: - """Materialize and render the configured visualizations for one analysis.""" - payloads: list[ActivationVisualizationPayload] = [] - metadata_columns = _build_metadata_columns(analysis_name, prepared_metadata, weights) - for viz_cfg in viz_cfgs: - dataframe = _build_dataframe( - viz_cfg, - metadata_columns, - arrays, - scalars, - scalar_history, - scalar_history_step, - belief_states, - analysis_concat_layers, - layer_names, - ) - if viz_cfg.data_mapping.sampling is not None: - facet_cols = _get_facet_columns(viz_cfg) - dataframe = _apply_sampling(dataframe, viz_cfg.data_mapping.sampling, facet_cols) - dataframe = _apply_preprocessing(dataframe, viz_cfg.preprocessing) - plot_cfg = viz_cfg.resolve_plot_config(default_backend) - - if plot_cfg.guides and plot_cfg.guides.title_scalars: - plot_cfg.guides.title = _render_title_template( - plot_cfg.guides.title, - plot_cfg.guides.title_scalars, - scalars, - scalar_history, - ) - - controls = _build_controls_state(dataframe, viz_cfg.controls) - backend = plot_cfg.backend - figure = render_visualization(plot_cfg, dataframe, controls) - payloads.append( - ActivationVisualizationPayload( - analysis=analysis_name, - name=viz_cfg.name, - backend=backend, - figure=figure, - 
dataframe=dataframe, - controls=controls, - plot_config=plot_cfg, - ) - ) - return payloads - - -def render_visualization( - plot_cfg: PlotConfig, - dataframe: pd.DataFrame, - controls: VisualizationControlsState | None, -) -> altair.Chart | go.Figure: - """Render a visualization figure from plot configuration and dataframe.""" - registry = DictDataRegistry({plot_cfg.data.source: dataframe}) - return _render_plot(plot_cfg, registry, controls) - - -def _render_plot( - plot_cfg: PlotConfig, - registry: DictDataRegistry, - controls: VisualizationControlsState | None, -) -> Any: - if plot_cfg.backend == "plotly": - return build_plotly_figure(plot_cfg, registry, controls=controls) - return build_altair_chart(plot_cfg, registry, controls=controls) - - -def _build_controls_state( - dataframe: pd.DataFrame, controls_cfg: ActivationVisualizationControlsConfig | None -) -> VisualizationControlsState | None: - if controls_cfg is None: - return None - slider = _build_control_detail(dataframe, "slider", controls_cfg.slider, controls_cfg.cumulative) - dropdown = _build_control_detail(dataframe, "dropdown", controls_cfg.dropdown) - toggle = _build_control_detail(dataframe, "toggle", controls_cfg.toggle) - return VisualizationControlsState( - slider=slider, - dropdown=dropdown, - toggle=toggle, - accumulate_steps=controls_cfg.accumulate_steps, - ) - - -def _build_control_detail( - dataframe: pd.DataFrame, - control_type: str, - field: str | None, - cumulative: bool | None = None, -) -> VisualizationControlDetail | None: - if field is None: - return None - if field not in dataframe: - raise ConfigValidationError(f"Control field '{field}' is not present in visualization dataframe.") - options = list(pd.unique(dataframe[field])) - # Filter out "_no_layer_" placeholder used for layer-independent data (e.g., ground truth) - if field == "layer": - options = [opt for opt in options if opt != "_no_layer_"] - return VisualizationControlDetail(type=control_type, field=field, 
options=options, cumulative=cumulative) - - -__all__ = [ - "ActivationVisualizationPayload", - "PreparedMetadata", - "VisualizationControlDetail", - "VisualizationControlsState", - "_SCALAR_INDEX_SENTINEL", - "build_visualization_payloads", - "render_visualization", -] diff --git a/simplexity/activations/visualization/__init__.py b/simplexity/activations/visualization/__init__.py deleted file mode 100644 index 4d80976f..00000000 --- a/simplexity/activations/visualization/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Visualization subpackage for activation analysis.""" - -from simplexity.activations.visualization.data_structures import ( - _SCALAR_INDEX_SENTINEL, - ActivationVisualizationPayload, - PreparedMetadata, - VisualizationControlDetail, - VisualizationControlsState, -) -from simplexity.activations.visualization.dataframe_builders import ( - _build_dataframe, - _build_metadata_columns, -) -from simplexity.activations.visualization.field_resolution import ( - _lookup_array, - _lookup_scalar_value, - _maybe_component, - _resolve_belief_states, - _resolve_field, -) -from simplexity.activations.visualization.pattern_expansion import ( - _expand_field_mapping, - _has_field_pattern, - _has_key_pattern, - _parse_component_spec, -) -from simplexity.activations.visualization.preprocessing import ( - _apply_preprocessing, -) - -__all__ = [ - "ActivationVisualizationPayload", - "PreparedMetadata", - "VisualizationControlDetail", - "VisualizationControlsState", - "_SCALAR_INDEX_SENTINEL", - "_apply_preprocessing", - "_build_dataframe", - "_build_metadata_columns", - "_expand_field_mapping", - "_has_field_pattern", - "_has_key_pattern", - "_lookup_array", - "_lookup_scalar_value", - "_maybe_component", - "_parse_component_spec", - "_resolve_belief_states", - "_resolve_field", -] diff --git a/simplexity/activations/visualization/data_structures.py b/simplexity/activations/visualization/data_structures.py deleted file mode 100644 index 6d9fb065..00000000 --- 
a/simplexity/activations/visualization/data_structures.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Core data structures for activation visualizations.""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import Any - -import altair -import numpy as np -import pandas as pd -import plotly.graph_objects as go - -from simplexity.visualization.structured_configs import PlotConfig - - -@dataclass -class PreparedMetadata: - """Metadata derived during activation preprocessing.""" - - sequences: list[tuple[int, ...]] - steps: np.ndarray - select_last_token: bool - - -@dataclass -class ActivationVisualizationPayload: - """Rendered visualization plus auxiliary metadata.""" - - analysis: str - name: str - backend: str - figure: altair.Chart | go.Figure - dataframe: pd.DataFrame - controls: VisualizationControlsState | None - plot_config: PlotConfig - - -@dataclass -class VisualizationControlDetail: - """Runtime metadata for a single control.""" - - type: str - field: str - options: list[Any] - cumulative: bool | None = None - - -@dataclass -class VisualizationControlsState: - """Collection of optional control metadata.""" - - slider: VisualizationControlDetail | None = None - dropdown: VisualizationControlDetail | None = None - toggle: VisualizationControlDetail | None = None - accumulate_steps: bool = False - - -_SCALAR_INDEX_SENTINEL = "__SCALAR_INDEX_SENTINEL__" - -__all__ = [ - "ActivationVisualizationPayload", - "PreparedMetadata", - "VisualizationControlDetail", - "VisualizationControlsState", - "_SCALAR_INDEX_SENTINEL", -] diff --git a/simplexity/activations/visualization/dataframe_builders.py b/simplexity/activations/visualization/dataframe_builders.py deleted file mode 100644 index 4d6164ff..00000000 --- a/simplexity/activations/visualization/dataframe_builders.py +++ /dev/null @@ -1,470 +0,0 @@ -"""DataFrame construction for activation visualizations.""" - -from __future__ import annotations - -import re -from collections.abc import 
Mapping -from typing import Any - -import numpy as np -import pandas as pd - -from simplexity.activations.visualization.data_structures import ( - _SCALAR_INDEX_SENTINEL, - PreparedMetadata, -) -from simplexity.activations.visualization.field_resolution import _resolve_field -from simplexity.activations.visualization.pattern_expansion import ( - _expand_field_mapping, - _expand_scalar_pattern_keys, - _scalar_pattern_label, -) -from simplexity.activations.visualization.pattern_utils import has_pattern -from simplexity.activations.visualization_configs import ( - ActivationVisualizationConfig, - ActivationVisualizationFieldRef, - SamplingConfig, - ScalarSeriesMapping, -) -from simplexity.analysis.metric_keys import format_layer_spec -from simplexity.exceptions import ConfigValidationError - - -def _build_metadata_columns( - analysis_name: str, - metadata: PreparedMetadata, - weights: np.ndarray, -) -> dict[str, Any]: - """Build base metadata columns for visualization DataFrames.""" - sequences = metadata.sequences - numeric_steps = metadata.steps - sequence_strings = [" ".join(str(token) for token in seq) for seq in sequences] - base = { - "analysis": np.repeat(analysis_name, len(sequences)), - "step": numeric_steps, - "sequence_length": numeric_steps, - "sequence": np.asarray(sequence_strings), - "sample_index": np.arange(len(sequences), dtype=np.int32), - "weight": weights, - } - return base - - -def _extract_base_column_name(column: str, group_value: str) -> str: - """Extract base column name by removing group index from expanded column name. - - For column='factor_0_prob_0' with group_value='0', returns 'prob_0'. - Uses the group_value to identify and remove the group-related part. - - In practice, key-expanded columns will have format prefix_N_suffix where prefix - is the group name (e.g., 'factor') and suffix is the base column (e.g., 'prob_0'). - Columns like 'prob_0' without a clear group prefix are returned unchanged. 
- """ - # Pattern: prefix_N_suffix (e.g., factor_0_prob_0 -> prob_0) - # Must have alphabetic suffix after the group value underscore to ensure - # we're stripping a real group prefix, not just matching any column ending in _N - pattern = re.compile(rf"^([a-zA-Z][a-zA-Z0-9_]*)_{re.escape(group_value)}_([a-zA-Z].*)$") - match = pattern.match(column) - if match: - return match.group(2) - - # No match - return original column unchanged - # This handles cases like 'prob_0' where there's no group prefix to strip - return column - - -def _build_scalar_dataframe( - mappings: dict[str, ActivationVisualizationFieldRef], - scalars: Mapping[str, float], - scalar_history: Mapping[str, list[tuple[int, float]]], - analysis_name: str, - current_step: int, -) -> pd.DataFrame: - """Build a long-format DataFrame for scalar visualizations supporting both current and historical data.""" - rows: list[dict[str, Any]] = [] - - for field_name, ref in mappings.items(): - if ref.source not in ("scalar_pattern", "scalar_history"): - continue - - if ref.key is None: - raise ConfigValidationError(f"{ref.source} field references must specify a key") - - # Determine which scalar keys this mapping should include - if has_pattern(ref.key): - # Match pattern against both current scalars and history keys - all_available_keys = set(scalars.keys()) | set(scalar_history.keys()) - matched_keys = _expand_scalar_pattern_keys(ref.key, all_available_keys, analysis_name) - else: - matched_keys = [ref.key if "/" in ref.key else f"{analysis_name}/{ref.key}"] - - for scalar_key in matched_keys: - if ref.source == "scalar_pattern": - # scalar_pattern: Always use current scalar values - # This ensures compatibility with accumulate_steps file persistence - if scalar_key in scalars: - value = scalars[scalar_key] - rows.append( - { - "step": current_step, - "layer": _scalar_pattern_label(scalar_key), - field_name: value, - "metric": scalar_key, - } - ) - elif ref.source == "scalar_history": - # scalar_history: Use 
full in-memory history - if scalar_key in scalar_history and scalar_history[scalar_key]: - for step, value in scalar_history[scalar_key]: - rows.append( - { - "step": step, - "layer": _scalar_pattern_label(scalar_key), - field_name: value, - "metric": scalar_key, - } - ) - elif scalar_key in scalars: - # No history yet, use current value - value = scalars[scalar_key] - rows.append( - { - "step": current_step, - "layer": _scalar_pattern_label(scalar_key), - field_name: value, - "metric": scalar_key, - } - ) - - if not rows: - raise ConfigValidationError( - "Scalar visualization could not find any matching scalar values. " - f"Available keys: {list(scalars.keys())}, History keys: {list(scalar_history.keys())}" - ) - - return pd.DataFrame(rows) - - -def _build_scalar_series_dataframe( - mapping: ScalarSeriesMapping, - metadata_columns: Mapping[str, Any], - scalars: Mapping[str, float], - layer_names: list[str], - analysis_name: str, -) -> pd.DataFrame: - """Build a DataFrame from scalar series data.""" - base_metadata = _scalar_series_metadata(metadata_columns) - rows: list[dict[str, Any]] = [] - for layer_name in layer_names: - formatted_layer = format_layer_spec(layer_name) - index_values = mapping.index_values or _infer_scalar_series_indices(mapping, scalars, layer_name, analysis_name) - for index_value in index_values: - raw_key = mapping.key_template.format(layer=formatted_layer, index=index_value) - scalar_key = f"{analysis_name}/{raw_key}" - scalar_value = scalars.get(scalar_key) - if scalar_value is None: - continue - row: dict[str, Any] = { - mapping.index_field: index_value, - mapping.value_field: scalar_value, - "layer": layer_name, - } - row.update(base_metadata) - rows.append(row) - if not rows: - raise ConfigValidationError( - "Scalar series visualization could not resolve any scalar values with the provided key_template." 
- ) - return pd.DataFrame(rows) - - -def _infer_scalar_series_indices( - mapping: ScalarSeriesMapping, - scalars: Mapping[str, float], - layer_name: str, - analysis_name: str, -) -> list[int]: - """Infer available indices for scalar series from available scalar keys.""" - formatted_layer = format_layer_spec(layer_name) - raw_template = mapping.key_template.format(layer=formatted_layer, index=_SCALAR_INDEX_SENTINEL) - template = f"{analysis_name}/{raw_template}" - if _SCALAR_INDEX_SENTINEL not in template: - raise ConfigValidationError( - "scalar_series.key_template must include '{index}' placeholder to infer index values." - ) - prefix, suffix = template.split(_SCALAR_INDEX_SENTINEL, 1) - inferred: set[int] = set() - for key in scalars: - if not key.startswith(prefix): - continue - if suffix and not key.endswith(suffix): - continue - body = key[len(prefix) : len(key) - len(suffix) if suffix else None] - if not body: - continue - try: - inferred.add(int(body)) - except ValueError: - continue - if not inferred: - raise ConfigValidationError( - f"Scalar series could not infer indices for layer '{layer_name}' " - f"using key_template '{mapping.key_template}'." 
- ) - return sorted(inferred) - - -def _scalar_series_metadata(metadata_columns: Mapping[str, Any]) -> dict[str, Any]: - """Extract scalar metadata from metadata columns.""" - metadata: dict[str, Any] = {} - for key, value in metadata_columns.items(): - if isinstance(value, np.ndarray): - if value.size == 0: - continue - metadata[key] = value.flat[0] - else: - metadata[key] = value - return metadata - - -def _build_dataframe_for_mappings( - mappings: dict[str, ActivationVisualizationFieldRef], - metadata_columns: Mapping[str, Any], - arrays: Mapping[str, np.ndarray], - scalars: Mapping[str, float], - belief_states: np.ndarray | None, - analysis_concat_layers: bool, - layer_names: list[str], -) -> pd.DataFrame: - """Build a DataFrame from a single set of mappings (used by both regular and combined modes).""" - base_rows = len(metadata_columns["step"]) - frames: list[pd.DataFrame] = [] - - # Check if mappings are belief-state-only (don't need layer iteration) - all_belief_states = all(ref.source == "belief_states" for ref in mappings.values()) - effective_layer_names = ["_no_layer_"] if all_belief_states else layer_names - - for layer_name in effective_layer_names: - # Expand all mappings first - expanded_mappings: dict[str, ActivationVisualizationFieldRef] = {} - for field_name, ref in mappings.items(): - try: - expanded = _expand_field_mapping( - field_name, ref, layer_name, arrays, scalars, belief_states, analysis_concat_layers - ) - expanded_mappings.update(expanded) - except ConfigValidationError as e: - raise ConfigValidationError(f"Error expanding '{field_name}' for layer '{layer_name}': {e}") from e - - # Check if any refs have group expansion (_group_value set) - group_refs = {col: ref for col, ref in expanded_mappings.items() if ref._group_value is not None} - non_group_refs = {col: ref for col, ref in expanded_mappings.items() if ref._group_value is None} - - if group_refs: - # Group expansion: restructure to long format - # Group refs by _group_value - 
groups: dict[str, dict[str, ActivationVisualizationFieldRef]] = {} - group_column_name: str | None = None - - for col, ref in group_refs.items(): - group_val = ref._group_value - assert group_val is not None - if group_val not in groups: - groups[group_val] = {} - groups[group_val][col] = ref - - # Extract group column name from group_as - if ref.group_as is not None: - if isinstance(ref.group_as, str): - group_column_name = ref.group_as - elif isinstance(ref.group_as, list) and len(ref.group_as) > 0: - group_column_name = ref.group_as[0] - - if group_column_name is None: - group_column_name = "group" # Default fallback - - # Build DataFrame chunks for each group value - for group_val, group_col_refs in sorted(groups.items(), key=lambda x: int(x[0])): - group_data = {key: np.copy(value) for key, value in metadata_columns.items()} - group_data["layer"] = np.repeat(layer_name, base_rows) - # Ensure group value is always string for consistent faceting - group_data[group_column_name] = np.repeat(str(group_val), base_rows) - - # Add non-group columns (same for all groups) - for column, ref in non_group_refs.items(): - group_data[column] = _resolve_field( - ref, - layer_name, - arrays, - scalars, - belief_states, - analysis_concat_layers, - base_rows, - metadata_columns, - ) - - # Add group-specific columns with base names (strip group index) - for column, ref in group_col_refs.items(): - base_col_name = _extract_base_column_name(column, group_val) - group_data[base_col_name] = _resolve_field( - ref, - layer_name, - arrays, - scalars, - belief_states, - analysis_concat_layers, - base_rows, - metadata_columns, - ) - - frames.append(pd.DataFrame(group_data)) - else: - # No group expansion: standard DataFrame construction - layer_data = {key: np.copy(value) for key, value in metadata_columns.items()} - layer_data["layer"] = np.repeat(layer_name, base_rows) - - for column, ref in expanded_mappings.items(): - layer_data[column] = _resolve_field( - ref, - layer_name, - arrays, 
- scalars, - belief_states, - analysis_concat_layers, - base_rows, - metadata_columns, - ) - frames.append(pd.DataFrame(layer_data)) - - return pd.concat(frames, ignore_index=True) - - -def _build_dataframe( - viz_cfg: ActivationVisualizationConfig, - metadata_columns: Mapping[str, Any], - arrays: Mapping[str, np.ndarray], - scalars: Mapping[str, float], - scalar_history: Mapping[str, list[tuple[int, float]]], - scalar_history_step: int | None, - belief_states: np.ndarray | None, - analysis_concat_layers: bool, - layer_names: list[str], -) -> pd.DataFrame: - """Build a DataFrame from visualization configuration.""" - # Handle combined mappings (multiple data sources with labels) - if viz_cfg.data_mapping.combined is not None: - combined_frames: list[pd.DataFrame] = [] - combine_column = viz_cfg.data_mapping.combine_as - assert combine_column is not None, "combine_as should be validated in config" - - for section in viz_cfg.data_mapping.combined: - section_df = _build_dataframe_for_mappings( - section.mappings, - metadata_columns, - arrays, - scalars, - belief_states, - analysis_concat_layers, - layer_names, - ) - section_df[combine_column] = section.label - combined_frames.append(section_df) - - return pd.concat(combined_frames, ignore_index=True) - - # Check if this is a scalar_pattern or scalar_history visualization - has_scalar_pattern = any(ref.source == "scalar_pattern" for ref in viz_cfg.data_mapping.mappings.values()) - has_scalar_history = any(ref.source == "scalar_history" for ref in viz_cfg.data_mapping.mappings.values()) - - if has_scalar_pattern or has_scalar_history: - if scalar_history_step is None: - raise ConfigValidationError( - "Visualization uses scalar_pattern/scalar_history " - "source but analyze() was called without the `step` parameter." 
- ) - if "analysis" not in metadata_columns: - raise ConfigValidationError("scalar_pattern/scalar_history requires 'analysis' in metadata_columns.") - analysis_name = str(metadata_columns["analysis"][0]) - return _build_scalar_dataframe( - viz_cfg.data_mapping.mappings, - scalars, - scalar_history, - analysis_name, - scalar_history_step, - ) - - if viz_cfg.data_mapping.scalar_series is not None: - if "analysis" not in metadata_columns: - raise ConfigValidationError("scalar_series requires 'analysis' in metadata_columns.") - analysis_name = str(metadata_columns["analysis"][0]) - return _build_scalar_series_dataframe( - viz_cfg.data_mapping.scalar_series, - metadata_columns, - scalars, - layer_names, - analysis_name, - ) - - # Standard mappings mode - delegate to helper - return _build_dataframe_for_mappings( - viz_cfg.data_mapping.mappings, - metadata_columns, - arrays, - scalars, - belief_states, - analysis_concat_layers, - layer_names, - ) - - -def _apply_sampling( - df: pd.DataFrame, - config: SamplingConfig, - facet_columns: list[str], -) -> pd.DataFrame: - """Sample DataFrame down to max_points per facet group. 
- - Args: - df: The DataFrame to sample - config: Sampling configuration with max_points and optional seed - facet_columns: Column names used for faceting/subplots (e.g., layer, factor, data_type) - - Returns: - Sampled DataFrame with at most max_points rows per facet group - """ - if config.max_points is None: - return df - - group_cols = [col for col in facet_columns if col in df.columns] - - if not group_cols: - if len(df) <= config.max_points: - return df - return df.sample(n=config.max_points, random_state=config.seed) - - def sample_group(group: pd.DataFrame) -> pd.DataFrame: - if len(group) <= config.max_points: # type: ignore[operator] - return group - return group.sample(n=config.max_points, random_state=config.seed) # type: ignore[arg-type] - - # Use group_keys=True to preserve group columns in index, include_groups=False to avoid FutureWarning, - # then reset_index to restore group columns as regular columns - return ( - df.groupby(group_cols, group_keys=True) - .apply(sample_group, include_groups=False) - .reset_index(level=group_cols) - .reset_index(drop=True) - ) - - -__all__ = [ - "_apply_sampling", - "_build_dataframe", - "_build_dataframe_for_mappings", - "_build_metadata_columns", - "_build_scalar_dataframe", - "_build_scalar_series_dataframe", - "_extract_base_column_name", - "_infer_scalar_series_indices", - "_scalar_series_metadata", -] diff --git a/simplexity/activations/visualization/field_resolution.py b/simplexity/activations/visualization/field_resolution.py deleted file mode 100644 index 38f5e23b..00000000 --- a/simplexity/activations/visualization/field_resolution.py +++ /dev/null @@ -1,198 +0,0 @@ -"""Field resolution from arrays, scalars, and belief states.""" - -from __future__ import annotations - -from collections.abc import Mapping - -import numpy as np - -from simplexity.activations.visualization_configs import ActivationVisualizationFieldRef -from simplexity.analysis.metric_keys import construct_layer_specific_key, 
format_layer_spec -from simplexity.exceptions import ConfigValidationError - - -def _lookup_array(arrays: Mapping[str, np.ndarray], layer_name: str, key: str, concat_layers: bool) -> np.ndarray: - """Look up an array by key, handling layer naming conventions. - - Supports keys in the format "{analysis}/{layer_spec}" (e.g., "pca/L0.resid.pre") - or "{analysis}/{layer_spec}-{factor_spec}" (e.g., "reg/L0.resid.pre-F0"). - - When key contains a factor suffix (e.g., "projected/F0"), looks for the full key - "{analysis}/{layer_spec}-{factor_spec}" (e.g., "projected/L0.resid.pre-F0"). - """ - for full_key, value in arrays.items(): - if concat_layers: - if full_key == key or full_key.startswith(f"{key}/"): - return value - else: - if _key_matches_layer(full_key, key, layer_name): - return value - raise ConfigValidationError(f"Array '{key}' not available for layer '{layer_name}'.") - - -def _key_matches_layer(full_key: str, key: str, layer_name: str) -> bool: - """Check if a full key matches the given key pattern and layer name. - - Handles two formats: - - Simple: key="pca" matches full_key="pca/L0.resid.pre" - - Factor: key="projected/F0" matches full_key="projected/L0.resid.pre-F0" - - The layer_name is formatted using format_layer_spec before matching. - """ - if "/" not in full_key: - return False - - formatted_layer = format_layer_spec(layer_name) - - # Check if key has a factor suffix (e.g., "projected/F0") - if "/" in key: - return full_key == construct_layer_specific_key(key, formatted_layer) - - # Simple key format: key="pca" matches "pca/L0.resid.pre" - prefix = f"{key}/" - if not full_key.startswith(prefix): - return False - layer_part = full_key[len(prefix) :] - candidate_layer = layer_part.split("-")[0] - return candidate_layer == formatted_layer - - -def _lookup_scalar_value(scalars: Mapping[str, float], layer_name: str, key: str, concat_layers: bool) -> float: - """Look up a scalar value by key, handling layer naming conventions. 
- - Supports keys in the format "{metric}/{layer_spec}" (e.g., "r2/L0.resid.pre") - or "{metric}/{layer_spec}-{factor_spec}" (e.g., "r2/L0.resid.pre-F0"). - - The layer_name is formatted using format_layer_spec before matching. - """ - formatted_layer = format_layer_spec(layer_name) - prefix = f"{key}/" - for full_key, value in scalars.items(): - if concat_layers: - if full_key.startswith(prefix) or full_key == key: - return float(value) - else: - if not full_key.startswith(prefix): - continue - layer_part = full_key[len(prefix) :] - candidate_layer = layer_part.split("-")[0] - if candidate_layer == formatted_layer: - return float(value) - raise ConfigValidationError(f"Scalar '{key}' not available for layer '{layer_name}'.") - - -def _maybe_component(array: np.ndarray, component: int | None) -> np.ndarray: - """Extract a component from a 2D array, or return the 1D array as-is.""" - np_array = np.asarray(array) - if np_array.ndim == 1: - if component is not None: - raise ConfigValidationError("Component index is invalid for 1D projection arrays.") - return np_array - if np_array.ndim != 2: - raise ConfigValidationError("Projection arrays must be 1D or 2D.") - if component is None: - raise ConfigValidationError("Projection references for 2D arrays must specify `component`.") - if component < 0 or component >= np_array.shape[1]: - raise ConfigValidationError( - f"Component index {component} is out of bounds for projection dimension {np_array.shape[1]}" - ) - return np_array[:, component] - - -def _resolve_belief_states(belief_states: np.ndarray, ref: ActivationVisualizationFieldRef) -> np.ndarray: - """Resolve belief states to a 1D array based on field reference configuration.""" - np_array = np.asarray(belief_states) - - # Handle factor dimension for 3D belief states (samples, factors, states) - if np_array.ndim == 3: - if ref.factor is None: - raise ConfigValidationError( - f"Belief states have 3 dimensions (samples, factors, states) but no `factor` was specified. 
" - f"Shape: {np_array.shape}" - ) - if isinstance(ref.factor, str): - raise ConfigValidationError("Factor patterns should be expanded before resolution") - factor_idx = ref.factor - if factor_idx < 0 or factor_idx >= np_array.shape[1]: - raise ConfigValidationError( - f"Belief state factor {factor_idx} is out of bounds for dimension {np_array.shape[1]}" - ) - np_array = np_array[:, factor_idx, :] # Now 2D: (samples, states) - elif np_array.ndim == 2: - if ref.factor is not None: - raise ConfigValidationError( - f"Belief states are 2D but `factor={ref.factor}` was specified. " - f"Factor selection requires 3D belief states (samples, factors, states)." - ) - else: - raise ConfigValidationError(f"Belief states must be 2D or 3D, got {np_array.ndim}D") - - # Now np_array is 2D: (samples, states) - if ref.reducer == "argmax": - return np.argmax(np_array, axis=1) - if ref.reducer == "l2_norm": - return np.linalg.norm(np_array, axis=1) - if isinstance(ref.component, str): - raise ConfigValidationError("Component indices should be expanded before resolution") - component = ref.component if ref.component is not None else 0 - if component < 0 or component >= np_array.shape[1]: - raise ConfigValidationError( - f"Belief state component {component} is out of bounds for dimension {np_array.shape[1]}" - ) - return np_array[:, component] - - -def _resolve_field( - ref: ActivationVisualizationFieldRef, - layer_name: str, - arrays: Mapping[str, np.ndarray], - scalars: Mapping[str, float], - belief_states: np.ndarray | None, - analysis_concat_layers: bool, - num_rows: int, - metadata_columns: Mapping[str, object], -) -> np.ndarray: - """Resolve a field reference to a numpy array of values.""" - if ref.source == "metadata": - if ref.key is None: - raise ConfigValidationError("Metadata references must specify `key`.") - if ref.key == "layer": - return np.repeat(layer_name, num_rows) - if ref.key not in metadata_columns: - raise ConfigValidationError(f"Metadata column '{ref.key}' is not 
available.") - return np.asarray(metadata_columns[ref.key]) - - if ref.source == "weights": - if "weight" not in metadata_columns: - raise ConfigValidationError("Weight metadata is unavailable for visualization mapping.") - return np.asarray(metadata_columns["weight"]) - - if ref.source == "arrays": - if ref.key is None: - raise ConfigValidationError("Array references must supply a `key` value.") - array = _lookup_array(arrays, layer_name, ref.key, analysis_concat_layers) - if isinstance(ref.component, str): - raise ConfigValidationError("Component indices should be expanded before resolution") - return _maybe_component(array, ref.component) - - if ref.source == "belief_states": - if belief_states is None: - raise ConfigValidationError("Visualization requests belief_states but they were not retained.") - return _resolve_belief_states(belief_states, ref) - - if ref.source == "scalars": - if ref.key is None: - raise ConfigValidationError("Scalar references must supply `key`.") - value = _lookup_scalar_value(scalars, layer_name, ref.key, analysis_concat_layers) - return np.repeat(value, num_rows) - - raise ConfigValidationError(f"Unsupported field source '{ref.source}'") - - -__all__ = [ - "_lookup_array", - "_lookup_scalar_value", - "_maybe_component", - "_resolve_belief_states", - "_resolve_field", -] diff --git a/simplexity/activations/visualization/pattern_expansion.py b/simplexity/activations/visualization/pattern_expansion.py deleted file mode 100644 index 6190e1a4..00000000 --- a/simplexity/activations/visualization/pattern_expansion.py +++ /dev/null @@ -1,616 +0,0 @@ -"""Pattern parsing and expansion logic for visualization field mappings.""" - -from __future__ import annotations - -import re -from collections.abc import Iterable, Mapping - -import numpy as np - -from simplexity.activations.visualization.field_resolution import _lookup_array -from simplexity.activations.visualization.pattern_utils import ( - build_wildcard_regex, - count_patterns, - 
has_pattern, - parse_range, - substitute_pattern, - validate_single_pattern, -) -from simplexity.activations.visualization_configs import ActivationVisualizationFieldRef -from simplexity.analysis.metric_keys import format_layer_spec -from simplexity.exceptions import ConfigValidationError - - -def _has_key_pattern(key: str | None) -> bool: - """Check if key contains * or range pattern (e.g., factor_*/projected).""" - if key is None: - return False - validate_single_pattern(key, "Key") - return has_pattern(key) - - -def _has_field_pattern(field_name: str) -> bool: - """Check if field name contains * or range pattern.""" - validate_single_pattern(field_name, "Field name") - return has_pattern(field_name) - - -def _parse_component_spec(component: int | str | None) -> tuple[str, int | None, int | None]: - """Parse component into (type, start, end). - - Returns: - - ("single", val, None) for int component - - ("wildcard", None, None) for "*" - - ("range", start, end) for "start...end" - - ("none", None, None) for None - """ - if component is None: - return ("none", None, None) - if isinstance(component, int): - return ("single", component, None) - if component == "*": - return ("wildcard", None, None) - if "..." in component: - parts = component.split("...") - if len(parts) != 2: - raise ConfigValidationError(f"Invalid range: {component}") - try: - start, end = int(parts[0]), int(parts[1]) - if start >= end: - raise ConfigValidationError(f"Range start must be < end: {component}") - return ("range", start, end) - except ValueError as e: - raise ConfigValidationError(f"Invalid range: {component}") from e - raise ConfigValidationError(f"Unrecognized component pattern: {component}") - - -def _expand_pattern_to_indices( - pattern: str, - available_keys: Iterable[str], -) -> list[int]: - """Extract numeric indices from keys matching a wildcard or range pattern. 
- - Args: - pattern: Pattern with * or N...M - available_keys: Keys to match against - - Returns: - Sorted list of unique indices that match the pattern - """ - if not has_pattern(pattern): - raise ConfigValidationError(f"Pattern '{pattern}' has no wildcard or range") - - if "*" in pattern: - regex_pattern = build_wildcard_regex(pattern) - indices: list[int] = [] - for key in available_keys: - match = regex_pattern.match(key) - if match: - try: - indices.append(int(match.group(1))) - except (ValueError, IndexError): - continue - if not indices: - raise ConfigValidationError(f"No keys found matching pattern '{pattern}'") - return sorted(set(indices)) - else: - range_bounds = parse_range(pattern) - if not range_bounds: - raise ConfigValidationError(f"Invalid range pattern in '{pattern}'") - start_idx, end_idx = range_bounds - return list(range(start_idx, end_idx)) - - -def _get_component_count( - ref: ActivationVisualizationFieldRef, - layer_name: str, - arrays: Mapping[str, np.ndarray], - belief_states: np.ndarray | None, - analysis_concat_layers: bool, -) -> int: - """Get number of components available for expansion.""" - if ref.source == "arrays": - if ref.key is None: - raise ConfigValidationError("Array refs require key") - array = _lookup_array(arrays, layer_name, ref.key, analysis_concat_layers) - np_array = np.asarray(array) - if np_array.ndim == 1: - raise ConfigValidationError(f"Cannot expand 1D projection '{ref.key}'. 
Patterns require 2D arrays.") - if np_array.ndim != 2: - raise ConfigValidationError(f"Projection must be 1D or 2D, got {np_array.ndim}D") - return np_array.shape[1] - - if ref.source == "belief_states": - if belief_states is None: - raise ConfigValidationError("Belief states not available") - np_array = np.asarray(belief_states) - if np_array.ndim != 2: - raise ConfigValidationError(f"Belief states must be 2D, got {np_array.ndim}D") - return np_array.shape[1] - - raise ConfigValidationError(f"Component expansion not supported for source: {ref.source}") - - -def _expand_array_key_pattern( - key_pattern: str, - layer_name: str, - arrays: Mapping[str, np.ndarray], - analysis_concat_layers: bool, -) -> dict[str, str]: - """Expand array key patterns against available keys. - - Args: - key_pattern: Pattern like "factor_*/projected" or "factor_0...3/projected" - layer_name: Current layer name for matching - arrays: Available arrays - analysis_concat_layers: Whether layers were concatenated - - Returns: - Dict mapping extracted index (as string) to the concrete key suffix. 
- E.g., {"0": "factor_0/projected", "1": "factor_1/projected"} - """ - # Format layer name to match against projection keys which use formatted names - formatted_layer = format_layer_spec(layer_name) - - # Build regex from pattern - if "*" in key_pattern: - regex_pattern = build_wildcard_regex(key_pattern) - else: - # Range pattern like "factor_0...3/projected" - range_bounds = parse_range(key_pattern) - if not range_bounds: - raise ConfigValidationError(f"Invalid key pattern: {key_pattern}") - start_idx, end_idx = range_bounds - if start_idx >= end_idx: - raise ConfigValidationError(f"Invalid range in key pattern: {key_pattern}") - # Return explicit range without matching - result = {} - for idx in range(start_idx, end_idx): - concrete_key = substitute_pattern(key_pattern, idx) - result[str(idx)] = concrete_key - return result - - # Match against available arrays - result: dict[str, str] = {} - for full_key in arrays: - # Extract the key suffix for pattern matching - if analysis_concat_layers: - # Keys are like "analysis/Lcat" or "analysis/Lcat-F0" directly - key_suffix = full_key - else: - # New format: keys are like "analysis/layer_name" or "analysis/layer_name-F0" - # Extract the analysis prefix and factor suffix for matching - if "/" not in full_key: - continue - parts = full_key.rsplit("/", 1) - if len(parts) != 2: - continue - analysis_prefix, layer_part = parts - - # Check if this key is for the current layer - if not layer_part.startswith(formatted_layer): - continue - - # Extract factor suffix if present (e.g., "L0.resid.pre-F0" -> "-F0") - factor_suffix = layer_part[len(formatted_layer) :] - - # Reconstruct a pattern-matchable key suffix - # Convert "projected/layer_0-F0" to "projected/F0" for pattern matching - if factor_suffix.startswith("-"): - key_suffix = f"{analysis_prefix}/{factor_suffix[1:]}" - else: - key_suffix = analysis_prefix - - match = regex_pattern.match(key_suffix) - if match: - extracted_idx = match.group(1) - if extracted_idx not in 
result: - result[extracted_idx] = key_suffix - - if not result: - raise ConfigValidationError( - f"No array keys found matching pattern '{key_pattern}' for layer '{layer_name}'. " - f"Available keys: {list(arrays.keys())}" - ) - - return result - - -def _expand_array_key_mapping( - field_name: str, - ref: ActivationVisualizationFieldRef, - layer_name: str, - arrays: Mapping[str, np.ndarray], - analysis_concat_layers: bool, -) -> dict[str, ActivationVisualizationFieldRef]: - """Expand array key patterns, optionally combined with component patterns. - - Handles cross-product expansion when both key and component patterns are present. - Sets _group_value on expanded refs for DataFrame construction. - """ - assert ref.key is not None, "Key must be provided for projection key pattern expansion" - - # Expand key pattern to get concrete keys - key_expansions = _expand_array_key_pattern(ref.key, layer_name, arrays, analysis_concat_layers) - - # Check if component expansion is also needed - spec_type, start_idx, end_idx = _parse_component_spec(ref.component) - needs_component_expansion = spec_type in ("wildcard", "range") - - expanded: dict[str, ActivationVisualizationFieldRef] = {} - - # Count patterns in field name to handle cross-product correctly - total_field_patterns = count_patterns(field_name) - - for group_idx, concrete_key in sorted(key_expansions.items(), key=lambda x: int(x[0])): - if needs_component_expansion: - # Get component count for this specific key - array = _lookup_array(arrays, layer_name, concrete_key, analysis_concat_layers) - np_array = np.asarray(array) - if np_array.ndim != 2: - raise ConfigValidationError( - f"Component expansion requires 2D projection, got {np_array.ndim}D for key '{concrete_key}'" - ) - max_components = np_array.shape[1] - - if spec_type == "wildcard": - components = list(range(max_components)) - else: - assert start_idx is not None - assert end_idx is not None - if end_idx > max_components: - raise ConfigValidationError( - 
f"Range {start_idx}...{end_idx} exceeds components ({max_components}) for key '{concrete_key}'" - ) - components = list(range(start_idx, end_idx)) - - # Cross-product: expand both key and component - for comp_idx in components: - # Replace patterns in field name (key pattern first, then component) - if total_field_patterns == 2: - # Two patterns: first for key, second for component - expanded_name = substitute_pattern(field_name, int(group_idx)) - expanded_name = substitute_pattern(expanded_name, comp_idx) - elif total_field_patterns == 1: - # Only one pattern in field name - use for component, prefix with group index - # to ensure unique keys when iterating over multiple groups - expanded_name = f"factor_{group_idx}_{substitute_pattern(field_name, comp_idx)}" - else: - raise ConfigValidationError( - f"Field '{field_name}' must have 1-2 patterns for key+component expansion" - ) - - expanded[expanded_name] = ActivationVisualizationFieldRef( - source="arrays", - key=concrete_key, - component=comp_idx, - reducer=ref.reducer, - group_as=ref.group_as, - _group_value=str(group_idx), - ) - else: - # Only key pattern, no component expansion - expanded_name = substitute_pattern(field_name, int(group_idx)) - - expanded[expanded_name] = ActivationVisualizationFieldRef( - source="arrays", - key=concrete_key, - component=ref.component, # Keep original (could be None or int) - reducer=ref.reducer, - group_as=ref.group_as, - _group_value=str(group_idx), - ) - - return expanded - - -def _expand_belief_factor_mapping( - field_name: str, - ref: ActivationVisualizationFieldRef, - belief_states: np.ndarray, -) -> dict[str, ActivationVisualizationFieldRef]: - """Expand belief state factor patterns, optionally combined with component patterns. - - Handles cross-product expansion when both factor and component patterns are present. - Sets _group_value on expanded refs for DataFrame construction. 
- """ - np_beliefs = np.asarray(belief_states) - if np_beliefs.ndim != 3: - raise ConfigValidationError( - f"Belief state factor patterns require 3D beliefs (samples, factors, states), got {np_beliefs.ndim}D" - ) - - n_factors = np_beliefs.shape[1] - n_states = np_beliefs.shape[2] - - # Parse factor pattern using _parse_component_spec (same pattern syntax) - try: - factor_spec_type, factor_start, factor_end = _parse_component_spec(ref.factor) - except ConfigValidationError: - raise ConfigValidationError(f"Invalid factor pattern: {ref.factor}") from None - - if factor_spec_type == "wildcard": - factors = list(range(n_factors)) - elif factor_spec_type == "range": - assert factor_start is not None - assert factor_end is not None - if factor_end > n_factors: - raise ConfigValidationError( - f"Factor range {factor_start}...{factor_end} exceeds available factors ({n_factors})" - ) - factors = list(range(factor_start, factor_end)) - else: - raise ConfigValidationError(f"Invalid factor pattern: {ref.factor}") - - # Check if component expansion is also needed - spec_type, start_idx, end_idx = _parse_component_spec(ref.component) - needs_component_expansion = spec_type in ("wildcard", "range") - - expanded: dict[str, ActivationVisualizationFieldRef] = {} - - # Count patterns in field name - total_field_patterns = count_patterns(field_name) - - for factor_idx in factors: - if needs_component_expansion: - # Get component range - if spec_type == "wildcard": - components = list(range(n_states)) - else: - assert start_idx is not None - assert end_idx is not None - if end_idx > n_states: - raise ConfigValidationError(f"Component range {start_idx}...{end_idx} exceeds states ({n_states})") - components = list(range(start_idx, end_idx)) - - # Cross-product: expand both factor and component - for comp_idx in components: - if total_field_patterns == 2: - # Two patterns: first for factor, second for component - expanded_name = substitute_pattern(field_name, factor_idx) - expanded_name = 
substitute_pattern(expanded_name, comp_idx) - elif total_field_patterns == 1: - # Only one pattern in field name - use for component, prefix with factor index - # to ensure unique keys when iterating over multiple factors - expanded_name = f"factor_{factor_idx}_{substitute_pattern(field_name, comp_idx)}" - else: - raise ConfigValidationError( - f"Field '{field_name}' must have 1-2 patterns for factor+component expansion" - ) - - expanded[expanded_name] = ActivationVisualizationFieldRef( - source="belief_states", - key=ref.key, - component=comp_idx, - reducer=ref.reducer, - group_as=ref.group_as, - factor=factor_idx, - _group_value=str(factor_idx), - ) - else: - # Only factor pattern, no component expansion - expanded_name = substitute_pattern(field_name, factor_idx) - - expanded[expanded_name] = ActivationVisualizationFieldRef( - source="belief_states", - key=ref.key, - component=ref.component, - reducer=ref.reducer, - group_as=ref.group_as, - factor=factor_idx, - _group_value=str(factor_idx), - ) - - return expanded - - -def _expand_scalar_keys( - field_pattern: str, - key_pattern: str | None, - scalars: Mapping[str, float], -) -> dict[str, str]: - """Expand scalar field patterns by matching available scalar keys. - - Returns dict of expanded field_name → scalar_key. 
- """ - if key_pattern is None: - raise ConfigValidationError("Scalar wildcard expansion requires a key pattern") - - if not has_pattern(key_pattern): - return {field_pattern: key_pattern} - - indices = _expand_pattern_to_indices(key_pattern, scalars.keys()) - - expanded = {} - for idx in indices: - expanded_field = substitute_pattern(field_pattern, idx) if has_pattern(field_pattern) else field_pattern - expanded_key = substitute_pattern(key_pattern, idx) - expanded[expanded_field] = expanded_key - - return expanded - - -def _expand_scalar_pattern_keys( - pattern: str, - available_keys: Iterable[str], - analysis_name: str, -) -> list[str]: - """Expand wildcard/range pattern against available scalar keys.""" - keys = list(available_keys) - prefix = f"{analysis_name}/" - keys_have_prefix = any(key.startswith(prefix) for key in keys) - - normalized_pattern = pattern - if keys_have_prefix and not pattern.startswith(prefix): - normalized_pattern = f"{prefix}{pattern}" - elif not keys_have_prefix and pattern.startswith(prefix): - normalized_pattern = pattern[len(prefix) :] - - pattern_variants = _expand_scalar_pattern_ranges(normalized_pattern) - matched: list[str] = [] - - for variant in pattern_variants: - if "*" in variant: - escaped = re.escape(variant).replace(r"\*", r"([^/]+)") - regex = re.compile(f"^{escaped}$") - matched.extend(key for key in keys if regex.match(key)) - else: - if variant in keys: - matched.append(variant) - - unique_matches: list[str] = [] - seen: set[str] = set() - for key in matched: - if key not in seen: - seen.add(key) - unique_matches.append(key) - - if not unique_matches: - raise ConfigValidationError(f"No scalar pattern keys found matching pattern '{pattern}'") - - return sorted(unique_matches) - - -def _expand_scalar_pattern_ranges(pattern: str) -> list[str]: - """Expand numeric range tokens (e.g., 0...4) within a scalar pattern.""" - range_bounds = parse_range(pattern) - if not range_bounds: - return [pattern] - - start_idx, end_idx = 
range_bounds - if start_idx >= end_idx: - raise ConfigValidationError(f"Invalid range pattern in scalar pattern key '{pattern}'") - - expanded: list[str] = [] - for idx in range(start_idx, end_idx): - replaced = substitute_pattern(pattern, idx) - expanded.extend(_expand_scalar_pattern_ranges(replaced)) - return expanded - - -def _scalar_pattern_label(full_key: str) -> str: - """Derive a categorical label for scalar pattern rows based on the key.""" - suffix = full_key.split("/", 1)[1] if "/" in full_key else full_key - layer_match = re.search(r"(layer_\d+)", suffix) - if layer_match: - return layer_match.group(1) - return suffix - - -def _expand_field_mapping( - field_name: str, - ref: ActivationVisualizationFieldRef, - layer_name: str, - arrays: Mapping[str, np.ndarray], - scalars: Mapping[str, float], - belief_states: np.ndarray | None, - analysis_concat_layers: bool, -) -> dict[str, ActivationVisualizationFieldRef]: - """Expand pattern-based mapping into concrete mappings. - - Returns dict of expanded field_name → FieldRef with concrete component/key values. 
- """ - # Check for projection key patterns FIRST (allows multiple field patterns for key+component) - if ref.source == "arrays" and ref.key and _has_key_pattern(ref.key): - # For key pattern expansion, we allow up to 2 patterns in field name - # (one for key expansion, one for component expansion) - total_field_patterns = count_patterns(field_name) - - if total_field_patterns == 0: - raise ConfigValidationError(f"Projection key pattern '{ref.key}' requires field name pattern") - if total_field_patterns > 2: - raise ConfigValidationError( - f"Field name '{field_name}' has too many patterns (max 2 for key+component expansion)" - ) - - return _expand_array_key_mapping(field_name, ref, layer_name, arrays, analysis_concat_layers) - - # Check for belief state factor patterns - if ref.source == "belief_states" and ref.factor is not None and isinstance(ref.factor, str): - has_factor_pattern = ref.factor == "*" or "..." in ref.factor - if has_factor_pattern: - if belief_states is None: - raise ConfigValidationError("Belief state factor patterns require belief_states to be provided") - total_field_patterns = count_patterns(field_name) - - if total_field_patterns == 0: - raise ConfigValidationError(f"Belief state factor pattern '{ref.factor}' requires field name pattern") - if total_field_patterns > 2: - raise ConfigValidationError( - f"Field name '{field_name}' has too many patterns (max 2 for factor+component expansion)" - ) - - return _expand_belief_factor_mapping(field_name, ref, belief_states) - - field_has_pattern = _has_field_pattern(field_name) - - if ref.source == "scalars": - key_has_pattern = ref.key is not None and has_pattern(ref.key) - - if field_has_pattern and not key_has_pattern: - raise ConfigValidationError(f"Field '{field_name}' has pattern but scalar key has no pattern") - if key_has_pattern and not field_has_pattern: - raise ConfigValidationError(f"Scalar key pattern '{ref.key}' requires field name pattern") - - if not field_has_pattern: - return 
{field_name: ref} - - scalar_expansions = _expand_scalar_keys(field_name, ref.key, scalars) - return { - field: ActivationVisualizationFieldRef(source="scalars", key=key, component=None, reducer=None) - for field, key in scalar_expansions.items() - } - - spec_type, start_idx, end_idx = _parse_component_spec(ref.component) - needs_expansion = spec_type in ("wildcard", "range") - - if field_has_pattern and not needs_expansion: - raise ConfigValidationError(f"Field '{field_name}' has pattern but component is not wildcard/range") - if needs_expansion and not field_has_pattern: - raise ConfigValidationError(f"Component pattern '{ref.component}' requires field name pattern") - - if not needs_expansion: - return {field_name: ref} - - max_components = _get_component_count(ref, layer_name, arrays, belief_states, analysis_concat_layers) - - if spec_type == "wildcard": - components = list(range(max_components)) - else: - assert start_idx is not None, "Range spec must have start index" - assert end_idx is not None, "Range spec must have end index" - if end_idx > max_components: - raise ConfigValidationError( - f"Range {start_idx}...{end_idx} exceeds available components (max: {max_components})" - ) - components = list(range(start_idx, end_idx)) - - expanded = {} - for comp_idx in components: - expanded_name = substitute_pattern(field_name, comp_idx) - - expanded[expanded_name] = ActivationVisualizationFieldRef( - source=ref.source, - key=ref.key, - component=comp_idx, - reducer=ref.reducer, - ) - - return expanded - - -__all__ = [ - "_expand_belief_factor_mapping", - "_expand_field_mapping", - "_expand_pattern_to_indices", - "_expand_array_key_mapping", - "_expand_array_key_pattern", - "_expand_scalar_keys", - "_expand_scalar_pattern_keys", - "_expand_scalar_pattern_ranges", - "_get_component_count", - "_has_field_pattern", - "_has_key_pattern", - "_parse_component_spec", - "_scalar_pattern_label", -] diff --git a/simplexity/activations/visualization/pattern_utils.py 
b/simplexity/activations/visualization/pattern_utils.py deleted file mode 100644 index a26febdc..00000000 --- a/simplexity/activations/visualization/pattern_utils.py +++ /dev/null @@ -1,139 +0,0 @@ -"""Shared pattern detection, parsing, and substitution utilities.""" - -from __future__ import annotations - -import re - -from simplexity.exceptions import ConfigValidationError - -# Compiled regex for range patterns (e.g., "0...10") -RANGE_PATTERN = re.compile(r"(\d+)\.\.\.(\d+)") - - -def count_patterns(text: str) -> int: - """Count wildcard (*) and range (N...M) patterns in text. - - Args: - text: String to check for patterns - - Returns: - Total number of wildcard and range patterns found - """ - return text.count("*") + len(RANGE_PATTERN.findall(text)) - - -def has_pattern(text: str) -> bool: - """Check if text contains any wildcard (*) or range (N...M) pattern. - - Args: - text: String to check for patterns - - Returns: - True if text contains at least one pattern - """ - return "*" in text or bool(RANGE_PATTERN.search(text)) - - -def validate_single_pattern(text: str, context: str) -> None: - """Validate that text has at most one pattern. - - Args: - text: String to validate - context: Description for error message (e.g., "Key", "Field name") - - Raises: - ConfigValidationError: If text contains multiple patterns - """ - if count_patterns(text) > 1: - raise ConfigValidationError(f"{context} cannot have multiple patterns: {text}") - - -def substitute_pattern(text: str, index: int) -> str: - """Replace the first wildcard or range pattern with an index. - - Handles both wildcard (*) and range (N...M) patterns. If both are present, - wildcard takes precedence. 
- - Args: - text: String containing a pattern - index: Index value to substitute - - Returns: - Text with first pattern replaced by index - """ - if "*" in text: - return text.replace("*", str(index), 1) - return RANGE_PATTERN.sub(str(index), text, count=1) - - -def substitute_range(text: str, index: int) -> str: - """Replace a range pattern (N...M) with an index. - - Args: - text: String containing a range pattern - index: Index value to substitute - - Returns: - Text with range pattern replaced by index - """ - return RANGE_PATTERN.sub(str(index), text, count=1) - - -def parse_range(text: str) -> tuple[int, int] | None: - """Extract (start, end) from a range pattern. - - Args: - text: String potentially containing a range pattern like "0...10" - - Returns: - Tuple of (start, end) if range found, None otherwise - """ - match = RANGE_PATTERN.search(text) - if match: - return int(match.group(1)), int(match.group(2)) - return None - - -def is_valid_range(text: str) -> bool: - """Check if text is a valid range pattern with start < end. - - Args: - text: String to check (e.g., "0...10") - - Returns: - True if text is a valid range pattern with start < end - """ - result = parse_range(text) - if result is None: - return False - start, end = result - return start < end - - -def build_wildcard_regex(pattern: str, capture: str = r"(\d+)") -> re.Pattern[str]: - """Build a regex pattern from a wildcard pattern. - - Escapes special regex characters and replaces * with a capture group. 
- - Args: - pattern: String with * wildcard (e.g., "factor_*/projected") - capture: Regex capture group to replace * with (default: numeric capture) - - Returns: - Compiled regex pattern for matching - """ - escaped = re.escape(pattern).replace(r"\*", capture) - return re.compile(f"^{escaped}$") - - -__all__ = [ - "RANGE_PATTERN", - "build_wildcard_regex", - "count_patterns", - "has_pattern", - "is_valid_range", - "parse_range", - "substitute_pattern", - "substitute_range", - "validate_single_pattern", -] diff --git a/simplexity/activations/visualization/preprocessing.py b/simplexity/activations/visualization/preprocessing.py deleted file mode 100644 index 48cb430a..00000000 --- a/simplexity/activations/visualization/preprocessing.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Post-processing transforms for visualization DataFrames.""" - -from __future__ import annotations - -import numpy as np -import pandas as pd - -from simplexity.activations.visualization.pattern_utils import ( - build_wildcard_regex, - has_pattern, - parse_range, - substitute_pattern, -) -from simplexity.activations.visualization_configs import ActivationVisualizationPreprocessStep -from simplexity.analysis.pca import compute_weighted_pca -from simplexity.exceptions import ConfigValidationError - - -def _expand_preprocessing_fields(field_patterns: list[str], available_columns: list[str]) -> list[str]: - """Expand wildcard and range patterns in preprocessing field lists. 
- - Args: - field_patterns: List of field names, may contain patterns like "belief_*" or "prob_0...3" - available_columns: List of column names available in the DataFrame - - Returns: - Expanded list of field names with patterns replaced by matching columns - """ - expanded: list[str] = [] - for pattern in field_patterns: - # Check if this is a pattern - if has_pattern(pattern): - # Extract the numeric pattern if it's a range - range_bounds = parse_range(pattern) - if range_bounds: - start, end = range_bounds - component_range = list(range(start, end)) - # Replace range pattern with each index - for idx in component_range: - expanded_name = substitute_pattern(pattern, idx) - if expanded_name in available_columns: - expanded.append(expanded_name) - else: - raise ConfigValidationError( - f"Preprocessing pattern '{pattern}' expanded to '{expanded_name}' " - f"but column not found in DataFrame. " - f"Available columns: {', '.join(sorted(available_columns))}" - ) - elif "*" in pattern: - # Wildcard pattern - find all matching columns - regex = build_wildcard_regex(pattern) - matches = [] - for col in available_columns: - match = regex.match(col) - if match: - # Extract the numeric part for sorting - try: - idx = int(match.group(1)) - matches.append((idx, col)) - except (IndexError, ValueError): - continue - if not matches: - raise ConfigValidationError( - f"Preprocessing pattern '{pattern}' did not match any columns in DataFrame. 
" - f"Available columns: {', '.join(sorted(available_columns))}" - ) - # Sort by index and add column names - matches.sort(key=lambda x: x[0]) - expanded.extend([col for _, col in matches]) - else: - raise ConfigValidationError(f"Invalid preprocessing field pattern: {pattern}") - else: - # Not a pattern, just add as-is - expanded.append(pattern) - - return expanded - - -def _apply_preprocessing(dataframe: pd.DataFrame, steps: list[ActivationVisualizationPreprocessStep]) -> pd.DataFrame: - """Apply preprocessing steps to a DataFrame.""" - result = dataframe.copy() - available_columns = list(result.columns) - - for step in steps: - # Validate output_fields don't contain patterns - for output_field in step.output_fields: - if "*" in output_field or "..." in output_field: - raise ConfigValidationError( - f"Preprocessing output_fields cannot contain patterns. Found: '{output_field}'" - ) - - # Expand input_fields patterns - expanded_input_fields = _expand_preprocessing_fields(step.input_fields, available_columns) - - # Create a modified step with expanded fields - expanded_step = ActivationVisualizationPreprocessStep( - type=step.type, input_fields=expanded_input_fields, output_fields=step.output_fields - ) - - if step.type == "project_to_simplex": - result = _project_to_simplex(result, expanded_step) - elif step.type == "combine_rgb": - result = _combine_rgb(result, expanded_step) - else: # pragma: no cover - defensive for future types - raise ConfigValidationError(f"Unsupported preprocessing op '{step.type}'") - - # Update available columns for next step - available_columns = list(result.columns) - - return result - - -def _project_to_simplex(dataframe: pd.DataFrame, step: ActivationVisualizationPreprocessStep) -> pd.DataFrame: - """Project 3D probability coordinates to 2D simplex coordinates.""" - required = step.input_fields - for column in required: - if column not in dataframe: - raise ConfigValidationError( - f"Preprocessing step requires column '{column}' but it 
is missing from the dataframe." - ) - _, p1, p2 = (dataframe[col].astype(float) for col in required) - x = p1 + 0.5 * p2 - y = (np.sqrt(3.0) / 2.0) * p2 - dataframe[step.output_fields[0]] = x - dataframe[step.output_fields[1]] = y - return dataframe - - -def _combine_rgb(dataframe: pd.DataFrame, step: ActivationVisualizationPreprocessStep) -> pd.DataFrame: - """Combine input fields into RGB color values. - - Supports either: - - 3 input fields: Directly map to R, G, B channels - - >3 input fields: Project to 3D via PCA, then map to RGB - """ - # ---- Validation ---- - # Note: input_fields have already been expanded by _expand_preprocessing_fields() - # at this point, so we just validate the expanded result - if len(step.output_fields) != 1: - raise ConfigValidationError("combine_rgb requires exactly one output_field.") - if len(step.input_fields) < 3: - raise ConfigValidationError("combine_rgb requires at least three input_fields.") - - # Make sure all input columns exist - for field in step.input_fields: - if field not in dataframe: - raise ConfigValidationError(f"combine_rgb requires column '{field}' but it is missing from the dataframe.") - - def _channel_to_int(series: pd.Series) -> pd.Series: - return (series.clip(0.0, 1.0) * 255).round().astype(int) - - # ---- Case 1: exactly 3 inputs -> normalize to [0, 1] then map to RGB ---- - if len(step.input_fields) == 3: - rgb = dataframe[list(step.input_fields)].to_numpy(dtype=float) - mins = rgb.min(axis=0) - maxs = rgb.max(axis=0) - ranges = maxs - mins - ranges_safe = np.where(ranges > 0, ranges, 1.0) - rgb = (rgb - mins) / ranges_safe - rgb[:, ranges == 0] = 0.5 - - r_vals = _channel_to_int(pd.Series(rgb[:, 0], index=dataframe.index)) - g_vals = _channel_to_int(pd.Series(rgb[:, 1], index=dataframe.index)) - b_vals = _channel_to_int(pd.Series(rgb[:, 2], index=dataframe.index)) - - # ---- Case 2: >3 inputs -> PCA to 3D, then map to RGB ---- - else: - import jax.numpy as jnp - - # Stack the selected columns into an 
(n_samples, n_features) matrix - X_np = dataframe[step.input_fields].to_numpy(dtype=float) - X_jax = jnp.asarray(X_np) - - # Unweighted PCA (weights=None) to up to 3 components - # We pass n_components=3, but compute_weighted_pca will cap it at min(n_samples, n_features) - # via its own logic if you change it to allow that, or you can just pass None and slice. - pca_res = compute_weighted_pca( - X_jax, - n_components=None, # let it pick max_rank - weights=None, - center=True, - ) - - # Get projected coordinates, shape: (n_samples, k) where k = max_rank - proj = np.asarray(pca_res["X_proj"]) # convert from jax.Array to numpy - - # Ensure we have 3 channels: take first 3 components, pad with zeros if fewer - if proj.shape[1] >= 3: - proj3 = proj[:, :3] - else: - # This is rare (happens when n_samples < 3). Pad extra dims with zeros. - pad_width = 3 - proj.shape[1] - proj3 = np.pad(proj, ((0, 0), (0, pad_width)), mode="constant") - - # Min-max normalize each component to [0, 1] across the dataset - mins = proj3.min(axis=0) - maxs = proj3.max(axis=0) - ranges = maxs - mins - # Avoid divide-by-zero: if range is 0, just leave that channel at 0.5 - ranges_safe = np.where(ranges > 0, ranges, 1.0) - colors = (proj3 - mins) / ranges_safe - colors[:, ranges == 0] = 0.5 - - colors = np.clip(colors, 0.0, 1.0) - - # Turn into Series so we can reuse _channel_to_int - r_vals = _channel_to_int(pd.Series(colors[:, 0], index=dataframe.index)) - g_vals = _channel_to_int(pd.Series(colors[:, 1], index=dataframe.index)) - b_vals = _channel_to_int(pd.Series(colors[:, 2], index=dataframe.index)) - - # ---- Build hex color column ---- - dataframe[step.output_fields[0]] = [ - f"#{rv:02x}{gv:02x}{bv:02x}" for rv, gv, bv in zip(r_vals, g_vals, b_vals, strict=False) - ] - return dataframe - - -__all__ = [ - "_apply_preprocessing", - "_combine_rgb", - "_expand_preprocessing_fields", - "_project_to_simplex", -] diff --git a/simplexity/activations/visualization_configs.py 
b/simplexity/activations/visualization_configs.py deleted file mode 100644 index 027ad45b..00000000 --- a/simplexity/activations/visualization_configs.py +++ /dev/null @@ -1,364 +0,0 @@ -"""Structured configuration objects for activation visualizations.""" - -from __future__ import annotations - -from collections.abc import Mapping -from dataclasses import dataclass, field, fields, is_dataclass -from typing import Any, Literal, TypeVar, cast, get_args, get_origin, get_type_hints - -from omegaconf import DictConfig, OmegaConf - -from simplexity.activations.visualization.pattern_utils import is_valid_range -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.structured_configs import ( - DataConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - -FieldSource = Literal["arrays", "scalars", "belief_states", "weights", "metadata", "scalar_pattern", "scalar_history"] -ReducerType = Literal["argmax", "l2_norm"] - -T = TypeVar("T") - - -def _dict_to_visualization_dataclass[T](data: dict[str, Any], schema: type[T]) -> T: - """Convert a dict to a visualization dataclass instance. - - This is a simplified converter specifically for visualization config dataclasses. - It handles nested dataclasses, lists, dicts, and optional fields. 
- """ - if not is_dataclass(schema): - raise TypeError(f"Expected a dataclass type, got {schema}") - - try: - type_hints = get_type_hints(schema) - except (NameError, TypeError): - type_hints = {f.name: f.type for f in fields(schema)} - - kwargs: dict[str, Any] = {} - for f in fields(schema): - if f.name not in data: - continue - value = data[f.name] - field_type = type_hints.get(f.name, f.type) - kwargs[f.name] = _convert_field_value(value, field_type) - - return schema(**kwargs) - - -def _convert_field_value(value: Any, field_type: Any) -> Any: - """Convert a field value based on its type annotation.""" - if value is None: - return None - - origin = get_origin(field_type) - args = get_args(field_type) - - # Handle list[T] - if origin is list: - item_type = args[0] if args else Any - if is_dataclass(item_type) and isinstance(item_type, type): - return [ - item if isinstance(item, item_type) else _dict_to_visualization_dataclass(item, item_type) - for item in value - ] - return list(value) - - # Handle dict[K, V] - if origin is dict: - value_type = args[1] if len(args) > 1 else Any - if is_dataclass(value_type) and isinstance(value_type, type): - return { - k: v if isinstance(v, value_type) else _dict_to_visualization_dataclass(v, value_type) - for k, v in value.items() - } - return dict(value) - - # Handle Optional[T] / T | None (any Union containing None) - if type(None) in args: - non_none_args = [arg for arg in args if arg is not type(None)] - if non_none_args: - return _convert_field_value(value, non_none_args[0]) - return value - - # Handle nested dataclass - if is_dataclass(field_type) and isinstance(field_type, type): - if isinstance(value, field_type): - return value - if isinstance(value, dict): - return _dict_to_visualization_dataclass(value, field_type) - return value - - return value - - -@dataclass -class ScalarSeriesMapping: - """Describe how to unfold indexed scalar metrics into long-format (tidy) dataframe. 
- - This is used for plotting scalar values over an index dimension (e.g., cumulative - variance vs. component count). For adding scalar values as columns to existing data, - use wildcard mappings instead: `mappings: {rmse: {source: scalars, key: "layer_0_rmse"}}`. - """ - - key_template: str - index_field: str - value_field: str - index_values: list[int] | None = None - - def __post_init__(self) -> None: - if "{layer}" not in self.key_template: - raise ConfigValidationError("scalar_series.key_template must include '{layer}' placeholder") - if "{index}" not in self.key_template: - raise ConfigValidationError("scalar_series.key_template must include '{index}' placeholder") - if self.index_values is not None and not self.index_values: - raise ConfigValidationError("scalar_series.index_values must not be empty") - - -@dataclass -class ActivationVisualizationFieldRef: - """Map a DataFrame column to a specific activation artifact.""" - - source: FieldSource - key: str | None = None - component: int | str | None = None - reducer: ReducerType | None = None - group_as: str | list[str] | None = None - factor: int | str | None = None # For selecting factor in factored belief states (3D arrays) - _group_value: str | None = None # Internal: populated during key/factor pattern expansion - - def __post_init__(self) -> None: - if self.source == "arrays" and not self.key: - raise ConfigValidationError("Projection field references must specify the `key` to read from.") - if self.source == "scalars" and not self.key: - raise ConfigValidationError("Scalar field references must specify the `key` to read from.") - if self.source == "scalar_pattern" and not self.key: - raise ConfigValidationError("Scalar pattern field references must specify the `key` to read from.") - if self.source == "scalar_history" and not self.key: - raise ConfigValidationError("Scalar history field references must specify the `key` to read from.") - if self.source == "metadata" and not self.key: - raise 
ConfigValidationError("Metadata field references must specify the `key` to read from.") - - if isinstance(self.component, str): - if self.component != "*" and not is_valid_range(self.component): - raise ConfigValidationError(f"Component pattern '{self.component}' invalid. Use '*' or 'N...M'") - if self.source not in ("arrays", "belief_states"): - raise ConfigValidationError( - f"Component patterns only supported for arrays/belief_states, not '{self.source}'" - ) - - # Validate key patterns for arrays - if self.source == "arrays" and self.key: - has_key_pattern = "*" in self.key or is_valid_range(self.key) - # Key patterns require group_as to name the resulting column(s) - if has_key_pattern and self.group_as is None: - raise ConfigValidationError( - f"Projection key pattern '{self.key}' requires `group_as` to name the expanded column(s)" - ) - - # Validate factor field (only for belief_states) - if self.factor is not None: - if self.source != "belief_states": - raise ConfigValidationError(f"`factor` is only supported for belief_states, not '{self.source}'") - if isinstance(self.factor, str): - has_factor_pattern = self.factor == "*" or is_valid_range(self.factor) - if has_factor_pattern and self.group_as is None: - raise ConfigValidationError( - f"Factor pattern '{self.factor}' requires `group_as` to name the expanded column(s)" - ) - - # Validate group_as - if self.group_as is not None and self.source not in ("arrays", "belief_states"): - raise ConfigValidationError(f"`group_as` is only supported for arrays/belief_states, not '{self.source}'") - - -@dataclass -class SamplingConfig: - """Configuration for sampling DataFrame rows to limit visualization size. - - When max_points is set, the DataFrame is sampled down to at most max_points - rows per facet group (e.g., per layer, factor, or data_type combination). - This ensures even distribution across subplots. 
- """ - - max_points: int | None = None - seed: int | None = None - - def __post_init__(self) -> None: - if self.max_points is not None and self.max_points <= 0: - raise ConfigValidationError("sampling.max_points must be a positive integer") - - -@dataclass -class CombinedMappingSection: - """A labeled section of field mappings for combining multiple data sources. - - Used to combine arrays and ground truth belief states into a single - DataFrame with a label column for faceting (e.g., row faceting by data_type). - """ - - label: str - mappings: dict[str, ActivationVisualizationFieldRef] = field(default_factory=dict) - - def __post_init__(self) -> None: - if not self.mappings: - raise ConfigValidationError(f"Combined mapping section '{self.label}' must have at least one mapping.") - - -@dataclass -class ActivationVisualizationDataMapping: - """Describe how to build the pandas DataFrame prior to rendering.""" - - mappings: dict[str, ActivationVisualizationFieldRef] = field(default_factory=dict) - scalar_series: ScalarSeriesMapping | None = None - combined: list[CombinedMappingSection] | None = None # For combining multiple data sources - combine_as: str | None = None # Column name for section labels (e.g., "data_type") - sampling: SamplingConfig | None = None # Optional sampling to limit visualization size - - def __post_init__(self) -> None: - has_mappings = bool(self.mappings) - has_scalar_series = self.scalar_series is not None - has_combined = self.combined is not None and len(self.combined) > 0 - - if not has_mappings and not has_scalar_series and not has_combined: - raise ConfigValidationError( - "Activation visualization data mapping must include at least one of: " - "mappings, scalar_series, or combined sections." - ) - - if has_combined: - if has_mappings: - raise ConfigValidationError( - "Cannot use both 'mappings' and 'combined' in the same data_mapping. " - "Use 'combined' for multi-source visualizations." 
- ) - if self.combine_as is None: - raise ConfigValidationError( - "'combine_as' is required when using 'combined' sections to specify the label column name." - ) - - -@dataclass -class ActivationVisualizationPreprocessStep: - """Preprocessing directives applied after the base DataFrame is built.""" - - type: Literal["project_to_simplex", "combine_rgb"] - input_fields: list[str] - output_fields: list[str] - - def __post_init__(self) -> None: - # Check if any input fields contain patterns (wildcards or ranges) - has_pattern = any("*" in field or "..." in field for field in self.input_fields) - - if self.type == "project_to_simplex": - # Skip input validation if patterns present (will be validated at runtime) - if not has_pattern and len(self.input_fields) != 3: - raise ConfigValidationError("project_to_simplex requires exactly three input_fields.") - if len(self.output_fields) != 2: - raise ConfigValidationError("project_to_simplex requires exactly two output_fields.") - elif self.type == "combine_rgb": - # Skip input validation if patterns present (will be validated at runtime) - if not has_pattern and len(self.input_fields) < 3: - raise ConfigValidationError("combine_rgb requires at least three input_fields.") - if len(self.output_fields) != 1: - raise ConfigValidationError("combine_rgb requires exactly one output_field.") - - -@dataclass -class ActivationVisualizationControlsConfig: - """Optional control metadata to drive interactive front-ends.""" - - slider: str | None = None - dropdown: str | None = None - toggle: str | None = None - cumulative: bool = False - accumulate_steps: bool = False - - def __post_init__(self) -> None: - if self.accumulate_steps and self.slider == "step": - raise ConfigValidationError( - "controls.accumulate_steps cannot be used together with slider targeting 'step'." 
- ) - - -@dataclass -class ActivationVisualizationConfig: - """Full specification for an analysis-attached visualization.""" - - name: str - data_mapping: ActivationVisualizationDataMapping - backend: str | None = None - plot: PlotConfig | None = None - layer: LayerConfig | None = None - size: PlotSizeConfig | None = None - guides: PlotLevelGuideConfig | None = None - preprocessing: list[ActivationVisualizationPreprocessStep] = field(default_factory=list) - controls: ActivationVisualizationControlsConfig | None = None - - def resolve_plot_config(self, default_backend: str) -> PlotConfig: - """Return a PlotConfig constructed from either `plot` or shorthand fields.""" - if self.plot is not None: - plot_cfg = self.plot - elif self.layer is not None: - plot_cfg = PlotConfig( - backend=self.backend or default_backend, - layers=[self.layer], - size=self.size or PlotSizeConfig(), - guides=self.guides or PlotLevelGuideConfig(), - ) - else: - raise ConfigValidationError( - f"Visualization '{self.name}' must specify either a PlotConfig (`plot`) or a single `layer`." 
- ) - - if plot_cfg.data is None: - plot_cfg.data = DataConfig(source="main") - else: - plot_cfg.data.source = plot_cfg.data.source or "main" - plot_cfg.backend = self.backend or plot_cfg.backend - if self.size is not None: - plot_cfg.size = self.size - if self.guides is not None: - plot_cfg.guides = self.guides - if any(step.type == "combine_rgb" for step in self.preprocessing) and plot_cfg.backend != "plotly": - raise ConfigValidationError("combine_rgb preprocessing requires backend='plotly'") - return plot_cfg - - -def _to_dict(cfg: Mapping[str, Any] | DictConfig) -> dict[str, Any]: - """Convert OmegaConf or Mapping to a plain dict.""" - if isinstance(cfg, DictConfig): - container = OmegaConf.to_container(cfg, resolve=False) - return cast(dict[str, Any], container) if isinstance(container, dict) else {} - if isinstance(cfg, dict): - return cfg - return dict(cfg) - - -def build_activation_visualization_config(raw_cfg: Mapping[str, Any]) -> ActivationVisualizationConfig: - """Convert a dictionary/OmegaConf config into an ActivationVisualizationConfig dataclass.""" - if isinstance(raw_cfg, ActivationVisualizationConfig): - return raw_cfg - - config_dict = _to_dict(raw_cfg) - - if config_dict.get("data_mapping") is None: - raise ConfigValidationError("Visualization config must include a data_mapping block.") - - return _dict_to_visualization_dataclass(config_dict, ActivationVisualizationConfig) - - -__all__ = [ - "ActivationVisualizationConfig", - "ActivationVisualizationControlsConfig", - "ActivationVisualizationDataMapping", - "ActivationVisualizationFieldRef", - "ActivationVisualizationPreprocessStep", - "CombinedMappingSection", - "SamplingConfig", - "ScalarSeriesMapping", - "build_activation_visualization_config", -] diff --git a/simplexity/activations/visualization_persistence.py b/simplexity/activations/visualization_persistence.py deleted file mode 100644 index f7a8b0ef..00000000 --- a/simplexity/activations/visualization_persistence.py +++ /dev/null @@ 
-1,104 +0,0 @@ -"""Persistence helpers for activation visualization payloads.""" - -from __future__ import annotations - -from collections.abc import Mapping -from pathlib import Path - -import pandas as pd -import plotly.graph_objects as go - -from simplexity.activations.activation_visualizations import ( - ActivationVisualizationPayload, - render_visualization, -) -from simplexity.visualization.history import ( - history_paths, - load_history_dataframe, - plot_config_signature, - save_history_dataframe, -) - - -def save_visualization_payloads( - visualizations: Mapping[str, ActivationVisualizationPayload], - root: Path, - step: int, -) -> Mapping[str, str]: - """Persist visualization payloads, accumulating history for slider controls. - - Non-accumulated visualizations are saved to step-specific directories: - root/analysis/step_XXXXX/name.html - - Accumulated visualizations (with slider on step) are saved to: - root/analysis/accumulated/name.html - """ - if not visualizations: - return {} - - figure_names_to_paths = {} - - for key, payload in visualizations.items(): - safe_name = key.replace("/", "_") - accumulated = _should_accumulate_steps(payload) - figure = _maybe_accumulate_history(payload, root, safe_name, step) - - if accumulated: - output_dir = root / payload.analysis / "accumulated" - else: - output_dir = root / payload.analysis / f"step_{step:05d}" - output_dir.mkdir(parents=True, exist_ok=True) - - output_path = output_dir / f"{payload.name}.html" - if isinstance(figure, go.Figure): - figure.write_html(str(output_path)) - else: - figure.save(str(output_path), format="html") - - figure_names_to_paths[key] = str(output_path) - - return figure_names_to_paths - - -def _maybe_accumulate_history( - payload: ActivationVisualizationPayload, - root: Path, - safe_name: str, - step: int, -): - if not _should_accumulate_steps(payload): - return payload.figure - - data_path, meta_path = history_paths(root, safe_name) - signature = 
plot_config_signature(payload.plot_config) - existing_df = load_history_dataframe(data_path, meta_path, expected_signature=signature) - new_rows = payload.dataframe.copy(deep=True) - if "step" in new_rows.columns: - new_rows["sequence_step"] = new_rows["step"] - new_rows["step"] = step - combined_df = pd.concat([existing_df, new_rows], ignore_index=True) if not existing_df.empty else new_rows - slider = payload.controls.slider if payload.controls else None - if slider and slider.field in combined_df.columns: - slider.options = list(pd.unique(combined_df[slider.field])) - save_history_dataframe( - combined_df, - data_path, - meta_path, - signature=signature, - analysis=payload.analysis, - name=payload.name, - backend=payload.backend, - ) - return render_visualization(payload.plot_config, combined_df, payload.controls) - - -def _should_accumulate_steps(payload: ActivationVisualizationPayload) -> bool: - if payload.controls is None: - return False - if getattr(payload.controls, "accumulate_steps", False): - return True - slider = payload.controls.slider - return slider is not None and slider.field == "step" - - -__all__ = ["save_visualization_payloads"] diff --git a/simplexity/visualization/altair_renderer.py b/simplexity/visualization/altair_renderer.py deleted file mode 100644 index 0834d632..00000000 --- a/simplexity/visualization/altair_renderer.py +++ /dev/null @@ -1,380 +0,0 @@ -"""Altair renderer for declarative visualization configs.""" - -from __future__ import annotations - -import logging -from collections.abc import Mapping -from typing import Any - -try: - import altair as alt # type: ignore [import-not-found] -except ImportError as exc: # pragma: no cover - dependency missing only in unsupported envs - raise ImportError("Altair is required for visualization rendering. 
Install `altair` to continue.") from exc - -import pandas as pd - -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.data_pipeline import ( - build_plot_level_dataframe, - resolve_layer_dataframe, -) -from simplexity.visualization.data_registry import DataRegistry -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - AxisConfig, - ChannelAestheticsConfig, - FacetConfig, - GeometryConfig, - LayerConfig, - LegendConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, - ScaleConfig, - SelectionConfig, -) - -LOGGER = logging.getLogger(__name__) - -_CHANNEL_CLASS_MAP = { - "x": "X", - "y": "Y", - "color": "Color", - "size": "Size", - "shape": "Shape", - "opacity": "Opacity", - "row": "Row", - "column": "Column", - "detail": "Detail", -} - - -def build_altair_chart( - plot_cfg: PlotConfig, - data_registry: DataRegistry | Mapping[str, pd.DataFrame], - controls: Any | None = None, -): - """Render a PlotConfig into an Altair Chart.""" - if not plot_cfg.layers: - raise ConfigValidationError("PlotConfig.layers must include at least one layer for Altair rendering.") - - plot_df = build_plot_level_dataframe(plot_cfg.data, plot_cfg.transforms, data_registry) - - layer_charts = [ - _build_layer_chart(layer, resolve_layer_dataframe(layer, plot_df, data_registry)) for layer in plot_cfg.layers - ] - layer_charts = _apply_accumulation_detail(layer_charts, plot_cfg.layers, plot_cfg, plot_df, controls) - - chart = layer_charts[0] if len(layer_charts) == 1 else alt.layer(*layer_charts) - - if plot_cfg.selections: - chart = chart.add_params(*[_build_selection_param(sel) for sel in plot_cfg.selections]) - - # Apply size before faceting (FacetChart doesn't support width/height properties) - chart = _apply_chart_size(chart, plot_cfg.size) - - if plot_cfg.facet: - chart = _apply_facet(chart, plot_cfg.facet) - - chart = _apply_plot_level_properties(chart, plot_cfg.guides, plot_cfg.size, plot_cfg.background) - chart = 
_apply_chart_controls(chart, controls) - chart = _apply_default_legend_interactivity(chart, plot_cfg.layers) - - return chart - - -def _build_layer_chart(layer: LayerConfig, df: pd.DataFrame): - chart = alt.Chart(df) - chart = _apply_geometry(chart, layer.geometry) - encoding_kwargs = _encode_aesthetics(layer.aesthetics) - if encoding_kwargs: - chart = chart.encode(**encoding_kwargs) - if layer.selections: - chart = chart.add_params(*[_build_selection_param(sel) for sel in layer.selections]) - return chart - - -def _apply_geometry(chart, geometry: GeometryConfig): - mark_name = f"mark_{geometry.type}" - if not hasattr(chart, mark_name): - raise ConfigValidationError(f"Altair chart does not support geometry type '{geometry.type}'") - mark_fn = getattr(chart, mark_name) - return mark_fn(**(geometry.props or {})) - - -def _encode_aesthetics(aesthetics: AestheticsConfig) -> dict[str, Any]: - encodings: dict[str, Any] = {} - for channel_name in ("x", "y", "color", "size", "shape", "opacity", "row", "column", "detail"): - channel_cfg = getattr(aesthetics, channel_name) - channel_value = _channel_to_alt(channel_name, channel_cfg) - if channel_value is not None: - encodings[channel_name] = channel_value - - if aesthetics.tooltip: - encodings["tooltip"] = [_tooltip_to_alt(tooltip_cfg) for tooltip_cfg in aesthetics.tooltip] - - return encodings - - -def _channel_to_alt(channel_name: str, cfg: ChannelAestheticsConfig | None): - if cfg is None: - return None - if cfg.value is not None and cfg.field is None: - return alt.value(cfg.value) - channel_cls_name = _CHANNEL_CLASS_MAP[channel_name] - channel_cls = getattr(alt, channel_cls_name) - kwargs: dict[str, Any] = {} - if cfg.field: - kwargs["field"] = cfg.field - if cfg.type: - kwargs["type"] = cfg.type - if cfg.title: - kwargs["title"] = cfg.title - if cfg.aggregate: - kwargs["aggregate"] = cfg.aggregate - if cfg.bin is not None: - kwargs["bin"] = cfg.bin - if cfg.time_unit: - kwargs["timeUnit"] = cfg.time_unit - if cfg.sort 
is not None: - kwargs["sort"] = alt.Sort(cfg.sort) if isinstance(cfg.sort, list) else cfg.sort - if cfg.scale: - kwargs["scale"] = _scale_to_alt(cfg.scale) - if cfg.axis and channel_name in {"x", "y", "row", "column"}: - kwargs["axis"] = _axis_to_alt(cfg.axis) - if cfg.legend and channel_name in {"color", "size", "shape", "opacity"}: - if cfg.legend.visible is False: - kwargs["legend"] = None - else: - kwargs["legend"] = _legend_to_alt(cfg.legend) - return channel_cls(**kwargs) - - -def _tooltip_to_alt(cfg: ChannelAestheticsConfig): - if cfg.value is not None and cfg.field is None: - return alt.Tooltip(value=cfg.value, title=cfg.title) - if cfg.field is None: - raise ConfigValidationError("Tooltip channels must set either a field or a constant value.") - - kwargs: dict[str, Any] = {"field": cfg.field} - if cfg.type: - kwargs["type"] = cfg.type - if cfg.title: - kwargs["title"] = cfg.title - return alt.Tooltip(**kwargs) - - -def _scale_to_alt(cfg: ScaleConfig): - kwargs = {k: v for k, v in vars(cfg).items() if v is not None} - return alt.Scale(**kwargs) - - -def _axis_to_alt(cfg: AxisConfig): - kwargs = {k: v for k, v in vars(cfg).items() if v is not None} - return alt.Axis(**kwargs) - - -def _legend_to_alt(cfg: LegendConfig): - kwargs = {k: v for k, v in vars(cfg).items() if v is not None} - return alt.Legend(**kwargs) - - -def _build_selection_param(cfg: SelectionConfig): - kwargs: dict[str, Any] = {} - if cfg.name: - kwargs["name"] = cfg.name - if cfg.encodings: - kwargs["encodings"] = cfg.encodings - if cfg.fields: - kwargs["fields"] = cfg.fields - if cfg.bind: - kwargs["bind"] = cfg.bind - if cfg.type == "interval": - return alt.selection_interval(**kwargs) - if cfg.type == "single": - return alt.selection_single(**kwargs) - if cfg.type == "multi": - return alt.selection_multi(**kwargs) - raise ConfigValidationError(f"Unsupported selection type '{cfg.type}' for Altair renderer.") - - -def _apply_facet(chart, facet_cfg: FacetConfig): - facet_args: dict[str, Any] 
= {} - if facet_cfg.row: - facet_args["row"] = alt.Row(facet_cfg.row) - if facet_cfg.column: - facet_args["column"] = alt.Column(facet_cfg.column) - if facet_cfg.wrap: - raise ConfigValidationError("FacetConfig.wrap is not yet implemented for Altair rendering.") - if not facet_args: - return chart - return chart.facet(**facet_args) - - -def _apply_chart_size(chart, size: PlotSizeConfig): - """Apply width/height to chart. Must be called before faceting.""" - width = size.width - height = size.height - if width is not None or height is not None: - chart = chart.properties(width=width, height=height) - return chart - - -def _apply_plot_level_properties(chart, guides: PlotLevelGuideConfig, size: PlotSizeConfig, background: str | None): - title_params = _build_title_params(guides) - if title_params is not None: - chart = chart.properties(title=title_params) - if size.autosize: - chart.autosize = size.autosize - if background: - chart = chart.configure(background=background) - if guides.labels: - LOGGER.info("Plot-level labels are not yet implemented for Altair; skipping %s labels.", len(guides.labels)) - return chart - - -def _apply_chart_controls(chart, controls: Any | None): - if not controls: - return chart - chart = _apply_dropdown_control(chart, getattr(controls, "dropdown", None)) - slider_detail = None if getattr(controls, "accumulate_steps", False) else getattr(controls, "slider", None) - chart = _apply_slider_control(chart, slider_detail) - return chart - - -def _apply_dropdown_control(chart, dropdown): - field_name = getattr(dropdown, "field", None) - if dropdown and field_name == "layer": - options = [_normalize_control_value(value) for value in getattr(dropdown, "options", []) or []] - if len(options) > 1: - binding = alt.binding_select(options=options, name="Layer: ") - param = alt.param(name=f"{field_name}_dropdown", bind=binding, value=options[0]) - # Include layer-independent rows (layer == "_no_layer_") along with selected layer - filter_expr = 
f"(datum.{field_name} == {param.name}) || (datum.{field_name} == '_no_layer_')" - return chart.add_params(param).transform_filter(filter_expr) - return chart - - -def _apply_slider_control(chart, slider): - field_name = getattr(slider, "field", None) - options = [_normalize_control_value(value) for value in getattr(slider, "options", []) or []] - if not slider or not field_name or len(options) <= 1: - return chart - - numeric_options = _numeric_control_values(options) - if numeric_options: - min_val, max_val = numeric_options[0], numeric_options[-1] - step = _infer_slider_step(numeric_options) - binding = alt.binding_range(min=min_val, max=max_val, step=step, name=f"{field_name}: ") - initial_value = numeric_options[0] - else: - binding = alt.binding_select(options=options, name=f"{field_name}: ") - initial_value = options[0] - - param = alt.param(name=f"{field_name}_slider", bind=binding, value=initial_value) - return chart.add_params(param).transform_filter(f"datum.{field_name} == {param.name}") - - -def _apply_default_legend_interactivity(chart, layers: list[LayerConfig]): - if not layers: - return chart - # FacetChart doesn't support encode() - skip legend interactivity for faceted charts - if isinstance(chart, alt.FacetChart): - return chart - color_fields: set[str] = set() - for layer in layers: - aesthetics = layer.aesthetics - if aesthetics and aesthetics.color and aesthetics.color.field: - color_fields.add(aesthetics.color.field) - if len(color_fields) != 1: - return chart - if any(layer.aesthetics and layer.aesthetics.opacity is not None for layer in layers): - return chart - field_name = next(iter(color_fields)) - legend_selection = alt.selection_multi(fields=[field_name], bind="legend", toggle=True, empty="all") - chart = chart.add_params(legend_selection) - opacity_encoding = alt.condition(legend_selection, alt.value(1.0), alt.value(0.05)) - return chart.encode(opacity=opacity_encoding) - - -def _normalize_control_value(value): - return value.item() if 
hasattr(value, "item") else value - - -def _numeric_control_values(options: list[Any]) -> list[float]: - numeric: list[float] = [] - for value in options: - try: - numeric.append(float(value)) - except (TypeError, ValueError): - return [] - numeric = sorted(dict.fromkeys(numeric)) - return numeric - - -def _infer_slider_step(values: list[float]) -> float: - if len(values) < 2: - return 1.0 - diffs = [round(values[idx + 1] - values[idx], 10) for idx in range(len(values) - 1)] - # Use smallest positive difference or default to 1.0 - step = min((diff for diff in diffs if diff > 0), default=1.0) - return step - - -def _apply_accumulation_detail(layer_charts, layers, plot_cfg, plot_df: pd.DataFrame, controls: Any | None): - if not controls or not getattr(controls, "accumulate_steps", False): - return layer_charts - if "step" not in plot_df.columns: - return layer_charts - updated = [] - for chart, layer_cfg in zip(layer_charts, layers, strict=False): - aesthetics = layer_cfg.aesthetics - if aesthetics and aesthetics.detail is not None: - updated.append(chart) - continue - if _layer_references_field(layer_cfg, "step"): - updated.append(chart) - continue - updated.append(chart.encode(detail=alt.Detail(field="step", type="ordinal"))) - return updated - - -def _layer_references_field(layer_cfg: LayerConfig, field: str) -> bool: - aesthetics = layer_cfg.aesthetics - if not aesthetics: - return False - - channel_names = [ - "x", - "y", - "x2", - "y2", - "color", - "stroke", - "strokeDash", - "size", - "shape", - "tooltip", - ] - for name in channel_names: - channel = getattr(aesthetics, name, None) - if channel is None: - continue - # tooltip can be list-like - if isinstance(channel, list): - for entry in channel: - if getattr(entry, "field", None) == field: - return True - continue - if getattr(channel, "field", None) == field: - return True - return False - - -def _build_title_params(guides: PlotLevelGuideConfig): - subtitle_lines = [text for text in (guides.subtitle, 
guides.caption) if text] - if not guides.title and not subtitle_lines: - return None - if subtitle_lines: - return alt.TitleParams(text=guides.title or "", subtitle=subtitle_lines) - return guides.title diff --git a/simplexity/visualization/data_pipeline.py b/simplexity/visualization/data_pipeline.py deleted file mode 100644 index be7d88cf..00000000 --- a/simplexity/visualization/data_pipeline.py +++ /dev/null @@ -1,194 +0,0 @@ -"""Reusable helpers for preparing data prior to rendering.""" - -from __future__ import annotations - -import math -from collections.abc import Mapping - -import numpy as np -import pandas as pd - -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.data_registry import DataRegistry, resolve_data_source -from simplexity.visualization.structured_configs import ( - DataConfig, - LayerConfig, - TransformConfig, -) - -CALC_ENV = { - "np": np, - "pd": pd, - "math": math, - "log": np.log, - "exp": np.exp, - "sqrt": np.sqrt, - "abs": np.abs, - "clip": np.clip, -} - - -def normalize_expression(expr: str) -> str: - """Normalize expressions shared between pandas and Vega-Lite syntaxes.""" - return expr.replace("datum.", "").strip() - - -def materialize_data(data_cfg: DataConfig, data_registry: DataRegistry | Mapping[str, pd.DataFrame]) -> pd.DataFrame: - """Resolve a logical data source and apply lightweight filters/column selection.""" - df = resolve_data_source(data_cfg.source, data_registry).copy() - if data_cfg.filters: - df = apply_filters(df, data_cfg.filters) - if data_cfg.columns: - missing = [col for col in data_cfg.columns if col not in df.columns] - if missing: - raise ConfigValidationError(f"Columns {missing} are not present in data source '{data_cfg.source}'") - df = df.loc[:, data_cfg.columns] - return df - - -def build_plot_level_dataframe( - data_cfg: DataConfig, - transforms: list[TransformConfig], - data_registry: DataRegistry | Mapping[str, pd.DataFrame], -) -> pd.DataFrame: - """Materialize the 
base dataframe for a plot, applying plot-level transforms.""" - df = materialize_data(data_cfg, data_registry) - return apply_transforms(df, transforms) - - -def resolve_layer_dataframe( - layer: LayerConfig, - plot_df: pd.DataFrame, - data_registry: DataRegistry | Mapping[str, pd.DataFrame], -) -> pd.DataFrame: - """Resolve the dataframe for an individual layer.""" - if layer.data is None: - df = plot_df.copy() - else: - df = materialize_data(layer.data, data_registry) - if layer.transforms: - df = apply_transforms(df, layer.transforms) - return df - - -def apply_filters(df: pd.DataFrame, filters: list[str]) -> pd.DataFrame: - """Apply pandas-compatible query filters.""" - result = df.copy() - for expr in filters: - norm_expr = normalize_expression(expr) - result = result.query(norm_expr, engine="python", local_dict=CALC_ENV) - return result - - -def apply_transforms(df: pd.DataFrame, transforms: list[TransformConfig]) -> pd.DataFrame: - """Sequentially apply configured transforms to a dataframe.""" - result = df.copy() - for transform in transforms: - result = _apply_transform(result, transform) - return result - - -def _apply_transform(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: - if transform.op == "filter": - if transform.filter is None: - raise ConfigValidationError("Filter transforms require the `filter` expression.") - return apply_filters(df, [transform.filter]) - if transform.op == "calculate": - return _apply_calculate(df, transform) - if transform.op == "aggregate": - return _apply_aggregate(df, transform) - if transform.op == "bin": - return _apply_bin(df, transform) - if transform.op == "window": - return _apply_window(df, transform) - if transform.op == "fold": - return _apply_fold(df, transform) - if transform.op == "pivot": - raise ConfigValidationError("Pivot transforms are not implemented yet.") - raise ConfigValidationError(f"Unsupported transform operation '{transform.op}'") - - -def _apply_calculate(df: pd.DataFrame, 
transform: TransformConfig) -> pd.DataFrame: - expr = normalize_expression(transform.expr or "") - target = transform.as_field or "" - if not target: - raise ConfigValidationError("TransformConfig.as_field is required for calculate transforms") - result = df.copy() - result[target] = result.eval(expr, engine="python", local_dict=CALC_ENV) - return result - - -def _apply_aggregate(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: - groupby = transform.groupby or [] - aggregations = transform.aggregations or {} - if not groupby or not aggregations: - raise ConfigValidationError("Aggregate transforms require `groupby` and `aggregations` fields.") - - agg_kwargs: dict[str, tuple[str, str]] = {} - for alias, expr in aggregations.items(): - func, field = _parse_function_expr(expr, expected_arg=True) - agg_kwargs[alias] = (field, func) - - grouped = df.groupby(groupby, dropna=False).agg(**agg_kwargs).reset_index() - return grouped - - -def _apply_bin(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: - if not transform.field or not transform.binned_as: - raise ConfigValidationError("Bin transforms require `field` and `binned_as`.") - bins = transform.maxbins or 10 - result = df.copy() - result[transform.binned_as] = pd.cut(result[transform.field], bins=bins, include_lowest=True) - return result - - -def _apply_window(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: - if not transform.window: - raise ConfigValidationError("Window transforms require the `window` mapping.") - result = df.copy() - for alias, expr in transform.window.items(): - func, field = _parse_function_expr(expr, expected_arg=True) - if func == "rank": - result[alias] = result[field].rank(method="average") - elif func == "cumsum": - result[alias] = result[field].cumsum() - else: - raise ConfigValidationError(f"Window function '{func}' is not supported.") - return result - - -def _apply_fold(df: pd.DataFrame, transform: TransformConfig) -> pd.DataFrame: - if not 
transform.fold_fields: - raise ConfigValidationError("Fold transforms require `fold_fields`.") - var_name, value_name = _derive_fold_names(transform.as_fields) - return df.melt(value_vars=transform.fold_fields, var_name=var_name, value_name=value_name) - - -def _parse_function_expr(expr: str, expected_arg: bool) -> tuple[str, str]: - if "(" not in expr or not expr.endswith(")"): - raise ConfigValidationError(f"Expression '{expr}' must be of the form func(field).") - func, rest = expr.split("(", 1) - value = rest[:-1].strip() - func = func.strip() - if expected_arg and not value: - raise ConfigValidationError(f"Expression '{expr}' must supply an argument.") - return func, value - - -def _derive_fold_names(as_fields: list[str] | None) -> tuple[str, str]: - if not as_fields: - return "key", "value" - if len(as_fields) == 1: - return as_fields[0], "value" - return as_fields[0], as_fields[1] - - -__all__ = [ - "CALC_ENV", - "apply_filters", - "apply_transforms", - "build_plot_level_dataframe", - "materialize_data", - "normalize_expression", - "resolve_layer_dataframe", -] diff --git a/simplexity/visualization/data_registry.py b/simplexity/visualization/data_registry.py deleted file mode 100644 index c70f44c6..00000000 --- a/simplexity/visualization/data_registry.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Helpers for resolving logical visualization data sources.""" - -from __future__ import annotations - -from collections.abc import Mapping -from typing import Protocol - -import pandas as pd - - -class DataRegistry(Protocol): # pylint: disable=too-few-public-methods - """Protocol for registry objects that return pandas DataFrames.""" - - def get(self, source_name: str) -> pd.DataFrame: - """Return the DataFrame associated with ``source_name``.""" - ... 
# pylint: disable=unnecessary-ellipsis - - -class DictDataRegistry: # pylint: disable=too-few-public-methods - """Simple registry backed by an in-memory mapping.""" - - def __init__(self, data: Mapping[str, pd.DataFrame] | None = None) -> None: - self._data: dict[str, pd.DataFrame] = dict(data or {}) - - def get(self, source_name: str) -> pd.DataFrame: - """Get the DataFrame associated with ``source_name``.""" - try: - return self._data[source_name] - except KeyError as exc: # pragma: no cover - simple error wrapper - raise ValueError(f"Data source '{source_name}' is not registered") from exc - - -def resolve_data_source(source_name: str, data_registry: DataRegistry | Mapping[str, pd.DataFrame]) -> pd.DataFrame: - """Resolve a logical source name regardless of the registry implementation.""" - if isinstance(data_registry, Mapping): - if source_name not in data_registry: - raise ValueError(f"Data source '{source_name}' is not registered") - return data_registry[source_name] - return data_registry.get(source_name) diff --git a/simplexity/visualization/history.py b/simplexity/visualization/history.py deleted file mode 100644 index e57e657b..00000000 --- a/simplexity/visualization/history.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Utilities for persisting visualization history for interactive controls.""" - -from __future__ import annotations - -import dataclasses -import hashlib -import json -import logging -from pathlib import Path -from typing import Any - -import pandas as pd - -from simplexity.visualization.structured_configs import PlotConfig - -LOGGER = logging.getLogger(__name__) - -HISTORY_VERSION = 1 -HISTORY_DIRNAME = "history" -HISTORY_DATA_SUFFIX = ".jsonl" -HISTORY_META_SUFFIX = ".meta.json" - - -def history_paths(root: Path, safe_name: str) -> tuple[Path, Path]: - """Return the data and metadata file paths for a visualization history entry.""" - history_dir = root / HISTORY_DIRNAME - data_path = history_dir / f"{safe_name}{HISTORY_DATA_SUFFIX}" - meta_path = 
history_dir / f"{safe_name}{HISTORY_META_SUFFIX}" - return data_path, meta_path - - -def plot_config_signature(plot_cfg: PlotConfig) -> str: - """Create a stable hash of a PlotConfig to detect incompatible history files.""" - serialized = json.dumps( - dataclasses.asdict(plot_cfg), - sort_keys=True, - default=_serialize_unknown, - ) - return hashlib.sha256(serialized.encode("utf-8")).hexdigest() - - -def load_history_dataframe(data_path: Path, meta_path: Path, *, expected_signature: str) -> pd.DataFrame: - """Load previously saved visualization dataframe if metadata matches signature.""" - if not data_path.exists() or not meta_path.exists(): - return pd.DataFrame() - - try: - with meta_path.open(encoding="utf-8") as source: - metadata = json.load(source) - except json.JSONDecodeError: - LOGGER.warning("Visualization history metadata at %s is corrupted; ignoring existing history.", meta_path) - return pd.DataFrame() - - if metadata.get("version") != HISTORY_VERSION or metadata.get("signature") != expected_signature: - LOGGER.info("Visualization history metadata at %s is outdated or mismatched; starting fresh.", meta_path) - return pd.DataFrame() - - try: - return pd.read_json(data_path, orient="records", lines=True) - except ValueError: - LOGGER.warning("Visualization history data at %s is corrupted; ignoring existing history.", data_path) - return pd.DataFrame() - - -def save_history_dataframe( - dataframe: pd.DataFrame, - data_path: Path, - meta_path: Path, - *, - signature: str, - analysis: str, - name: str, - backend: str, -) -> None: - """Persist visualization dataframe and metadata for future accumulation.""" - data_path.parent.mkdir(parents=True, exist_ok=True) - dataframe.to_json(data_path, orient="records", lines=True) - metadata = { - "version": HISTORY_VERSION, - "analysis": analysis, - "name": name, - "backend": backend, - "signature": signature, - "rows": len(dataframe), - } - with meta_path.open("w", encoding="utf-8") as sink: - json.dump(metadata, 
sink, indent=2) - - -def _serialize_unknown(value: Any) -> str: - """Best-effort serialization hook for dataclasses.asdict JSON dumps.""" - if isinstance(value, Path): - return str(value) - return str(value) - - -__all__ = [ - "HISTORY_DIRNAME", - "HISTORY_DATA_SUFFIX", - "HISTORY_META_SUFFIX", - "history_paths", - "load_history_dataframe", - "plot_config_signature", - "save_history_dataframe", -] diff --git a/simplexity/visualization/plotly_renderer.py b/simplexity/visualization/plotly_renderer.py deleted file mode 100644 index 1f54d9ac..00000000 --- a/simplexity/visualization/plotly_renderer.py +++ /dev/null @@ -1,1338 +0,0 @@ -"""Plotly renderer for visualization PlotConfigs.""" - -from __future__ import annotations - -import logging -import re -from collections.abc import Mapping -from dataclasses import dataclass -from typing import Any, Literal - -import pandas as pd -import plotly.graph_objects as go -from plotly.colors import qualitative as qualitative_colors -from plotly.subplots import make_subplots - -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.data_pipeline import ( - build_plot_level_dataframe, - resolve_layer_dataframe, -) -from simplexity.visualization.data_registry import DataRegistry -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - FacetConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - -LOGGER = logging.getLogger(__name__) - -_HEX_COLOR_PATTERN = re.compile(r"^#([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$") - - -def build_plotly_figure( - plot_cfg: PlotConfig, - data_registry: DataRegistry | Mapping[str, pd.DataFrame], - controls: Any | None = None, -) -> go.Figure: - """Render a PlotConfig into a Plotly Figure (currently 3D scatter only).""" - if not plot_cfg.layers: - raise ConfigValidationError("PlotConfig.layers must include at least one layer for Plotly rendering.") - if len(plot_cfg.layers) != 1: - raise 
ConfigValidationError("Plotly renderer currently supports exactly one layer.") - - layer = plot_cfg.layers[0] - if layer.geometry.type != "point": - raise ConfigValidationError("Plotly renderer currently supports point geometry.") - - plot_df = build_plot_level_dataframe(plot_cfg.data, plot_cfg.transforms, data_registry) - layer_df = resolve_layer_dataframe(layer, plot_df, data_registry) - - # Handle faceting - if plot_cfg.facet: - figure = _build_faceted_figure(layer, layer_df, plot_cfg.facet, plot_cfg.size, controls) - # Use empty size config to avoid overwriting the computed facet dimensions - empty_size = PlotSizeConfig(width=None, height=None) - figure = _apply_plot_level_properties( - figure, plot_cfg.guides, empty_size, plot_cfg.background, layer.aesthetics - ) - return figure - - has_z = bool(layer.aesthetics and layer.aesthetics.z and layer.aesthetics.z.field) - if has_z: - figure = _build_scatter3d(layer, layer_df, controls) - else: - figure = _build_scatter2d(layer, layer_df, controls) - figure = _apply_plot_level_properties(figure, plot_cfg.guides, plot_cfg.size, plot_cfg.background, layer.aesthetics) - return figure - - -def _build_faceted_figure( - layer: LayerConfig, - df: pd.DataFrame, - facet_cfg: FacetConfig, - size_cfg: PlotSizeConfig, - controls: Any | None, -): - """Build a faceted subplot figure.""" - aes = layer.aesthetics - x_field = _require_field(aes.x, "x") - y_field = _require_field(aes.y, "y") - has_z = bool(aes.z and aes.z.field) - z_field = _require_field(aes.z, "z") if has_z else None - - row_field = facet_cfg.row - col_field = facet_cfg.column - - # Resolve controls - dropdown = _resolve_layer_dropdown(df, controls) - slider_enabled = not (getattr(controls, "accumulate_steps", False)) - slider = _resolve_slider_control(df, controls if slider_enabled else None) - - # Filter by initial layer if dropdown is active - # Keep rows that don't depend on layer (e.g., ground truth from belief states with layer="_no_layer_") - layer_field, 
layer_options = dropdown if dropdown else (None, [None]) - working_df: pd.DataFrame - if layer_field is None: - working_df = df - else: - layer_independent_filter = df.loc[df[layer_field] == "_no_layer_"] - layer_dependent_filter = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_options[0])] - working_df = pd.concat([layer_dependent_filter, layer_independent_filter], ignore_index=True) - - # Get unique values for faceting dimensions - # Use dict.fromkeys to deduplicate by string representation (handles mixed int/str types) - row_values: list[str | None] - col_values: list[str | None] - if row_field: - raw_row = list(pd.unique(working_df[row_field])) - row_values = list(dict.fromkeys(str(v) for v in sorted(raw_row, key=str))) - else: - row_values = [None] - if col_field: - raw_col = list(pd.unique(working_df[col_field])) - col_values = list(dict.fromkeys(str(v) for v in sorted(raw_col, key=str))) - else: - col_values = [None] - - n_rows = len(row_values) - n_cols = len(col_values) - - # Build subplot titles - subplot_titles = [] - for row_val in row_values: - for col_val in col_values: - if row_val is not None and col_val is not None: - subplot_titles.append(f"{col_val}") - elif col_val is not None: - subplot_titles.append(str(col_val)) - elif row_val is not None: - subplot_titles.append(str(row_val)) - else: - subplot_titles.append("") - - # Create subplot grid with appropriate specs for 2D or 3D - if has_z: - specs = [[{"type": "scene"} for _ in range(n_cols)] for _ in range(n_rows)] - fig = make_subplots( - rows=n_rows, - cols=n_cols, - subplot_titles=subplot_titles, - horizontal_spacing=0.02, - vertical_spacing=0.05, - specs=specs, - row_titles=[str(v) for v in row_values] if row_field else None, - ) - else: - fig = make_subplots( - rows=n_rows, - cols=n_cols, - subplot_titles=subplot_titles, - horizontal_spacing=0.05, - vertical_spacing=0.08, - row_titles=[str(v) for v in row_values] if row_field else None, - ) - - color_field = 
_optional_field(aes.color) - size_field_name = _optional_field(aes.size) - size_value = _resolve_size_value(aes.size) - opacity_value = _resolve_opacity(aes.opacity) - hover_fields = _collect_tooltip_fields(aes.tooltip) - color_map = _build_color_discrete_map(df, color_field, aes.color) - color_specs = _build_color_group_specs(df, color_field, aes.color, color_map) - - # Helper to build traces for a given filtered dataframe - def build_facet_traces(source_df: pd.DataFrame, show_legend: bool = True): - traces_by_cell: dict[tuple[int, int], list[Any]] = {} - for row_idx, row_val in enumerate(row_values, start=1): - for col_idx, col_val in enumerate(col_values, start=1): - cell_df = source_df.copy() - if row_field: - cell_df = cell_df.loc[cell_df[row_field].astype(str) == row_val] - if col_field: - cell_df = cell_df.loc[cell_df[col_field].astype(str) == col_val] - - if cell_df.empty: - traces_by_cell[(row_idx, col_idx)] = [] - continue - - if has_z: - assert z_field is not None - traces = _scatter3d_traces( - cell_df, - x_field, - y_field, - z_field, - color_field, - size_field_name, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - scene_idx = (row_idx - 1) * n_cols + col_idx - scene_name = "scene" if scene_idx == 1 else f"scene{scene_idx}" - for trace in traces: - trace.scene = scene_name - else: - traces = _scatter2d_traces( - cell_df, - x_field, - y_field, - color_field, - size_field_name, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - - # Control legend visibility - for trace in traces: - if not show_legend or row_idx > 1 or col_idx > 1: - trace.showlegend = False - - traces_by_cell[(row_idx, col_idx)] = traces - return traces_by_cell - - # Get slider values if slider is active - slider_field, slider_values = slider if slider else (None, [None]) - - if slider and layer_field: - assert slider_field is not None - # Both slider and dropdown: build traces 
for ALL layers with visibility control - layer_independent = df.loc[df[layer_field] == "_no_layer_"] - - trace_ranges: list[tuple[int, int]] = [] - trace_count = 0 - - for layer_idx, layer_opt in enumerate(layer_options): - layer_specific = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] - layer_df = pd.concat([layer_specific, layer_independent], ignore_index=True) - initial_df = layer_df.loc[layer_df[slider_field] == slider_values[0]] - - traces_by_cell = build_facet_traces(initial_df, show_legend=(layer_idx == 0)) - - start = trace_count - for (row_idx, col_idx), traces in sorted(traces_by_cell.items()): - for trace in traces: - trace.visible = layer_idx == 0 - fig.add_trace(trace, row=row_idx, col=col_idx) - trace_count += 1 - trace_ranges.append((start, trace_count)) - - # Build frames for ALL layers at each slider step - frames = [] - for step_val in slider_values: - frame_traces: list[Any] = [] - for layer_opt in layer_options: - layer_specific = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] - layer_df = pd.concat([layer_specific, layer_independent], ignore_index=True) - step_df = layer_df.loc[layer_df[slider_field] == step_val] - - traces_by_cell = build_facet_traces(step_df, show_legend=False) - for row_idx, col_idx in sorted(traces_by_cell.keys()): - frame_traces.extend(traces_by_cell[(row_idx, col_idx)]) - - frames.append(go.Frame(name=str(step_val), data=frame_traces)) - - fig.frames = frames - _add_slider_layout(fig, slider_field, slider_values) - - # Add layer dropdown using visibility toggling - if len(layer_options) > 1: - _add_layer_dropdown_menu(fig, layer_options, trace_ranges) - - elif slider: - assert slider_field is not None - # Slider only - initial_step = slider_values[0] if slider_values else None - initial_df = working_df - if initial_step is not None and slider_field in working_df.columns: - initial_df = working_df.loc[working_df[slider_field] == initial_step] - - traces_by_cell = 
build_facet_traces(initial_df) - for (row_idx, col_idx), traces in traces_by_cell.items(): - for trace in traces: - fig.add_trace(trace, row=row_idx, col=col_idx) - - # Build frames for slider animation - frames = [] - for step_val in slider_values: - step_filtered = working_df.loc[working_df[slider_field] == step_val] - frame_traces_by_cell = build_facet_traces(step_filtered, show_legend=False) - frame_traces: list[Any] = [] - for row_idx, col_idx in sorted(frame_traces_by_cell.keys()): - frame_traces.extend(frame_traces_by_cell[(row_idx, col_idx)]) - frames.append(go.Frame(name=str(step_val), data=frame_traces)) - fig.frames = frames - _add_slider_layout(fig, slider_field, slider_values) - - elif dropdown and len(layer_options) > 1: - assert layer_field is not None - # Dropdown only: build traces for ALL layers with visibility control - layer_independent = df.loc[df[layer_field] == "_no_layer_"] - - trace_ranges: list[tuple[int, int]] = [] - trace_count = 0 - - for layer_idx, layer_opt in enumerate(layer_options): - layer_specific = df.loc[(df[layer_field] != "_no_layer_") & (df[layer_field] == layer_opt)] - layer_df = pd.concat([layer_specific, layer_independent], ignore_index=True) - - traces_by_cell = build_facet_traces(layer_df, show_legend=(layer_idx == 0)) - - start = trace_count - for (row_idx, col_idx), traces in sorted(traces_by_cell.items()): - for trace in traces: - trace.visible = layer_idx == 0 - fig.add_trace(trace, row=row_idx, col=col_idx) - trace_count += 1 - trace_ranges.append((start, trace_count)) - - _add_layer_dropdown_menu(fig, layer_options, trace_ranges) - else: - # No controls - traces_by_cell = build_facet_traces(working_df) - for (row_idx, col_idx), traces in traces_by_cell.items(): - for trace in traces: - fig.add_trace(trace, row=row_idx, col=col_idx) - - # Apply size to individual subplots if specified - subplot_width = size_cfg.width or 200 - subplot_height = size_cfg.height or 200 - total_width = subplot_width * n_cols + 100 # 
Extra space for margins - total_height = subplot_height * n_rows + 100 - - fig.update_layout( - width=total_width, - height=total_height, - showlegend=True, - ) - - # For 3D, set axis titles, ranges, and aspect ratio on each scene - if has_z: - x_title = _axis_title(aes.x) - y_title = _axis_title(aes.y) - z_title = _axis_title(aes.z) - x_range = _axis_domain(aes.x) - y_range = _axis_domain(aes.y) - z_range = _axis_domain(aes.z) - for row_idx in range(1, n_rows + 1): - for col_idx in range(1, n_cols + 1): - scene_idx = (row_idx - 1) * n_cols + col_idx - scene_key = "scene" if scene_idx == 1 else f"scene{scene_idx}" - scene_update: dict[str, Any] = {"aspectmode": "cube"} - xaxis_cfg: dict[str, Any] = {} - yaxis_cfg: dict[str, Any] = {} - zaxis_cfg: dict[str, Any] = {} - if x_title: - xaxis_cfg["title"] = x_title - if y_title: - yaxis_cfg["title"] = y_title - if z_title: - zaxis_cfg["title"] = z_title - if x_range: - xaxis_cfg["range"] = x_range - if y_range: - yaxis_cfg["range"] = y_range - if z_range: - zaxis_cfg["range"] = z_range - if xaxis_cfg: - scene_update["xaxis"] = xaxis_cfg - if yaxis_cfg: - scene_update["yaxis"] = yaxis_cfg - if zaxis_cfg: - scene_update["zaxis"] = zaxis_cfg - layout_update: dict[str, Any] = {scene_key: scene_update} - fig.update_layout(**layout_update) - - return fig - - -def _build_scatter3d(layer: LayerConfig, df: pd.DataFrame, controls: Any | None): - aes = layer.aesthetics - x_field = _require_field(aes.x, "x") - y_field = _require_field(aes.y, "y") - z_field = _require_field(aes.z, "z") - - color_field = _optional_field(aes.color) - size_field = _optional_field(aes.size) - size_value = _resolve_size_value(aes.size) - opacity_value = _resolve_opacity(aes.opacity) - hover_fields = _collect_tooltip_fields(aes.tooltip) - - dropdown = _resolve_layer_dropdown(df, controls) - slider_enabled = not (getattr(controls, "accumulate_steps", False)) - slider = _resolve_slider_control(df, controls if slider_enabled else None) - color_map = 
_build_color_discrete_map(df, color_field, aes.color) - color_specs = _build_color_group_specs(df, color_field, aes.color, color_map) - - if slider: - figure = _build_slider_scatter3d( - df, - slider, - dropdown, - x_field, - y_field, - z_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - aes, - layer, - size_value, - ) - elif dropdown: - figure = _build_layer_filtered_scatter3d( - df, - dropdown, - x_field, - y_field, - z_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - aes, - layer, - size_value, - ) - else: - traces = _scatter3d_traces( - df, - x_field, - y_field, - z_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - figure = go.Figure(data=traces) - figure = _apply_constant_channels(figure, aes) - _maybe_update_trace_name(figure, layer, color_field) - - _apply_legend_visibility(figure, aes) - - return figure - - -def _build_scatter2d(layer: LayerConfig, df: pd.DataFrame, controls: Any | None): - aes = layer.aesthetics - x_field = _require_field(aes.x, "x") - y_field = _require_field(aes.y, "y") - - color_field = _optional_field(aes.color) - size_field = _optional_field(aes.size) - size_value = _resolve_size_value(aes.size) - opacity_value = _resolve_opacity(aes.opacity) - hover_fields = _collect_tooltip_fields(aes.tooltip) - - dropdown = _resolve_layer_dropdown(df, controls) - slider_enabled = not (getattr(controls, "accumulate_steps", False)) - slider = _resolve_slider_control(df, controls if slider_enabled else None) - color_map = _build_color_discrete_map(df, color_field, aes.color) - color_specs = _build_color_group_specs(df, color_field, aes.color, color_map) - - if slider: - figure = _build_slider_scatter2d( - df, - slider, - dropdown, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - aes, - layer, - size_value, - ) - elif dropdown: - figure = 
_build_layer_filtered_scatter2d( - df, - dropdown, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - aes, - layer, - size_value, - ) - else: - traces = _scatter2d_traces( - df, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - figure = go.Figure(data=traces) - figure = _apply_constant_channels(figure, aes) - _maybe_update_trace_name(figure, layer, color_field) - - _apply_legend_visibility(figure, aes) - return figure - - -def _apply_plot_level_properties( - figure, - guides: PlotLevelGuideConfig, - size: PlotSizeConfig, - background: str | None, - aes: AestheticsConfig, -): - title_lines = [guides.title] if guides.title else [] - title_lines += [text for text in (guides.subtitle, guides.caption) if text] - if title_lines: - figure.update_layout(title="
".join(title_lines)) - if size.width or size.height: - figure.update_layout(width=size.width, height=size.height) - - has_3d = any(trace.type == "scatter3d" for trace in figure.data) - x_title = _axis_title(aes.x) - y_title = _axis_title(aes.y) - z_title = _axis_title(aes.z) - if has_3d: - scene_updates: dict[str, Any] = {} - if x_title: - scene_updates.setdefault("xaxis", {})["title"] = x_title - if y_title: - scene_updates.setdefault("yaxis", {})["title"] = y_title - if z_title: - scene_updates.setdefault("zaxis", {})["title"] = z_title - if background: - scene_updates["bgcolor"] = background - if scene_updates: - figure.update_layout(scene=scene_updates) - else: - axis_updates: dict[str, Any] = {} - if x_title: - axis_updates.setdefault("xaxis", {})["title"] = x_title - if y_title: - axis_updates.setdefault("yaxis", {})["title"] = y_title - if background: - axis_updates["plot_bgcolor"] = background - if axis_updates: - figure.update_layout(**axis_updates) - - if guides.labels: - LOGGER.info("Plot-level labels are not yet implemented for Plotly; skipping %s labels.", len(guides.labels)) - return figure - - -def _require_field(channel: ChannelAestheticsConfig | None, name: str) -> str: - if channel is None or not channel.field: - raise ConfigValidationError(f"Plotly renderer requires '{name}' channel with a field specified.") - return channel.field - - -def _optional_field(channel: ChannelAestheticsConfig | None) -> str | None: - if channel is None: - return None - return channel.field - - -def _collect_tooltip_fields(tooltips: list[ChannelAestheticsConfig] | None) -> list[str]: - if not tooltips: - return [] - fields: list[str] = [] - for tooltip in tooltips: - if tooltip.field is None: - raise ConfigValidationError("Plotly renderer tooltip entries must reference a data field.") - fields.append(tooltip.field) - return fields - - -def _resolve_opacity(channel: ChannelAestheticsConfig | None) -> float | None: - if channel is None: - return None - if channel.value 
is None: - raise ConfigValidationError("Plotly renderer opacity channel must specify a constant value.") - try: - opacity = float(channel.value) - except (TypeError, ValueError) as exc: - raise ConfigValidationError("Opacity channel must be a numeric constant.") from exc - if not 0.0 <= opacity <= 1.0: - raise ConfigValidationError("Opacity value must be between 0 and 1.") - return opacity - - -def _resolve_size_value(channel: ChannelAestheticsConfig | None) -> float | None: - if channel is None or channel.value is None: - return None - try: - return float(channel.value) - except (TypeError, ValueError) as exc: - raise ConfigValidationError("Size channel value must be numeric.") from exc - - -def _axis_title(channel: ChannelAestheticsConfig | None) -> str | None: - if channel is None: - return None - return channel.title or channel.field - - -def _axis_domain(channel: ChannelAestheticsConfig | None) -> list[Any] | None: - if channel is None or channel.scale is None: - return None - return channel.scale.domain - - -def _apply_constant_channels(figure, aes: AestheticsConfig): - if aes.color and aes.color.value is not None: - figure.update_traces(marker={"color": aes.color.value}, selector={"type": "scatter3d"}) - figure.update_traces(marker={"color": aes.color.value}, selector={"type": "scatter"}) - for frame in getattr(figure, "frames", []) or []: - for trace in frame.data: - if hasattr(trace, "marker"): - trace.marker = trace.marker or {} - trace.marker["color"] = aes.color.value - if aes.size and aes.size.value is not None: - figure.update_traces(marker={"size": aes.size.value}, selector={"type": "scatter3d"}) - figure.update_traces(marker={"size": aes.size.value}, selector={"type": "scatter"}) - for frame in getattr(figure, "frames", []) or []: - for trace in frame.data: - if hasattr(trace, "marker"): - trace.marker = trace.marker or {} - trace.marker["size"] = aes.size.value - return figure - - -def _apply_legend_visibility(figure, aes: AestheticsConfig): - if 
not _legend_hidden(aes.color): - return - for trace in figure.data: - trace.showlegend = False - for frame in getattr(figure, "frames", []) or []: - for trace in frame.data: - trace.showlegend = False - - -def _legend_hidden(color_cfg: ChannelAestheticsConfig | None) -> bool: - return bool(color_cfg and color_cfg.legend and color_cfg.legend.visible is False) - - -def _maybe_update_trace_name(figure, layer: LayerConfig, color_field: str | None): - if len(figure.data) != 1: - return - trace_name = layer.name or (color_field or "3d_scatter") - figure.update_traces(name=trace_name, selector={"type": "scatter3d"}) - - -def _resolve_layer_dropdown(df: pd.DataFrame, controls: Any | None) -> tuple[str, list[Any]] | None: - if not controls: - return None - dropdown = getattr(controls, "dropdown", None) - field_name = getattr(dropdown, "field", None) if dropdown else None - if field_name != "layer" or field_name not in df.columns: - return None - raw_options = getattr(dropdown, "options", None) or [] - options = [_normalize_option(value) for value in raw_options] - # Filter out "_no_layer_" placeholder used for layer-independent data - valid_values = [value for value in options if value in set(df[field_name]) and value != "_no_layer_"] - if len(valid_values) <= 1: - return None - return field_name, valid_values - - -def _resolve_slider_control(df: pd.DataFrame, controls: Any | None) -> tuple[str, list[Any]] | None: - if not controls: - return None - slider = getattr(controls, "slider", None) - field_name = getattr(slider, "field", None) if slider else None - if field_name is None or field_name not in df.columns: - return None - raw_options = getattr(slider, "options", None) - option_values = raw_options or list(pd.unique(df[field_name])) - options = [_normalize_option(value) for value in option_values] - if len(options) <= 1: - return None - # Preserve order if numeric; otherwise keep as strings - try: - options = sorted(set(options), key=lambda v: float(v)) - except 
(TypeError, ValueError): # pragma: no cover - fallback for non-numeric - options = sorted(dict.fromkeys(options)) - return field_name, options - - -def _build_layer_filtered_scatter3d( - df: pd.DataFrame, - dropdown: tuple[str, list[Any]], - x_field: str, - y_field: str, - z_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - aes: AestheticsConfig, - layer: LayerConfig, - size_value: float | None = None, -): - field_name, options = dropdown - traces: list[Any] = [] - trace_ranges: list[tuple[int, int]] = [] - available: list[Any] = [] - - for option in options: - subset = df.loc[df[field_name] == option] - if subset.empty: - continue - layer_index = len(available) - available.append(option) - subset_traces = _scatter3d_traces( - subset, - x_field, - y_field, - z_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=str(option), - size_value=size_value, - ) - for trace in subset_traces: - trace.visible = layer_index == 0 - start = len(traces) - traces.extend(subset_traces) - trace_ranges.append((start, len(traces))) - - if len(available) <= 1: - traces = _scatter3d_traces( - df, - x_field, - y_field, - z_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - figure = go.Figure(data=traces) - figure = _apply_constant_channels(figure, aes) - _maybe_update_trace_name(figure, layer, color_field) - return figure - - figure = go.Figure(data=traces) - figure = _apply_constant_channels(figure, aes) - _add_layer_dropdown_menu(figure, available, trace_ranges) - _maybe_update_trace_name(figure, layer, color_field) - return figure - - -def _build_layer_filtered_scatter2d( - df: pd.DataFrame, - dropdown: tuple[str, list[Any]], - x_field: str, - y_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - 
opacity_value: float | None, - color_specs: list[ColorGroupSpec], - aes: AestheticsConfig, - layer: LayerConfig, - size_value: float | None = None, -): - field_name, options = dropdown - traces: list[Any] = [] - trace_ranges: list[tuple[int, int]] = [] - available: list[Any] = [] - - for option in options: - subset = df.loc[df[field_name] == option] - if subset.empty: - continue - layer_index = len(available) - available.append(option) - subset_traces = _scatter2d_traces( - subset, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=str(option), - size_value=size_value, - ) - for trace in subset_traces: - trace.visible = layer_index == 0 - start = len(traces) - traces.extend(subset_traces) - trace_ranges.append((start, len(traces))) - - if len(available) <= 1: - traces = _scatter2d_traces( - df, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - layer_name=layer.name, - size_value=size_value, - ) - figure = go.Figure(data=traces) - figure = _apply_constant_channels(figure, aes) - _maybe_update_trace_name(figure, layer, color_field) - return figure - - figure = go.Figure(data=traces) - figure = _apply_constant_channels(figure, aes) - _add_layer_dropdown_menu(figure, available, trace_ranges) - _maybe_update_trace_name(figure, layer, color_field) - return figure - - -def _build_slider_scatter( - df: pd.DataFrame, - slider: tuple[str, list[Any]], - dropdown: tuple[str, list[Any]] | None, - x_field: str, - y_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - aes: AestheticsConfig, - layer: LayerConfig, - *, - z_field: str | None = None, - size_value: float | None = None, -): - """Build slider-controlled scatter plot (2D or 3D based on z_field presence).""" - slider_field, slider_values = slider - layer_field = dropdown[0] if dropdown else None - 
layer_options = dropdown[1] if dropdown else [None] - - traces: list[Any] = [] - trace_ranges: list[tuple[int, int]] = [] - available_layers: list[Any] = [] - frames_by_value: dict[str, list[Any]] = {str(value): [] for value in slider_values} - - for option in layer_options: - subset = df if option is None else df.loc[df[layer_field] == option] - if subset.empty: - continue - layer_index = len(available_layers) - available_layers.append(option) - layer_label = str(option) if option is not None else layer.name - - initial_subset = subset.loc[subset[slider_field] == slider_values[0]] - subset_traces = _scatter_traces( - initial_subset, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - z_field=z_field, - layer_name=layer_label, - keep_empty=True, - size_value=size_value, - ) - if dropdown: - for trace in subset_traces: - trace.visible = layer_index == 0 - start = len(traces) - traces.extend(subset_traces) - trace_ranges.append((start, len(traces))) - - for slider_value in slider_values: - slider_subset = subset.loc[subset[slider_field] == slider_value] - frame_traces = _scatter_traces( - slider_subset, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - z_field=z_field, - layer_name=layer_label, - keep_empty=True, - size_value=size_value, - ) - frames_by_value[str(slider_value)].extend(frame_traces) - - figure = go.Figure(data=traces) - figure.frames = _build_slider_frames(frames_by_value, slider_values) - _add_slider_layout(figure, slider_field, slider_values) - - if dropdown and available_layers: - _add_layer_dropdown_menu(figure, available_layers, trace_ranges) - else: - _maybe_update_trace_name(figure, layer, color_field) - - figure = _apply_constant_channels(figure, aes) - return figure - - -def _build_slider_scatter3d( - df: pd.DataFrame, - slider: tuple[str, list[Any]], - dropdown: tuple[str, list[Any]] | None, - x_field: str, - y_field: str, - z_field: str, - 
color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - aes: AestheticsConfig, - layer: LayerConfig, - size_value: float | None = None, -): - """Build 3D slider scatter. Wrapper around _build_slider_scatter.""" - return _build_slider_scatter( - df, - slider, - dropdown, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - aes, - layer, - z_field=z_field, - size_value=size_value, - ) - - -def _build_slider_scatter2d( - df: pd.DataFrame, - slider: tuple[str, list[Any]], - dropdown: tuple[str, list[Any]] | None, - x_field: str, - y_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - aes: AestheticsConfig, - layer: LayerConfig, - size_value: float | None = None, -): - """Build 2D slider scatter. Wrapper around _build_slider_scatter.""" - return _build_slider_scatter( - df, - slider, - dropdown, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - aes, - layer, - z_field=None, - size_value=size_value, - ) - - -def _add_layer_dropdown_menu( - figure, - options: list[Any], - trace_ranges: list[tuple[int, int]], -): - total_traces = len(figure.data) - buttons = [] - for option, (start, end) in zip(options, trace_ranges, strict=False): - visible = [False] * total_traces - for idx in range(start, end): - visible[idx] = True - buttons.append( - { - "label": str(option), - "method": "update", - "args": [{"visible": visible}], - } - ) - - figure.update_layout( - updatemenus=[ - { - "buttons": buttons, - "direction": "down", - "showactive": True, - "x": 1.05, - "xanchor": "left", - "y": 1, - "yanchor": "top", - "pad": {"l": 10, "r": 10, "t": 0, "b": 0}, - } - ] - ) - - -def _build_slider_frames(frames_by_value: dict[str, list[Any]], slider_values: list[Any]): - frames: list[go.Frame] = 
[] - for value in slider_values: - name = str(value) - frame_traces = frames_by_value.get(name, []) - frames.append(go.Frame(name=name, data=frame_traces)) - return frames - - -def _add_slider_layout(figure, field_name: str, slider_values: list[Any]): - if not slider_values: - return - steps = [] - for value in slider_values: - label = str(value) - steps.append( - { - "label": label, - "method": "animate", - "args": [ - [label], - { - "frame": {"duration": 0, "redraw": True}, - "mode": "immediate", - "transition": {"duration": 0}, - }, - ], - } - ) - figure.update_layout( - sliders=[ - { - "active": 0, - "currentvalue": {"prefix": f"{field_name}="}, - "pad": {"t": 40, "b": 0}, - "steps": steps, - } - ] - ) - - -@dataclass -class ColorGroupSpec: - """Specification for a color grouping in Plotly rendering.""" - - label: str | None - value: Any | None - constant_color: str | None - mode: Literal["none", "literal", "discrete", "field"] = "none" - - -def _scatter_traces( - df: pd.DataFrame, - x_field: str, - y_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - *, - z_field: str | None = None, - layer_name: str | None = None, - keep_empty: bool = False, - size_value: float | None = None, -) -> list[go.Scatter3d] | list[go.Scatter]: - """Build scatter traces (2D or 3D based on z_field presence).""" - is_3d = z_field is not None - traces: list[Any] = [] - - for idx, spec in enumerate(color_specs): - subset = _subset_for_spec(df, color_field, spec) - if subset.empty and not keep_empty: - continue - marker = _build_marker(subset, color_field, size_field, spec, size_value) - customdata = _build_customdata(subset, hover_fields) - - if is_3d: - assert z_field is not None - trace = go.Scatter3d( - x=subset[x_field].tolist(), - y=subset[y_field].tolist(), - z=subset[z_field].tolist(), - mode="markers", - name=_derive_trace_name(layer_name, spec, idx), - marker=marker, - 
customdata=customdata, - hovertemplate=_build_hovertemplate(hover_fields), - ) - else: - trace = go.Scatter( - x=subset[x_field].tolist(), - y=subset[y_field].tolist(), - mode="markers", - name=_derive_trace_name(layer_name, spec, idx), - marker=marker, - customdata=customdata, - hovertemplate=_build_hovertemplate(hover_fields), - ) - - if spec.mode == "literal": - trace.showlegend = False - if opacity_value is not None: - trace.opacity = opacity_value - traces.append(trace) - - if not traces: - default_name = layer_name or ("scatter3d" if is_3d else "scatter") - if is_3d: - empty_trace = go.Scatter3d(x=[], y=[], z=[], mode="markers", name=default_name) - else: - empty_trace = go.Scatter(x=[], y=[], mode="markers", name=default_name) - if opacity_value is not None: - empty_trace.opacity = opacity_value - traces.append(empty_trace) - - return traces - - -def _scatter3d_traces( - df: pd.DataFrame, - x_field: str, - y_field: str, - z_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - *, - layer_name: str | None = None, - keep_empty: bool = False, - size_value: float | None = None, -) -> list[go.Scatter3d]: - """Build 3D scatter traces. Wrapper around _scatter_traces for type safety.""" - return _scatter_traces( - df, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - z_field=z_field, - layer_name=layer_name, - keep_empty=keep_empty, - size_value=size_value, - ) # type: ignore[return-value] - - -def _scatter2d_traces( - df: pd.DataFrame, - x_field: str, - y_field: str, - color_field: str | None, - size_field: str | None, - hover_fields: list[str], - opacity_value: float | None, - color_specs: list[ColorGroupSpec], - *, - layer_name: str | None = None, - keep_empty: bool = False, - size_value: float | None = None, -) -> list[go.Scatter]: - """Build 2D scatter traces. 
Wrapper around _scatter_traces for type safety.""" - return _scatter_traces( - df, - x_field, - y_field, - color_field, - size_field, - hover_fields, - opacity_value, - color_specs, - z_field=None, - layer_name=layer_name, - keep_empty=keep_empty, - size_value=size_value, - ) # type: ignore[return-value] - - -def _subset_for_spec(df: pd.DataFrame, color_field: str | None, spec: ColorGroupSpec) -> pd.DataFrame: - if spec.mode != "discrete" or color_field is None: - return df - return df.loc[df[color_field] == spec.value] - - -def _build_marker( - df: pd.DataFrame, - color_field: str | None, - size_field: str | None, - spec: ColorGroupSpec, - size_value: float | None = None, -) -> dict[str, Any]: - marker: dict[str, Any] = {} - if size_field and size_field in df.columns: - marker["size"] = df[size_field].tolist() - elif size_value is not None: - marker["size"] = size_value - if spec.mode == "literal" and color_field and color_field in df.columns: - marker["color"] = df[color_field].tolist() - elif spec.mode == "discrete" and spec.constant_color is not None: - marker["color"] = spec.constant_color - elif spec.mode == "field" and color_field and color_field in df.columns: - marker["color"] = df[color_field].tolist() - return marker - - -def _build_customdata(df: pd.DataFrame, hover_fields: list[str]) -> Any: - if not hover_fields: - return None - missing = [field for field in hover_fields if field not in df.columns] - if missing: - raise ConfigValidationError(f"Tooltip field(s) {missing} are missing from dataframe.") - return df[hover_fields].to_numpy() - - -def _build_hovertemplate(hover_fields: list[str]) -> str | None: - if not hover_fields: - return None - template_parts = [f"{field}: %{{customdata[{idx}]}}" for idx, field in enumerate(hover_fields)] - return "
".join(template_parts) + "" - - -def _derive_trace_name(layer_name: str | None, spec: ColorGroupSpec, idx: int) -> str: - if spec.label and layer_name: - return f"{layer_name} - {spec.label}" - if spec.label: - return spec.label - if layer_name: - return layer_name - return f"series_{idx + 1}" - - -def _build_color_discrete_map( - df: pd.DataFrame, color_field: str | None, color_cfg: ChannelAestheticsConfig | None -) -> dict[str, str] | None: - if color_field is None or color_cfg is None: - return None - if color_cfg.type not in {"nominal", "ordinal"}: - return None - if color_field not in df.columns: - return None - series: pd.Series = df.loc[:, color_field] - if _series_is_literal_color(series): - return None - palette = qualitative_colors.Plotly - values = [_normalize_option(value) for value in pd.unique(series)] - return {value: palette[idx % len(palette)] for idx, value in enumerate(values)} - - -def _build_color_group_specs( - df: pd.DataFrame, - color_field: str | None, - color_cfg: ChannelAestheticsConfig | None, - color_map: dict[str, str] | None, -) -> list[ColorGroupSpec]: - if color_field is None or color_field not in df.columns: - return [ColorGroupSpec(label=None, value=None, constant_color=None, mode="none")] - series: pd.Series = df.loc[:, color_field] - if _series_is_literal_color(series): - return [ColorGroupSpec(label=None, value=None, constant_color=None, mode="literal")] - if color_cfg and color_cfg.type in {"nominal", "ordinal"}: - specs: list[ColorGroupSpec] = [] - for value in pd.unique(series): - normalized = _normalize_option(value) - constant_color = (color_map or {}).get(normalized) - specs.append( - ColorGroupSpec( - label=str(value), - value=value, - constant_color=constant_color, - mode="discrete", - ) - ) - return specs - if color_cfg: - return [ColorGroupSpec(label=color_cfg.title or color_field, value=None, constant_color=None, mode="field")] - return [ColorGroupSpec(label=None, value=None, constant_color=None, mode="none")] - - 
-def _normalize_option(value: Any) -> Any: - if hasattr(value, "item"): - try: - return value.item() - except AttributeError: # pragma: no cover - defensive - return value - return value - - -def _series_is_literal_color(series: pd.Series) -> bool: - if series.empty: - return False - return bool(series.dropna().map(_value_is_color_string).all()) - - -def _value_is_color_string(value: Any) -> bool: - if isinstance(value, str): - candidate = value.strip() - if _HEX_COLOR_PATTERN.match(candidate): - return True - lowered = candidate.lower() - return lowered.startswith("rgb(") or lowered.startswith("rgba(") - return False diff --git a/simplexity/visualization/structured_configs.py b/simplexity/visualization/structured_configs.py deleted file mode 100644 index 95d9a781..00000000 --- a/simplexity/visualization/structured_configs.py +++ /dev/null @@ -1,238 +0,0 @@ -"""Structured visualization configuration dataclasses. - -This module implements the schema described in docs/visualization.md. The -dataclasses are intentionally backend-agnostic so that Hydra configs can be -validated once and rendered by different visualization engines (Altair, -plotnine, matplotlib, etc.). 
-""" - -from __future__ import annotations - -from dataclasses import dataclass, field -from typing import Any - -from simplexity.exceptions import ConfigValidationError - - -def _ensure(condition: bool, message: str) -> None: - """Raise ConfigValidationError if condition is not met.""" - if not condition: - raise ConfigValidationError(message) - - -@dataclass -class DataConfig: - """Specifies the logical data source and lightweight filtering.""" - - source: str = "main" - filters: list[str] = field(default_factory=list) - columns: list[str] | None = None - - -@dataclass -class TransformConfig: # pylint: disable=too-many-instance-attributes - """Represents a single data transform stage.""" - - op: str # ["filter", "calculate", "aggregate", "bin", "window", "fold", "pivot"] - filter: str | None = None - as_field: str | None = None - expr: str | None = None - groupby: list[str] | None = None - aggregations: dict[str, str] | None = None - field: str | None = None - binned_as: str | None = None - maxbins: int | None = None - window: dict[str, str] | None = None - frame: list[int | None] | None = None - fold_fields: list[str] | None = None - as_fields: list[str] | None = None - - def __post_init__(self) -> None: - if self.op == "filter": - _ensure(bool(self.filter), "TransformConfig.filter must be provided when op='filter'") - if self.op == "calculate": - _ensure(bool(self.as_field), "TransformConfig.as_field is required for calculate transforms") - _ensure(bool(self.expr), "TransformConfig.expr is required for calculate transforms") - if self.op == "aggregate": - _ensure(bool(self.groupby), "TransformConfig.groupby is required for aggregate transforms") - _ensure( - bool(self.aggregations), - "TransformConfig.aggregations is required for aggregate transforms", - ) - if self.op == "bin": - _ensure(bool(self.field), "TransformConfig.field is required for bin transforms") - _ensure(bool(self.binned_as), "TransformConfig.binned_as is required for bin transforms") - if 
self.op == "window": - _ensure(bool(self.window), "TransformConfig.window is required for window transforms") - - -@dataclass -class ScaleConfig: - """Describes how raw data values are mapped to visual ranges.""" - - type: str | None = None # ["linear", "log", "sqrt", "pow", "symlog", "time", "utc", "ordinal", "band", "point"] - domain: list[Any] | None = None - range: list[Any] | None = None - clamp: bool | None = None - nice: bool | None = None - reverse: bool | None = None - - -@dataclass -class AxisConfig: - """Axis settings for positional channels.""" - - title: str | None = None - grid: bool | None = None - format: str | None = None - tick_count: int | None = None - label_angle: float | None = None - visible: bool = True - - -@dataclass -class LegendConfig: - """Legend settings for categorical or continuous mappings.""" - - title: str | None = None - orient: str | None = None - visible: bool = True - - -@dataclass -class ChannelAestheticsConfig: # pylint: disable=too-many-instance-attributes - """Represents one visual encoding channel (x, y, color, etc.).""" - - field: str | None = None - type: str | None = None # ["quantitative", "ordinal", "nominal", "temporal"] - value: Any | None = None - aggregate: str | None = None - bin: bool | None = None - time_unit: str | None = None - scale: ScaleConfig | None = None - axis: AxisConfig | None = None - legend: LegendConfig | None = None - sort: str | list[Any] | None = None - title: str | None = None - - def __post_init__(self) -> None: - if self.field is not None and self.value is not None: - raise ConfigValidationError( - "ChannelAestheticsConfig cannot specify both 'field' and 'value'; prefer 'field'." 
- ) - - -@dataclass -class AestheticsConfig: # pylint: disable=too-many-instance-attributes - """Collection of channel encodings for a layer.""" - - x: ChannelAestheticsConfig | None = None - y: ChannelAestheticsConfig | None = None - z: ChannelAestheticsConfig | None = None - color: ChannelAestheticsConfig | None = None - size: ChannelAestheticsConfig | None = None - shape: ChannelAestheticsConfig | None = None - opacity: ChannelAestheticsConfig | None = None - tooltip: list[ChannelAestheticsConfig] | None = None - row: ChannelAestheticsConfig | None = None - column: ChannelAestheticsConfig | None = None - detail: ChannelAestheticsConfig | None = None - - -@dataclass -class GeometryConfig: - """Visual primitive used to draw the layer.""" - - type: str # [point, line, area, bar, rect, rule, tick, circle, square, text, boxplot, errorbar, errorband] - props: dict[str, Any] = field(default_factory=dict) - - def __post_init__(self) -> None: - _ensure(isinstance(self.props, dict), "GeometryConfig.props must be a dictionary") - - -@dataclass -class SelectionConfig: - """Interactive selection definition.""" - - name: str - type: str = "interval" # ["interval", "single", "multi"] - encodings: list[str] | None = None - fields: list[str] | None = None - bind: dict[str, Any] | None = None - - -@dataclass -class PlotSizeConfig: - """Size and layout metadata for an entire plot.""" - - width: int | None = None - height: int | None = None - autosize: str | None = None - - -@dataclass -class LabelConfig: - """Free-form labels or annotations.""" - - text: str | None = None - x: float | str | None = None - y: float | str | None = None - props: dict[str, Any] = field(default_factory=dict) - - -@dataclass -class PlotLevelGuideConfig: - """Titles and caption level guides.""" - - title: str | None = None - subtitle: str | None = None - caption: str | None = None - labels: list[LabelConfig] | None = None - title_scalars: dict[str, str] | None = None - - -@dataclass -class FacetConfig: - 
"""High-level faceting instructions.""" - - row: str | None = None - column: str | None = None - wrap: int | None = None - - -@dataclass -class LayerConfig: - """A single layer in a composed plot.""" - - name: str | None = None - data: DataConfig | None = None - transforms: list[TransformConfig] = field(default_factory=list) - geometry: GeometryConfig = field(default_factory=lambda: GeometryConfig(type="point")) - aesthetics: AestheticsConfig = field(default_factory=AestheticsConfig) - selections: list[SelectionConfig] = field(default_factory=list) - - -@dataclass -class PlotConfig: # pylint: disable=too-many-instance-attributes - """Top-level configuration for one plot.""" - - backend: str = "altair" # ["altair", "plotly"] - data: DataConfig = field(default_factory=DataConfig) - transforms: list[TransformConfig] = field(default_factory=list) - layers: list[LayerConfig] = field(default_factory=list) - facet: FacetConfig | None = None - size: PlotSizeConfig = field(default_factory=PlotSizeConfig) - guides: PlotLevelGuideConfig = field(default_factory=PlotLevelGuideConfig) - background: str | None = None - selections: list[SelectionConfig] = field(default_factory=list) - - def __post_init__(self) -> None: - _ensure(self.layers is not None, "PlotConfig.layers must be a list (can be empty)") - - -@dataclass -class GraphicsConfig: - """Root Visualization config that multiplexes multiple named plots.""" - - default_backend: str = "altair" # ["altair", "plotly"] - plots: dict[str, PlotConfig] = field(default_factory=dict) diff --git a/tests/activations/test_activation_visualizations.py b/tests/activations/test_activation_visualizations.py deleted file mode 100644 index c771a4ea..00000000 --- a/tests/activations/test_activation_visualizations.py +++ /dev/null @@ -1,298 +0,0 @@ -"""Tests for activation visualization functions.""" - -# pylint: disable=all -# Temporarily disable all pylint checkers during AST traversal to prevent crash. 
-# pylint: enable=all - -import numpy as np -import pytest - -from simplexity.activations.activation_visualizations import ( - PreparedMetadata, - _compute_aggregation, - _parse_scalar_expression, - _render_title_template, - build_visualization_payloads, -) -from simplexity.activations.visualization_configs import build_activation_visualization_config -from simplexity.exceptions import ConfigValidationError - - -class TestParseScalarExpression: - """Tests for _parse_scalar_expression function.""" - - def test_simple_key(self): - """Test parsing a simple scalar key without aggregation.""" - key, agg = _parse_scalar_expression("my_scalar") - assert key == "my_scalar" - assert agg is None - - def test_min_aggregation(self): - """Test parsing min aggregation.""" - key, agg = _parse_scalar_expression("min(my_scalar)") - assert key == "my_scalar" - assert agg == "min" - - def test_max_aggregation(self): - """Test parsing max aggregation.""" - key, agg = _parse_scalar_expression("max(my_scalar)") - assert key == "my_scalar" - assert agg == "max" - - def test_avg_aggregation(self): - """Test parsing avg aggregation.""" - key, agg = _parse_scalar_expression("avg(my_scalar)") - assert key == "my_scalar" - assert agg == "avg" - - def test_mean_aggregation(self): - """Test parsing mean aggregation.""" - key, agg = _parse_scalar_expression("mean(my_scalar)") - assert key == "my_scalar" - assert agg == "mean" - - def test_latest_aggregation(self): - """Test parsing latest aggregation.""" - key, agg = _parse_scalar_expression("latest(my_scalar)") - assert key == "my_scalar" - assert agg == "latest" - - def test_first_aggregation(self): - """Test parsing first aggregation.""" - key, agg = _parse_scalar_expression("first(my_scalar)") - assert key == "my_scalar" - assert agg == "first" - - def test_last_aggregation(self): - """Test parsing last aggregation.""" - key, agg = _parse_scalar_expression("last(my_scalar)") - assert key == "my_scalar" - assert agg == "last" - - def 
test_strips_whitespace(self): - """Test that whitespace is stripped.""" - key, agg = _parse_scalar_expression(" min( my_scalar ) ") - assert key == "my_scalar" - assert agg == "min" - - -class TestComputeAggregation: - """Tests for _compute_aggregation function.""" - - def test_min(self): - """Test min aggregation.""" - history = [(0, 5.0), (1, 3.0), (2, 7.0)] - result = _compute_aggregation(history, "min") - assert result == 3.0 - - def test_max(self): - """Test max aggregation.""" - history = [(0, 5.0), (1, 3.0), (2, 7.0)] - result = _compute_aggregation(history, "max") - assert result == 7.0 - - def test_avg(self): - """Test avg aggregation.""" - history = [(0, 3.0), (1, 6.0), (2, 9.0)] - result = _compute_aggregation(history, "avg") - assert result == 6.0 - - def test_mean(self): - """Test mean aggregation (alias for avg).""" - history = [(0, 3.0), (1, 6.0), (2, 9.0)] - result = _compute_aggregation(history, "mean") - assert result == 6.0 - - def test_latest(self): - """Test latest aggregation.""" - history = [(0, 5.0), (1, 3.0), (2, 7.0)] - result = _compute_aggregation(history, "latest") - assert result == 7.0 - - def test_last(self): - """Test last aggregation (alias for latest).""" - history = [(0, 5.0), (1, 3.0), (2, 7.0)] - result = _compute_aggregation(history, "last") - assert result == 7.0 - - def test_first(self): - """Test first aggregation.""" - history = [(0, 5.0), (1, 3.0), (2, 7.0)] - result = _compute_aggregation(history, "first") - assert result == 5.0 - - def test_empty_history_raises(self): - """Test that empty history raises error.""" - with pytest.raises(ConfigValidationError, match="empty history"): - _compute_aggregation([], "min") - - def test_unknown_function_raises(self): - """Test that unknown function raises error.""" - history = [(0, 5.0)] - with pytest.raises(ConfigValidationError, match="Unknown aggregation"): - _compute_aggregation(history, "unknown_func") - - -class TestRenderTitleTemplate: - """Tests for _render_title_template 
function.""" - - def test_none_title_returns_none(self): - """Test that None title returns None.""" - result = _render_title_template(None, None, {}, {}) - assert result is None - - def test_simple_title_no_substitution(self): - """Test title without placeholders.""" - result = _render_title_template("My Title", None, {}, {}) - assert result == "My Title" - - def test_title_with_scalar_substitution(self): - """Test title with scalar value substitution.""" - title = "Loss: {loss:.3f}" - title_scalars = {"loss": "test/loss"} - scalars = {"test/loss": 0.12345} - result = _render_title_template(title, title_scalars, scalars, {}) - assert result == "Loss: 0.123" - - def test_title_with_history_aggregation(self): - """Test title with scalar history aggregation.""" - title = "Min Loss: {min_loss:.2f}" - title_scalars = {"min_loss": "min(test/loss)"} - scalars = {} - scalar_history = {"test/loss": [(0, 0.5), (1, 0.3), (2, 0.4)]} - result = _render_title_template(title, title_scalars, scalars, scalar_history) - assert result == "Min Loss: 0.30" - - -class TestBuildVisualizationPayloads: - """Tests for build_visualization_payloads function.""" - - @pytest.fixture - def basic_metadata(self): - """Create basic metadata for testing.""" - return PreparedMetadata( - sequences=[(1, 2), (1, 3)], - steps=np.array([2, 2]), - select_last_token=False, - ) - - @pytest.fixture - def basic_viz_config(self): - """Create a basic visualization config.""" - return build_activation_visualization_config( - { - "name": "test_viz", - "data_mapping": { - "mappings": { - "pc0": {"source": "arrays", "key": "pca", "component": 0}, - }, - }, - "layer": { - "geometry": {"type": "point"}, - "aesthetics": { - "x": {"field": "pc0", "type": "quantitative"}, - }, - }, - } - ) - - def test_builds_payload_with_arrays(self, basic_metadata, basic_viz_config): - """Test building a payload with array data.""" - arrays = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} - payloads = build_visualization_payloads( 
- analysis_name="test", - viz_cfgs=[basic_viz_config], - default_backend="altair", - prepared_metadata=basic_metadata, - weights=np.array([0.5, 0.5]), - belief_states=None, - arrays=arrays, - scalars={}, - scalar_history={}, - scalar_history_step=None, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - assert len(payloads) == 1 - payload = payloads[0] - assert payload.name == "test_viz" - assert not payload.dataframe.empty - - def test_builds_payload_with_belief_states(self, basic_metadata): - """Test building a payload with belief state data.""" - viz_config = build_activation_visualization_config( - { - "name": "belief_viz", - "data_mapping": { - "mappings": { - "belief_0": {"source": "belief_states", "component": 0}, - }, - }, - "layer": { - "geometry": {"type": "point"}, - "aesthetics": { - "x": {"field": "belief_0", "type": "quantitative"}, - }, - }, - } - ) - belief_states = np.array([[0.5, 0.5], [0.3, 0.7]]) - payloads = build_visualization_payloads( - analysis_name="test", - viz_cfgs=[viz_config], - default_backend="altair", - prepared_metadata=basic_metadata, - weights=np.array([0.5, 0.5]), - belief_states=belief_states, - arrays={}, - scalars={}, - scalar_history={}, - scalar_history_step=None, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - assert len(payloads) == 1 - assert "belief_0" in payloads[0].dataframe.columns - - def test_handles_multiple_configs(self, basic_metadata): - """Test building multiple payloads from multiple configs.""" - configs = [ - build_activation_visualization_config( - { - "name": "viz_1", - "data_mapping": {"mappings": {"pc0": {"source": "arrays", "key": "pca", "component": 0}}}, - "layer": { - "geometry": {"type": "point"}, - "aesthetics": {"x": {"field": "pc0", "type": "quantitative"}}, - }, - } - ), - build_activation_visualization_config( - { - "name": "viz_2", - "data_mapping": {"mappings": {"pc1": {"source": "arrays", "key": "pca", "component": 1}}}, - "layer": { - "geometry": {"type": 
"point"}, - "aesthetics": {"x": {"field": "pc1", "type": "quantitative"}}, - }, - } - ), - ] - arrays = {"pca/layer_0": np.array([[1.0, 2.0], [3.0, 4.0]])} - payloads = build_visualization_payloads( - analysis_name="test", - viz_cfgs=configs, - default_backend="altair", - prepared_metadata=basic_metadata, - weights=np.array([0.5, 0.5]), - belief_states=None, - arrays=arrays, - scalars={}, - scalar_history={}, - scalar_history_step=None, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - assert len(payloads) == 2 - assert payloads[0].name == "viz_1" - assert payloads[1].name == "viz_2" diff --git a/tests/activations/test_dataframe_integration.py b/tests/activations/test_dataframe_integration.py deleted file mode 100644 index bb6dd4b6..00000000 --- a/tests/activations/test_dataframe_integration.py +++ /dev/null @@ -1,349 +0,0 @@ -"""Integration tests for projection DataFrame building with factor patterns.""" - -import jax.numpy as jnp -import numpy as np -import pandas as pd -import pytest - -from simplexity.activations.visualization.dataframe_builders import ( - _build_dataframe, - _build_dataframe_for_mappings, -) -from simplexity.activations.visualization_configs import ( - ActivationVisualizationConfig, - ActivationVisualizationDataMapping, - ActivationVisualizationFieldRef, - CombinedMappingSection, -) -from simplexity.analysis.linear_regression import layer_linear_regression -from simplexity.exceptions import ConfigValidationError - - -class TestProjectionDataframeIntegration: - """Integration tests for projection DataFrame building with factor patterns.""" - - def test_factored_projection_dataframe_values_match(self): - """Test that factored projection values are correctly associated with each factor. - - This is a regression test for the bug where arrays looked 'random' - when visualizing factored linear regression results. 
- """ - # Simulate projection keys as produced by LayerwiseAnalysis with to_factors=True - # Keys format: layer_name_factor_idx/projected - factor_0_data = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1], [0.3, 0.6, 0.1]]) - factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6], [0.3, 0.7]]) - - arrays = { - "projected/layer_0-F0": factor_0_data, - "projected/layer_0-F1": factor_1_data, - } - - # Metadata columns with 3 samples - metadata_columns = { - "step": np.array([3, 3, 3]), - "sample_index": np.array([0, 1, 2]), - } - - # Create mappings that use key patterns (as in user's YAML config) - # Note: Each mapping is for a SPECIFIC component, not a wildcard - mappings = { - "factor_*_prob_0": ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=0, - group_as="factor", - ), - "factor_*_prob_1": ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=1, - group_as="factor", - ), - } - - # Build the DataFrame - df = _build_dataframe_for_mappings( - mappings=mappings, - metadata_columns=metadata_columns, - arrays=arrays, - scalars={}, - belief_states=None, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - - # Verify structure: should have 2 groups (factor_0 and factor_1) * 3 samples = 6 rows - assert len(df) == 6, f"Expected 6 rows, got {len(df)}" - - # Verify factor column exists - assert "factor" in df.columns, "Missing 'factor' column" - - # Check factor_0 data - factor_0_rows = df[df["factor"] == "0"] - assert len(factor_0_rows) == 3, f"Expected 3 rows for factor_0, got {len(factor_0_rows)}" - - # Verify factor_0 prob_0 values match the source data - np.testing.assert_array_almost_equal( - np.asarray(factor_0_rows["prob_0"]), - factor_0_data[:, 0], - err_msg="Factor 0 prob_0 values don't match source data", - ) - - # Verify factor_0 prob_1 values match the source data - np.testing.assert_array_almost_equal( - np.asarray(factor_0_rows["prob_1"]), - factor_0_data[:, 1], - err_msg="Factor 0 prob_1 
values don't match source data", - ) - - # Check factor_1 data - factor_1_rows = df[df["factor"] == "1"] - assert len(factor_1_rows) == 3, f"Expected 3 rows for factor_1, got {len(factor_1_rows)}" - - # Verify factor_1 prob_0 values match the source data - np.testing.assert_array_almost_equal( - np.asarray(factor_1_rows["prob_0"]), - factor_1_data[:, 0], - err_msg="Factor 1 prob_0 values don't match source data", - ) - - # Verify factor_1 prob_1 values match the source data - np.testing.assert_array_almost_equal( - np.asarray(factor_1_rows["prob_1"]), - factor_1_data[:, 1], - err_msg="Factor 1 prob_1 values don't match source data", - ) - - def test_factored_projection_different_component_counts(self): - """Test that factors with different numbers of components are handled correctly. - - Factor 0 has 3 components (states), factor 1 has 2 components. - Requesting component 2 should work for factor 0 but raise an error for factor 1. - """ - factor_0_data = np.array([[0.1, 0.8, 0.1], [0.2, 0.7, 0.1]]) # 3 components - factor_1_data = np.array([[0.5, 0.5], [0.4, 0.6]]) # 2 components - - arrays = { - "projected/layer_0-F0": factor_0_data, - "projected/layer_0-F1": factor_1_data, - } - - metadata_columns = { - "step": np.array([1, 1]), - "sample_index": np.array([0, 1]), - } - - # Request component 2 - this should fail for factor_1 which only has 2 components - mappings = { - "factor_*_prob_2": ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=2, - group_as="factor", - ), - } - - # Should raise an error because factor_1 doesn't have component 2 - with pytest.raises(ConfigValidationError, match="out of bounds"): - _build_dataframe_for_mappings( - mappings=mappings, - metadata_columns=metadata_columns, - arrays=arrays, - scalars={}, - belief_states=None, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - - def test_combined_arrays_and_beliefs_data_integrity(self): - """Test combined mode with arrays and belief states.""" - 
n_samples = 4 - n_factors = 2 - n_states = 3 - - belief_states = np.array( - [ - [[0.1, 0.2, 0.7], [0.3, 0.4, 0.3]], - [[0.2, 0.3, 0.5], [0.5, 0.3, 0.2]], - [[0.3, 0.4, 0.3], [0.2, 0.6, 0.2]], - [[0.4, 0.5, 0.1], [0.1, 0.1, 0.8]], - ] - ) - - noise = np.random.default_rng(42).standard_normal((n_samples, n_factors, n_states)) * 0.01 - projected_values = belief_states + noise - - arrays = { - "projected/layer_0-F0": projected_values[:, 0, :], - "projected/layer_0-F1": projected_values[:, 1, :], - } - - metadata_columns = { - "analysis": np.array(["test"] * n_samples), - "step": np.array([10] * n_samples), - "sample_index": np.arange(n_samples), - } - - config = ActivationVisualizationConfig( - name="test_combined", - backend="altair", - plot=None, - data_mapping=ActivationVisualizationDataMapping( - combined=[ - CombinedMappingSection( - label="prediction", - mappings={ - f"factor_*_prob_{i}": ActivationVisualizationFieldRef( - source="arrays", key="projected/F*", component=i, group_as="factor" - ) - for i in range(n_states) - }, - ), - CombinedMappingSection( - label="ground_truth", - mappings={ - f"factor_*_prob_{i}": ActivationVisualizationFieldRef( - source="belief_states", factor="*", component=i, group_as="factor" - ) - for i in range(n_states) - }, - ), - ], - combine_as="data_type", - ), - ) - - df = _build_dataframe( - viz_cfg=config, - metadata_columns=metadata_columns, - arrays=arrays, - scalars={}, - scalar_history={}, - scalar_history_step=None, - belief_states=belief_states, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - - assert "data_type" in df.columns - assert "factor" in df.columns - assert len(df) == 2 * n_factors * n_samples - - def test_combined_mode_multiple_layers(self): - """Test that multiple layers appear correctly in combined mode.""" - n_samples = 3 - n_layers = 4 - n_factors = 2 - n_states = 3 - - belief_states = np.random.rand(n_samples, n_factors, n_states) - arrays = { - f"projected/layer_{layer_idx}-F{factor_idx}": 
np.random.rand(n_samples, n_states) - for layer_idx in range(n_layers) - for factor_idx in range(n_factors) - } - - metadata_columns = { - "analysis": np.array(["test"] * n_samples), - "step": np.array([10] * n_samples), - "sample_index": np.arange(n_samples), - } - - config = ActivationVisualizationConfig( - name="test_multilayer", - backend="altair", - plot=None, - data_mapping=ActivationVisualizationDataMapping( - combined=[ - CombinedMappingSection( - label="prediction", - mappings={ - "factor_*_prob_0": ActivationVisualizationFieldRef( - source="arrays", key="projected/F*", component=0, group_as="factor" - ), - }, - ), - CombinedMappingSection( - label="ground_truth", - mappings={ - "factor_*_prob_0": ActivationVisualizationFieldRef( - source="belief_states", factor="*", component=0, group_as="factor" - ), - }, - ), - ], - combine_as="data_type", - ), - ) - - df = _build_dataframe( - viz_cfg=config, - metadata_columns=metadata_columns, - arrays=arrays, - scalars={}, - scalar_history={}, - scalar_history_step=None, - belief_states=belief_states, - analysis_concat_layers=False, - layer_names=[f"layer_{i}" for i in range(n_layers)], - ) - - pred_df = df[df["data_type"] == "prediction"] - gt_df = df[df["data_type"] == "ground_truth"] - assert set(np.unique(np.asarray(pred_df["layer"]))) == {f"layer_{i}" for i in range(n_layers)} - assert set(np.unique(np.asarray(gt_df["layer"]))) == {"_no_layer_"} - - def test_full_visualization_pipeline_factored_vs_nonfactored(self): - """Test that factored and non-factored arrays produce same results for single factor.""" - projection_data = np.array([[0.7, 0.2, 0.1], [0.1, 0.8, 0.1], [0.2, 0.2, 0.6]]) - metadata = {"step": np.array([1, 1, 1]), "sample_index": np.arange(3)} - - nf_df = _build_dataframe_for_mappings( - mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="projected", component=0)}, - metadata_columns=metadata, - arrays={"projected/layer_0": projection_data}, - scalars={}, - belief_states=None, 
- analysis_concat_layers=False, - layer_names=["layer_0"], - ) - f_df = _build_dataframe_for_mappings( - mappings={ - "factor_*_prob_0": ActivationVisualizationFieldRef( - source="arrays", key="projected/F*", component=0, group_as="factor" - ) - }, - metadata_columns=metadata, - arrays={"projected/layer_0-F0": projection_data}, - scalars={}, - belief_states=None, - analysis_concat_layers=False, - layer_names=["layer_0"], - ) - - nf_sorted = pd.DataFrame(nf_df).sort_values(by="sample_index") - f_filtered = pd.DataFrame(f_df[f_df["factor"] == "0"]).sort_values(by="sample_index") - np.testing.assert_array_almost_equal( - np.asarray(nf_sorted["prob_0"]), - np.asarray(f_filtered["prob_0"]), - ) - - def test_linear_regression_arrays_match_beliefs(self): - """Test that linear regression arrays closely match original beliefs.""" - n_samples, n_features, n_factors, n_states = 50, 10, 3, 3 - - rng = np.random.default_rng(42) - ds = rng.standard_normal((n_samples, n_features)).astype(np.float32) - beliefs_combined = ds @ rng.standard_normal((n_features, n_factors * n_states)).astype(np.float32) * 0.1 - beliefs_softmax = np.exp(beliefs_combined.reshape(n_samples, n_factors, n_states)) - beliefs_softmax = beliefs_softmax / beliefs_softmax.sum(axis=2, keepdims=True) - - belief_states = tuple(jnp.array(beliefs_softmax[:, f, :]) for f in range(n_factors)) - scalars, arrays = layer_linear_regression( - jnp.array(ds), jnp.ones(n_samples) / n_samples, belief_states, use_svd=True - ) - - for f in range(n_factors): - assert scalars[f"r2/F{f}"] > 0.8, f"Factor {f} R² too low" - diff = np.abs(np.asarray(arrays[f"projected/F{f}"]) - np.asarray(belief_states[f])) - assert diff.max() < 0.2, f"Factor {f} arrays differ too much from beliefs" diff --git a/tests/activations/test_field_expansion.py b/tests/activations/test_field_expansion.py deleted file mode 100644 index b10182b4..00000000 --- a/tests/activations/test_field_expansion.py +++ /dev/null @@ -1,980 +0,0 @@ -"""Tests for field 
expansion and pattern parsing in activation visualizations.""" - -import numpy as np -import pytest - -from simplexity.activations.visualization.dataframe_builders import ( - _extract_base_column_name, -) -from simplexity.activations.visualization.field_resolution import ( - _resolve_belief_states, -) -from simplexity.activations.visualization.pattern_expansion import ( - _expand_array_key_pattern, - _expand_belief_factor_mapping, - _expand_field_mapping, - _get_component_count, - _has_field_pattern, - _has_key_pattern, - _parse_component_spec, -) -from simplexity.activations.visualization.preprocessing import ( - _expand_preprocessing_fields, -) -from simplexity.activations.visualization_configs import ( - ActivationVisualizationDataMapping, - ActivationVisualizationFieldRef, - CombinedMappingSection, -) -from simplexity.exceptions import ConfigValidationError - - -class TestPatternParsing: - """Test pattern detection and parsing.""" - - def test_parse_wildcard(self): - """Test parsing of wildcard component pattern.""" - spec_type, start, end = _parse_component_spec("*") - assert spec_type == "wildcard" - assert start is None - assert end is None - - def test_parse_range(self): - """Test parsing of range component pattern.""" - spec_type, start, end = _parse_component_spec("0...10") - assert spec_type == "range" - assert start == 0 - assert end == 10 - - def test_parse_range_non_zero_start(self): - """Test parsing of range component pattern with non-zero start.""" - spec_type, start, end = _parse_component_spec("5...20") - assert spec_type == "range" - assert start == 5 - assert end == 20 - - def test_parse_single_component(self): - """Test parsing of single component pattern.""" - spec_type, start, end = _parse_component_spec(5) - assert spec_type == "single" - assert start == 5 - assert end is None - - def test_parse_none(self): - """Test parsing of None component pattern.""" - spec_type, start, end = _parse_component_spec(None) - assert spec_type == "none" - 
assert start is None - assert end is None - - def test_parse_invalid_range_wrong_order(self): - """Test parsing of invalid range with start greater than end.""" - with pytest.raises(ConfigValidationError, match="start must be < end"): - _parse_component_spec("10...5") - - def test_parse_invalid_range_equal(self): - """Test parsing of invalid range with start equal to end.""" - with pytest.raises(ConfigValidationError, match="start must be < end"): - _parse_component_spec("5...5") - - def test_parse_invalid_range_format(self): - """Test parsing of invalid range format.""" - with pytest.raises(ConfigValidationError, match="Unrecognized component pattern"): - _parse_component_spec("0..10") - - def test_parse_invalid_range_single_value(self): - """Test parsing of invalid range with single value.""" - with pytest.raises(ConfigValidationError, match="Invalid range"): - _parse_component_spec("10...") - - def test_parse_invalid_range_non_numeric(self): - """Test parsing of invalid range with non-numeric values.""" - with pytest.raises(ConfigValidationError, match="Invalid range"): - _parse_component_spec("a...b") - - def test_parse_invalid_pattern(self): - """Test parsing of completely invalid component pattern.""" - with pytest.raises(ConfigValidationError, match="Unrecognized component pattern"): - _parse_component_spec("invalid") - - def test_is_expansion_pattern_star(self): - """Test detection of wildcard expansion patterns.""" - assert _has_field_pattern("prob_*") - assert _has_field_pattern("*_prob") - assert _has_field_pattern("prob_*_normalized") - - def test_is_expansion_pattern_range(self): - """Test detection of range expansion patterns.""" - assert _has_field_pattern("prob_0...10") - assert _has_field_pattern("pc_5...20") - - def test_is_expansion_pattern_no_pattern(self): - """Test detection of non-expansion patterns.""" - assert not _has_field_pattern("prob_0") - assert not _has_field_pattern("probability") - assert not _has_field_pattern("pc_component") - - 
def test_is_expansion_pattern_multiple_patterns(self): - """Test detection of invalid multiple patterns in field name.""" - with pytest.raises(ConfigValidationError, match="multiple patterns"): - _has_field_pattern("prob_*_layer_*") - - with pytest.raises(ConfigValidationError, match="multiple patterns"): - _has_field_pattern("prob_*_0...5") - - -class TestComponentCount: - """Test component count determination.""" - - def test_get_component_count_arrays_2d(self): - """Test getting component count from 2D arrays.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca") - arrays = {"pca/layer_0": np.random.randn(100, 10)} - count = _get_component_count(ref, "layer_0", arrays, None, False) - assert count == 10 - - def test_get_component_count_arrays_different_sizes(self): - """Test getting component count from 2D arrays with different sizes.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca") - arrays = {"pca/layer_0": np.random.randn(50, 15)} - count = _get_component_count(ref, "layer_0", arrays, None, False) - assert count == 15 - - def test_get_component_count_arrays_concat_layers(self): - """Test getting component count from concatenated layer arrays.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca") - arrays = {"pca": np.random.randn(200, 20)} - count = _get_component_count(ref, "any_layer", arrays, None, True) - assert count == 20 - - def test_get_component_count_arrays_1d_raises(self): - """Test that 1D arrays raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca") - arrays = {"pca/layer_0": np.random.randn(100)} - with pytest.raises(ConfigValidationError, match="1D projection"): - _get_component_count(ref, "layer_0", arrays, None, False) - - def test_get_component_count_arrays_3d_raises(self): - """Test that 3D arrays raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca") - arrays = 
{"pca/layer_0": np.random.randn(10, 10, 10)} - with pytest.raises(ConfigValidationError, match="1D or 2D"): - _get_component_count(ref, "layer_0", arrays, None, False) - - def test_get_component_count_belief_states(self): - """Test getting component count from belief states.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - belief_states = np.random.randn(100, 3) - count = _get_component_count(ref, "layer_0", {}, belief_states, False) - assert count == 3 - - def test_get_component_count_belief_states_different_size(self): - """Test getting component count from belief states with different size.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - belief_states = np.random.randn(50, 7) - count = _get_component_count(ref, "layer_0", {}, belief_states, False) - assert count == 7 - - def test_get_component_count_belief_states_none_raises(self): - """Test that None belief states raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - with pytest.raises(ConfigValidationError, match="not available"): - _get_component_count(ref, "layer_0", {}, None, False) - - def test_get_component_count_belief_states_1d_raises(self): - """Test that 1D belief states raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - belief_states = np.random.randn(100) - with pytest.raises(ConfigValidationError, match="2D"): - _get_component_count(ref, "layer_0", {}, belief_states, False) - - def test_get_component_count_unsupported_source(self): - """Test that unsupported sources raise an error when getting component count.""" - ref = ActivationVisualizationFieldRef(source="scalars", key="some_scalar") - with pytest.raises(ConfigValidationError, match="not supported"): - _get_component_count(ref, "layer_0", {}, None, False) - - -class TestFieldExpansion: - """Test field mapping expansion.""" - - def test_wildcard_expansion_arrays(self): - """Test 
detection of wildcard expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") - arrays = {"pca/layer_0": np.random.randn(50, 3)} - - expanded = _expand_field_mapping("pc_*", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 3 - assert "pc_0" in expanded - assert "pc_1" in expanded - assert "pc_2" in expanded - assert expanded["pc_0"].component == 0 - assert expanded["pc_1"].component == 1 - assert expanded["pc_2"].component == 2 - assert all(r.key == "pca" for r in expanded.values()) - assert all(r.source == "arrays" for r in expanded.values()) - - def test_wildcard_expansion_belief_states(self): - """Test detection of wildcard expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="belief_states", component="*") - belief_states = np.random.randn(50, 4) - - expanded = _expand_field_mapping("belief_*", ref, "layer_0", {}, {}, belief_states, False) - - assert len(expanded) == 4 - assert "belief_0" in expanded - assert "belief_3" in expanded - assert expanded["belief_0"].component == 0 - assert expanded["belief_3"].component == 3 - assert all(r.source == "belief_states" for r in expanded.values()) - - def test_range_expansion(self): - """Test detection of range expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="0...5") - arrays = {"pca/layer_0": np.random.randn(50, 10)} - - expanded = _expand_field_mapping("pc_0...5", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 5 - assert "pc_0" in expanded - assert "pc_4" in expanded - assert "pc_5" not in expanded - assert expanded["pc_0"].component == 0 - assert expanded["pc_4"].component == 4 - - def test_range_expansion_with_offset(self): - """Test detection of range expansion patterns with offset.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="projected", component="2...5") - arrays = {"projected/layer_0": np.random.randn(50, 10)} - - expanded = 
_expand_field_mapping("prob_2...5", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 3 - assert "prob_2" in expanded - assert "prob_3" in expanded - assert "prob_4" in expanded - assert "prob_5" not in expanded - assert expanded["prob_2"].component == 2 - assert expanded["prob_4"].component == 4 - - def test_wildcard_in_middle_of_name(self): - """Test detection of wildcard expansion patterns.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") - arrays = {"pca/layer_0": np.random.randn(50, 3)} - - expanded = _expand_field_mapping("component_*_normalized", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 3 - assert "component_0_normalized" in expanded - assert "component_1_normalized" in expanded - assert "component_2_normalized" in expanded - - def test_no_expansion_needed(self): - """Test that no expansion occurs when component is a specific integer.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component=0) - arrays = {"pca/layer_0": np.random.randn(50, 5)} - - expanded = _expand_field_mapping("pc_0", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 1 - assert "pc_0" in expanded - assert expanded["pc_0"].component == 0 - - def test_no_expansion_none_component(self): - """Test that no expansion occurs when component is None.""" - ref = ActivationVisualizationFieldRef(source="metadata", key="step") - arrays = {} - - expanded = _expand_field_mapping("step", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 1 - assert "step" in expanded - assert expanded["step"].component is None - - def test_field_pattern_without_component_pattern_raises(self): - """Test that a field pattern without a component pattern raises an error.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component=0) - arrays = {"pca/layer_0": np.random.randn(50, 5)} - - with pytest.raises(ConfigValidationError, match="has pattern but component 
is not"): - _expand_field_mapping("pc_*", ref, "layer_0", arrays, {}, None, False) - - def test_component_pattern_without_field_pattern_raises(self): - """Test that a component pattern without a field pattern raises an error.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") - arrays = {"pca/layer_0": np.random.randn(50, 5)} - - with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("pc_0", ref, "layer_0", arrays, {}, None, False) - - def test_range_exceeds_available_components(self): - """Test that a range exceeding available components raises an error.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="0...20") - arrays = {"pca/layer_0": np.random.randn(50, 10)} - - with pytest.raises(ConfigValidationError, match="exceeds available components"): - _expand_field_mapping("pc_0...20", ref, "layer_0", arrays, {}, None, False) - - def test_range_partially_exceeds_available_components(self): - """Test that a range partially exceeding available components raises an error.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="5...15") - arrays = {"pca/layer_0": np.random.randn(50, 10)} - - with pytest.raises(ConfigValidationError, match="exceeds available components"): - _expand_field_mapping("pc_5...15", ref, "layer_0", arrays, {}, None, False) - - def test_expansion_preserves_reducer(self): - """Test that expansion preserves the reducer attribute.""" - ref = ActivationVisualizationFieldRef(source="belief_states", component="*", reducer="l2_norm") - belief_states = np.random.randn(50, 3) - - expanded = _expand_field_mapping("belief_*", ref, "layer_0", {}, {}, belief_states, False) - - assert all(r.reducer == "l2_norm" for r in expanded.values()) - - def test_expansion_with_concat_layers(self): - """Test expansion when arrays are concatenated across layers.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", 
component="*") - arrays = {"pca": np.random.randn(50, 5)} - - expanded = _expand_field_mapping("pc_*", ref, "layer_0", arrays, {}, None, True) - - assert len(expanded) == 5 - assert all(f"pc_{i}" in expanded for i in range(5)) - - -class TestFieldRefValidation: - """Test ActivationVisualizationFieldRef validation.""" - - def test_valid_wildcard_arrays(self): - """Test that wildcard patterns in arrays are valid.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="*") - assert ref.component == "*" - - def test_valid_range_arrays(self): - """Test that range patterns in arrays are valid.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="pca", component="0...10") - assert ref.component == "0...10" - - def test_valid_wildcard_belief_states(self): - """Test that wildcard patterns in belief_states are valid.""" - ref = ActivationVisualizationFieldRef(source="belief_states", component="*") - assert ref.component == "*" - - def test_invalid_pattern_format(self): - """Test that invalid pattern formats raise a ConfigValidationError.""" - with pytest.raises(ConfigValidationError, match="invalid"): - ActivationVisualizationFieldRef(source="arrays", key="pca", component="invalid_pattern") - - def test_invalid_range_wrong_separator(self): - """Test that invalid range separators raise a ConfigValidationError.""" - with pytest.raises(ConfigValidationError, match="invalid"): - ActivationVisualizationFieldRef(source="arrays", key="pca", component="0..10") - - def test_pattern_on_unsupported_source_scalars(self): - """Test that pattern expansion is not supported for scalars source.""" - with pytest.raises(ConfigValidationError, match="only supported for arrays/belief_states"): - ActivationVisualizationFieldRef(source="scalars", key="some_scalar", component="*") - - def test_pattern_on_unsupported_source_metadata(self): - """Test that pattern expansion is not supported for metadata source.""" - with pytest.raises(ConfigValidationError, 
match="only supported for arrays/belief_states"): - ActivationVisualizationFieldRef(source="metadata", key="step", component="*") - - def test_pattern_on_unsupported_source_weights(self): - """Test that pattern expansion is not supported for weights source.""" - with pytest.raises(ConfigValidationError, match="only supported for arrays/belief_states"): - ActivationVisualizationFieldRef(source="weights", component="*") - - -class TestPreprocessingFieldExpansion: - """Test wildcard expansion for preprocessing input_fields.""" - - def test_wildcard_expansion(self): - """Test that wildcard patterns in preprocessing fields are expanded correctly.""" - columns = ["belief_0", "belief_1", "belief_2", "belief_3", "step", "layer"] - patterns = ["belief_*"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["belief_0", "belief_1", "belief_2", "belief_3"] - - def test_range_expansion(self): - """Test that range patterns in preprocessing fields are expanded correctly.""" - columns = ["prob_0", "prob_1", "prob_2", "prob_3", "prob_4", "prob_5"] - patterns = ["prob_0...3"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["prob_0", "prob_1", "prob_2"] - - def test_range_expansion_with_offset(self): - """Test that range patterns with offsets in preprocessing fields are expanded correctly.""" - columns = ["pc_0", "pc_1", "pc_2", "pc_3", "pc_4", "pc_5", "pc_6"] - patterns = ["pc_2...5"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["pc_2", "pc_3", "pc_4"] - - def test_mixed_patterns_and_literals(self): - """Test that mixed wildcard patterns and literal fields are expanded correctly.""" - columns = ["belief_0", "belief_1", "belief_2", "prob_0", "prob_1", "step"] - patterns = ["belief_*", "step"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["belief_0", "belief_1", "belief_2", "step"] - - def test_multiple_wildcards(self): - """Test 
that multiple wildcard patterns in preprocessing fields are expanded correctly.""" - columns = ["belief_0", "belief_1", "prob_0", "prob_1", "prob_2"] - patterns = ["belief_*", "prob_*"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["belief_0", "belief_1", "prob_0", "prob_1", "prob_2"] - - def test_wildcard_no_matches_raises(self): - """Test that a wildcard pattern with no matches raises a ConfigValidationError.""" - columns = ["step", "layer", "sequence"] - patterns = ["belief_*"] - - with pytest.raises(ConfigValidationError, match="did not match any columns"): - _expand_preprocessing_fields(patterns, columns) - - def test_range_missing_column_raises(self): - """Test that a range pattern with missing columns raises a ConfigValidationError.""" - columns = ["prob_0", "prob_1"] # Missing prob_2 - patterns = ["prob_0...3"] - - with pytest.raises(ConfigValidationError, match="column not found"): - _expand_preprocessing_fields(patterns, columns) - - def test_literal_fields_preserved(self): - """Test that literal fields in preprocessing fields are preserved.""" - columns = ["field_a", "field_b", "field_c"] - patterns = ["field_a", "field_c"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["field_a", "field_c"] - - def test_wildcard_sorts_numerically(self): - """Test that wildcard patterns in preprocessing fields are sorted numerically.""" - columns = ["item_10", "item_2", "item_1", "item_20"] - patterns = ["item_*"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - # Should be sorted by numeric value, not lexicographic - assert expanded == ["item_1", "item_2", "item_10", "item_20"] - - def test_pattern_in_middle_of_name(self): - """Test that patterns in the middle of field names are expanded correctly.""" - columns = ["component_0_norm", "component_1_norm", "component_2_norm"] - patterns = ["component_*_norm"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - 
assert expanded == ["component_0_norm", "component_1_norm", "component_2_norm"] - - def test_empty_patterns_list(self): - """Test that an empty patterns list returns an empty list.""" - columns = ["field_a", "field_b"] - patterns = [] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert not expanded - - def test_range_pattern_in_middle(self): - """Test that range patterns in the middle of field names are expanded correctly.""" - columns = ["feature_0_scaled", "feature_1_scaled", "feature_2_scaled"] - patterns = ["feature_0...2_scaled"] - - expanded = _expand_preprocessing_fields(patterns, columns) - - assert expanded == ["feature_0_scaled", "feature_1_scaled"] - - -class TestKeyPatternExpansion: - """Test projection key pattern expansion (e.g., projected/F*).""" - - def test_has_key_pattern_wildcard(self): - """Test that _has_key_pattern detects wildcard patterns correctly.""" - assert _has_key_pattern("projected/F*") - assert _has_key_pattern("projected/*") - assert _has_key_pattern("factor_*") - - def test_has_key_pattern_range(self): - """Test that _has_key_pattern detects range patterns correctly.""" - assert _has_key_pattern("projected/F0...3") - assert _has_key_pattern("projected/F0...5") - - def test_has_key_pattern_none(self): - """Test that _has_key_pattern returns False for non-pattern keys.""" - assert not _has_key_pattern(None) - assert not _has_key_pattern("projected") - assert not _has_key_pattern("projected/F0") - - def test_has_key_pattern_multiple_raises(self): - """Test that _has_key_pattern raises an error for multiple patterns.""" - with pytest.raises(ConfigValidationError, match="multiple patterns"): - _has_key_pattern("projected/L*/F*") - - def test_expand_array_key_pattern_wildcard(self): - """Test that _expand_array_key_pattern expands wildcard patterns correctly.""" - arrays = { - "projected/layer_0-F0": np.random.randn(10, 3), - "projected/layer_0-F1": np.random.randn(10, 3), - "projected/layer_0-F2": 
np.random.randn(10, 3), - } - - result = _expand_array_key_pattern("projected/F*", "layer_0", arrays, False) - - assert len(result) == 3 - assert result["0"] == "projected/F0" - assert result["1"] == "projected/F1" - assert result["2"] == "projected/F2" - - def test_expand_array_key_pattern_range(self): - """Test that _expand_array_key_pattern expands range patterns correctly.""" - arrays = { - "projected/layer_0-F0": np.random.randn(10, 3), - "projected/layer_0-F1": np.random.randn(10, 3), - "projected/layer_0-F2": np.random.randn(10, 3), - } - - result = _expand_array_key_pattern("projected/F0...2", "layer_0", arrays, False) - - assert len(result) == 2 - assert result["0"] == "projected/F0" - assert result["1"] == "projected/F1" - - def test_expand_array_key_pattern_concat_layers(self): - """Test that _expand_array_key_pattern works with concatenated layers.""" - arrays = { - "projected/F0": np.random.randn(10, 3), - "projected/F1": np.random.randn(10, 3), - } - - result = _expand_array_key_pattern("projected/F*", "any_layer", arrays, True) - - assert len(result) == 2 - assert result["0"] == "projected/F0" - assert result["1"] == "projected/F1" - - def test_expand_array_key_pattern_no_matches_raises(self): - """Test that _expand_array_key_pattern raises an error when no keys match.""" - arrays = {"pca/layer_0": np.random.randn(10, 3)} - - with pytest.raises(ConfigValidationError, match="No array keys found"): - _expand_array_key_pattern("projected/F*", "layer_0", arrays, False) - - def test_field_mapping_with_key_pattern(self): - """Test that field mappings with key patterns are expanded correctly.""" - ref = ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=0, - group_as="factor", - ) - arrays = { - "projected/layer_0-F0": np.random.randn(10, 3), - "projected/layer_0-F1": np.random.randn(10, 3), - } - - expanded = _expand_field_mapping("factor_*_prob", ref, "layer_0", arrays, {}, None, False) - - assert len(expanded) == 2 - 
assert "factor_0_prob" in expanded - assert "factor_1_prob" in expanded - assert expanded["factor_0_prob"].key == "projected/F0" - assert expanded["factor_1_prob"].key == "projected/F1" - assert expanded["factor_0_prob"]._group_value == "0" # pylint: disable=protected-access - assert expanded["factor_1_prob"]._group_value == "1" # pylint: disable=protected-access - assert expanded["factor_0_prob"].group_as == "factor" - - def test_field_mapping_with_key_and_component_patterns(self): - """Test that field mappings with key and component patterns are expanded correctly.""" - ref = ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component="*", - group_as="factor", - ) - arrays = { - "projected/layer_0-F0": np.random.randn(10, 3), - "projected/layer_0-F1": np.random.randn(10, 3), - } - - expanded = _expand_field_mapping("factor_*_prob_*", ref, "layer_0", arrays, {}, None, False) - - # Cross-product: 2 factors * 3 components = 6 expanded fields - assert len(expanded) == 6 - assert "factor_0_prob_0" in expanded - assert "factor_0_prob_1" in expanded - assert "factor_0_prob_2" in expanded - assert "factor_1_prob_0" in expanded - assert "factor_1_prob_1" in expanded - assert "factor_1_prob_2" in expanded - - # Check that components are correct - assert expanded["factor_0_prob_0"].component == 0 - assert expanded["factor_0_prob_1"].component == 1 - assert expanded["factor_1_prob_2"].component == 2 - - # Check that keys and group values are correct - assert expanded["factor_0_prob_0"].key == "projected/F0" - assert expanded["factor_1_prob_0"].key == "projected/F1" - assert expanded["factor_0_prob_0"]._group_value == "0" # pylint: disable=protected-access - assert expanded["factor_1_prob_0"]._group_value == "1" # pylint: disable=protected-access - - def test_key_pattern_without_field_pattern_raises(self): - """Test that a key pattern without a field pattern raises an error.""" - ref = ActivationVisualizationFieldRef( - source="arrays", - 
key="projected/F*", - component=0, - group_as="factor", - ) - arrays = {"projected/layer_0-F0": np.random.randn(10, 3)} - - with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("prob_0", ref, "layer_0", arrays, {}, None, False) - - -class TestGroupAsValidation: - """Test group_as parameter validation.""" - - def test_key_pattern_requires_group_as(self): - """Test that a key pattern requires the group_as parameter.""" - with pytest.raises(ConfigValidationError, match="requires `group_as`"): - ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=0, - ) - - def test_group_as_only_for_arrays(self): - """Test that group_as is only valid for arrays source.""" - with pytest.raises(ConfigValidationError, match="only supported for arrays"): - ActivationVisualizationFieldRef( - source="scalars", - key="some_key", - group_as="factor", - ) - - def test_valid_key_pattern_with_group_as(self): - """Test that a valid key pattern with group_as is accepted.""" - ref = ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=0, - group_as="factor", - ) - assert ref.group_as == "factor" - assert ref.key == "projected/F*" - - def test_valid_key_pattern_with_list_group_as(self): - """Test that a valid key pattern with list group_as is accepted.""" - ref = ActivationVisualizationFieldRef( - source="arrays", - key="projected/F*", - component=0, - group_as=["factor", "layer"], - ) - assert ref.group_as == ["factor", "layer"] - - -class TestExtractBaseColumnName: - """Test base column name extraction for group expansion.""" - - def test_extract_prefix_pattern(self): - """Test that base column names are correctly extracted from prefixed patterns.""" - assert _extract_base_column_name("factor_0_prob_0", "0") == "prob_0" - assert _extract_base_column_name("factor_1_prob_0", "1") == "prob_0" - assert _extract_base_column_name("factor_2_belief", "2") == "belief" - - def 
test_extract_suffix_only_pattern_returns_original(self): - """Test that base column names are unchanged when no prefix pattern is present.""" - # Columns without a _N_suffix pattern are returned unchanged - # This ensures we don't strip meaningful parts of column names - assert _extract_base_column_name("factor_0", "0") == "factor_0" - assert _extract_base_column_name("group_1", "1") == "group_1" - - def test_no_pattern_match_returns_original(self): - """Test that base column names are unchanged when no pattern match is found.""" - assert _extract_base_column_name("prob_0", "0") == "prob_0" - assert _extract_base_column_name("some_column", "1") == "some_column" - - -class TestCombinedMappingSection: - """Test CombinedMappingSection validation.""" - - def test_valid_combined_section(self): - """Test that a valid CombinedMappingSection is accepted.""" - section = CombinedMappingSection( - label="prediction", - mappings={ - "prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0), - }, - ) - assert section.label == "prediction" - assert len(section.mappings) == 1 - - def test_empty_mappings_raises(self): - """Test that an empty mappings dictionary raises a ConfigValidationError.""" - with pytest.raises(ConfigValidationError, match="must have at least one mapping"): - CombinedMappingSection(label="empty", mappings={}) - - -class TestCombinedDataMapping: - """Test ActivationVisualizationDataMapping with combined sections.""" - - def test_valid_combined_mapping(self): - """Test that a valid ActivationVisualizationDataMapping with combined sections is accepted.""" - mapping = ActivationVisualizationDataMapping( - combined=[ - CombinedMappingSection( - label="prediction", - mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0)}, - ), - CombinedMappingSection( - label="ground_truth", - mappings={"prob_0": ActivationVisualizationFieldRef(source="belief_states", component=0)}, - ), - ], - 
combine_as="data_type", - ) - assert mapping.combined is not None - assert len(mapping.combined) == 2 - assert mapping.combine_as == "data_type" - - def test_combined_without_combine_as_raises(self): - """Test that an ActivationVisualizationDataMapping without 'combine_as' raises a ConfigValidationError.""" - with pytest.raises(ConfigValidationError, match="'combine_as' is required"): - ActivationVisualizationDataMapping( - combined=[ - CombinedMappingSection( - label="prediction", - mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0)}, - ), - ], - ) - - def test_combined_with_mappings_raises(self): - """Test that a DataMapping with both 'mappings' and 'combined' raises ConfigValidationError.""" - with pytest.raises(ConfigValidationError, match="Cannot use both"): - ActivationVisualizationDataMapping( - mappings={"prob_0": ActivationVisualizationFieldRef(source="arrays", key="proj", component=0)}, - combined=[ - CombinedMappingSection( - label="prediction", - mappings={"prob_1": ActivationVisualizationFieldRef(source="arrays", key="proj", component=1)}, - ), - ], - combine_as="data_type", - ) - - -class TestBeliefStateFactorPatterns: - """Test belief state factor pattern expansion for 3D belief states.""" - - def test_factor_field_only_for_belief_states(self): - """Test that factor field is only supported for belief_states source.""" - with pytest.raises(ConfigValidationError, match="only supported for belief_states"): - ActivationVisualizationFieldRef( - source="arrays", - key="proj", - factor=0, - ) - - def test_factor_pattern_requires_group_as(self): - """Test that factor patterns require the group_as parameter.""" - with pytest.raises(ConfigValidationError, match="requires `group_as`"): - ActivationVisualizationFieldRef( - source="belief_states", - factor="*", - component=0, - ) - - def test_valid_factor_with_group_as(self): - """Test that a valid factor pattern with group_as is accepted.""" - ref = 
ActivationVisualizationFieldRef( - source="belief_states", - factor="*", - component=0, - group_as="factor", - ) - assert ref.factor == "*" - assert ref.group_as == "factor" - - def test_valid_single_factor(self): - """Test that a valid single factor is accepted.""" - ref = ActivationVisualizationFieldRef( - source="belief_states", - factor=0, - component=0, - ) - assert ref.factor == 0 - - def test_expand_belief_factor_mapping_wildcard(self): - """Test expanding belief factor mapping with wildcard pattern.""" - ref = ActivationVisualizationFieldRef( - source="belief_states", - factor="*", - component=0, - group_as="factor", - ) - # 3D beliefs: (samples, factors, states) - beliefs = np.random.randn(10, 3, 4) - - expanded = _expand_belief_factor_mapping("factor_*_prob", ref, beliefs) - - assert len(expanded) == 3 - assert "factor_0_prob" in expanded - assert "factor_1_prob" in expanded - assert "factor_2_prob" in expanded - assert expanded["factor_0_prob"].factor == 0 - assert expanded["factor_1_prob"].factor == 1 - assert expanded["factor_2_prob"].factor == 2 - assert expanded["factor_0_prob"]._group_value == "0" # pylint: disable=protected-access - assert expanded["factor_1_prob"]._group_value == "1" # pylint: disable=protected-access - - def test_expand_belief_factor_mapping_range(self): - """Test expanding belief factor mapping with range pattern.""" - ref = ActivationVisualizationFieldRef( - source="belief_states", - factor="0...2", - component=0, - group_as="factor", - ) - beliefs = np.random.randn(10, 5, 4) - - expanded = _expand_belief_factor_mapping("factor_0...2_prob", ref, beliefs) - - assert len(expanded) == 2 - assert "factor_0_prob" in expanded - assert "factor_1_prob" in expanded - assert "factor_2_prob" not in expanded - - def test_expand_belief_factor_and_component_patterns(self): - """Test expanding belief factor mapping with both factor and component patterns.""" - ref = ActivationVisualizationFieldRef( - source="belief_states", - factor="*", - 
component="*", - group_as="factor", - ) - beliefs = np.random.randn(10, 2, 3) - - expanded = _expand_belief_factor_mapping("factor_*_state_*", ref, beliefs) - - # Cross-product: 2 factors * 3 states = 6 - assert len(expanded) == 6 - assert "factor_0_state_0" in expanded - assert "factor_0_state_1" in expanded - assert "factor_0_state_2" in expanded - assert "factor_1_state_0" in expanded - assert "factor_1_state_1" in expanded - assert "factor_1_state_2" in expanded - assert expanded["factor_0_state_0"].factor == 0 - assert expanded["factor_0_state_0"].component == 0 - assert expanded["factor_1_state_2"].factor == 1 - assert expanded["factor_1_state_2"].component == 2 - - def test_expand_belief_factor_mapping_2d_raises(self): - """Test that expanding belief factor mapping with 2D beliefs raises an error.""" - ref = ActivationVisualizationFieldRef( - source="belief_states", - factor="*", - component=0, - group_as="factor", - ) - beliefs = np.random.randn(10, 4) # 2D, not 3D - - with pytest.raises(ConfigValidationError, match="require 3D beliefs"): - _expand_belief_factor_mapping("factor_*_prob", ref, beliefs) - - def test_expand_belief_factor_range_exceeds_raises(self): - """Test that expanding belief factor mapping with out-of-bounds range raises an error.""" - ref = ActivationVisualizationFieldRef( - source="belief_states", - factor="0...10", - component=0, - group_as="factor", - ) - beliefs = np.random.randn(10, 3, 4) - - with pytest.raises(ConfigValidationError, match="exceeds available factors"): - _expand_belief_factor_mapping("factor_0...10_prob", ref, beliefs) - - -class TestResolveBeliefStates: - """Test belief state resolution with factor dimension.""" - - def test_resolve_2d_belief_states(self): - """Test resolving 2D belief states without factor dimension.""" - ref = ActivationVisualizationFieldRef(source="belief_states", component=1) - beliefs = np.array([[0.1, 0.2, 0.7], [0.3, 0.4, 0.3]]) - - result = _resolve_belief_states(beliefs, ref) - - 
np.testing.assert_array_almost_equal(result, [0.2, 0.4]) - - def test_resolve_3d_belief_states_with_factor(self): - """Test resolving 3D belief states with specified factor.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=1, component=2) - # Shape: (samples=2, factors=3, states=4) - beliefs = np.random.randn(2, 3, 4) - - result = _resolve_belief_states(beliefs, ref) - - # Should select factor 1, component 2 - np.testing.assert_array_almost_equal(result, beliefs[:, 1, 2]) - - def test_resolve_3d_without_factor_raises(self): - """Test resolving 3D belief states without specifying factor raises an error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", component=0) - beliefs = np.random.randn(10, 3, 4) - - with pytest.raises(ConfigValidationError, match="no `factor` was specified"): - _resolve_belief_states(beliefs, ref) - - def test_resolve_2d_with_factor_raises(self): - """Test resolving 2D belief states with factor specified raises an error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, component=0) - beliefs = np.random.randn(10, 4) - - with pytest.raises(ConfigValidationError, match="2D but `factor=0` was specified"): - _resolve_belief_states(beliefs, ref) - - def test_resolve_3d_factor_out_of_bounds_raises(self): - """Test resolving 3D belief states with out-of-bounds factor raises an error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=5, component=0) - beliefs = np.random.randn(10, 3, 4) - - with pytest.raises(ConfigValidationError, match="out of bounds"): - _resolve_belief_states(beliefs, ref) - - def test_resolve_3d_with_reducer_argmax(self): - """Test resolving 3D belief states with argmax reducer.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, reducer="argmax") - beliefs = np.array([[[0.1, 0.2, 0.7], [0.3, 0.4, 0.3]], [[0.8, 0.1, 0.1], [0.2, 0.6, 0.2]]]) - - result = _resolve_belief_states(beliefs, ref) - - # Factor 0: [[0.1, 
0.2, 0.7], [0.8, 0.1, 0.1]] -> argmax = [2, 0] - np.testing.assert_array_equal(result, [2, 0]) - - def test_resolve_3d_with_reducer_l2_norm(self): - """Test resolving 3D belief states with l2_norm reducer.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, reducer="l2_norm") - beliefs = np.array([[[3.0, 4.0, 0.0]], [[1.0, 0.0, 0.0]]]) - - result = _resolve_belief_states(beliefs, ref) - - np.testing.assert_array_almost_equal(result, [5.0, 1.0]) diff --git a/tests/activations/test_scalar_wildcard_expansion.py b/tests/activations/test_scalar_wildcard_expansion.py deleted file mode 100644 index 8a68450d..00000000 --- a/tests/activations/test_scalar_wildcard_expansion.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Tests for scalar wildcard expansion in activation visualizations.""" - -import pytest - -from simplexity.activations.visualization.pattern_expansion import _expand_scalar_keys -from simplexity.exceptions import ConfigValidationError - - -class TestScalarWildcardExpansion: - """Tests for _expand_scalar_keys function.""" - - def test_scalar_no_pattern_returns_identity(self): - """Scalars without patterns should return as-is.""" - scalars = {"layer_0_rmse": 0.5} - result = _expand_scalar_keys("rmse", "layer_0_rmse", scalars) - - assert result == {"rmse": "layer_0_rmse"} - - def test_scalar_wildcard_expansion(self): - """Wildcard in scalar key should expand to all matching keys.""" - scalars = { - "cumvar_0": 0.8, - "cumvar_1": 0.9, - "cumvar_2": 0.95, - "cumvar_3": 0.99, - "other_metric": 1.0, - } - result = _expand_scalar_keys("cumvar_*", "cumvar_*", scalars) - - assert len(result) == 4 - assert result == { - "cumvar_0": "cumvar_0", - "cumvar_1": "cumvar_1", - "cumvar_2": "cumvar_2", - "cumvar_3": "cumvar_3", - } - - def test_scalar_wildcard_with_prefix_suffix(self): - """Wildcard pattern with prefix and suffix should match correctly.""" - scalars = { - "layer_0_cumvar_0": 0.8, - "layer_0_cumvar_1": 0.9, - "layer_0_cumvar_2": 0.95, - 
"layer_1_cumvar_0": 0.7, - "other": 1.0, - } - result = _expand_scalar_keys("cv_*", "layer_0_cumvar_*", scalars) - - assert len(result) == 3 - assert result == { - "cv_0": "layer_0_cumvar_0", - "cv_1": "layer_0_cumvar_1", - "cv_2": "layer_0_cumvar_2", - } - - def test_scalar_range_expansion(self): - """Range pattern should expand to specified indices.""" - scalars = { - "cumvar_0": 0.8, - "cumvar_1": 0.9, - "cumvar_2": 0.95, - "cumvar_3": 0.99, - "cumvar_4": 0.995, - } - result = _expand_scalar_keys("cumvar_1...4", "cumvar_1...4", scalars) - - assert len(result) == 3 - assert result == { - "cumvar_1": "cumvar_1", - "cumvar_2": "cumvar_2", - "cumvar_3": "cumvar_3", - } - - def test_scalar_wildcard_no_matches_raises_error(self): - """Wildcard with no matches should raise an error.""" - scalars = {"other_metric": 1.0} - - with pytest.raises(ConfigValidationError, match="No keys found matching pattern"): - _expand_scalar_keys("cumvar_*", "cumvar_*", scalars) - - def test_scalar_wildcard_requires_key_pattern(self): - """Wildcard expansion without a key should raise an error.""" - scalars = {"metric": 1.0} - - with pytest.raises(ConfigValidationError, match="Scalar wildcard expansion requires a key pattern"): - _expand_scalar_keys("field_*", None, scalars) - - def test_scalar_expansion_sorts_indices(self): - """Expanded scalar keys should be sorted by index.""" - scalars = { - "var_5": 0.5, - "var_1": 0.1, - "var_3": 0.3, - "var_2": 0.2, - } - result = _expand_scalar_keys("v_*", "var_*", scalars) - - # Check that keys are in sorted order - keys = list(result.keys()) - assert keys == ["v_1", "v_2", "v_3", "v_5"] - - def test_scalar_wildcard_field_name_pattern_mismatch(self): - """Field pattern but no key pattern should be handled in parent function.""" - # This test verifies that _expand_scalar_keys expects both patterns together - # The validation happens in _expand_field_mapping, not here - scalars = {"metric": 1.0} - - # _expand_scalar_keys just returns identity if no 
pattern in key - result = _expand_scalar_keys("field_*", "metric", scalars) - assert result == {"field_*": "metric"} - - def test_scalar_range_invalid_format_returns_identity(self): - """Invalid range format (two dots instead of three) should return as-is.""" - scalars = {"metric_1..4": 1.0} - - # Two dots instead of three - not a valid range pattern, returns identity - result = _expand_scalar_keys("field_1..4", "metric_1..4", scalars) - assert result == {"field_1..4": "metric_1..4"} - - def test_scalar_wildcard_with_non_numeric_ignored(self): - """Keys with non-numeric wildcards should be ignored.""" - scalars = { - "metric_0": 0.0, - "metric_1": 0.1, - "metric_abc": 0.2, - "metric_xyz": 0.3, - } - result = _expand_scalar_keys("m_*", "metric_*", scalars) - - # Only numeric indices should be included - assert len(result) == 2 - assert result == { - "m_0": "metric_0", - "m_1": "metric_1", - } - - def test_scalar_expansion_deduplicates_indices(self): - """Duplicate indices should be deduplicated.""" - # In practice this wouldn't happen with scalar keys, but test for robustness - scalars = { - "var_1": 0.1, - "var_01": 0.1, # This would match as index 1 if not carefully handled - } - # This test verifies basic behavior - exact matching prevents this issue - result = _expand_scalar_keys("v_*", "var_*", scalars) - - # Should only match exact numeric patterns - assert "v_1" in result - - def test_scalar_range_expansion_with_field_pattern(self): - """Range in both field and key should expand correctly.""" - scalars = { - "metric_0": 0.0, - "metric_1": 0.1, - "metric_2": 0.2, - "metric_3": 0.3, - } - result = _expand_scalar_keys("m_0...3", "metric_0...3", scalars) - - assert len(result) == 3 - assert result == { - "m_0": "metric_0", - "m_1": "metric_1", - "m_2": "metric_2", - } - - def test_scalar_wildcard_complex_key_pattern(self): - """Complex patterns with multiple underscores should work.""" - scalars = { - "layer_0_pca_cumvar_0": 0.8, - "layer_0_pca_cumvar_1": 0.9, - 
"layer_0_pca_cumvar_2": 0.95, - "layer_1_pca_cumvar_0": 0.7, - } - result = _expand_scalar_keys("pc_cv_*", "layer_0_pca_cumvar_*", scalars) - - assert len(result) == 3 - assert result == { - "pc_cv_0": "layer_0_pca_cumvar_0", - "pc_cv_1": "layer_0_pca_cumvar_1", - "pc_cv_2": "layer_0_pca_cumvar_2", - } diff --git a/tests/activations/test_visualization_modules.py b/tests/activations/test_visualization_modules.py deleted file mode 100644 index 3ff0a074..00000000 --- a/tests/activations/test_visualization_modules.py +++ /dev/null @@ -1,969 +0,0 @@ -"""Tests for visualization submodules to improve coverage.""" - -from typing import Any, cast - -import numpy as np -import pandas as pd -import pytest - -from simplexity.activations.visualization.data_structures import PreparedMetadata -from simplexity.activations.visualization.dataframe_builders import ( - _apply_sampling, - _build_dataframe, - _build_dataframe_for_mappings, - _build_metadata_columns, - _build_scalar_dataframe, - _build_scalar_series_dataframe, - _extract_base_column_name, - _infer_scalar_series_indices, - _scalar_series_metadata, -) -from simplexity.activations.visualization.field_resolution import ( - _lookup_array, - _lookup_scalar_value, - _maybe_component, - _resolve_belief_states, - _resolve_field, -) -from simplexity.activations.visualization.pattern_expansion import ( - _expand_array_key_pattern, - _expand_belief_factor_mapping, - _expand_field_mapping, - _expand_pattern_to_indices, - _expand_scalar_pattern_ranges, - _get_component_count, - _parse_component_spec, -) -from simplexity.activations.visualization.preprocessing import ( - _apply_preprocessing, - _combine_rgb, - _expand_preprocessing_fields, - _project_to_simplex, -) -from simplexity.activations.visualization_configs import ( - ActivationVisualizationConfig, - ActivationVisualizationDataMapping, - ActivationVisualizationFieldRef, - ActivationVisualizationPreprocessStep, - CombinedMappingSection, - SamplingConfig, - ScalarSeriesMapping, 
-) -from simplexity.exceptions import ConfigValidationError - - -# pylint: disable=too-many-public-methods -class TestFieldResolution: - """Tests for field_resolution.py functions.""" - - def test_lookup_array_not_found(self): - """Test that missing array raises error.""" - arrays = {"other/layer_0": np.array([1, 2, 3])} - with pytest.raises(ConfigValidationError, match="not available for layer"): - _lookup_array(arrays, "layer_0", "missing", False) - - def test_lookup_array_concat_layers_exact_match(self): - """Test exact key match with concat_layers.""" - arrays = {"my_key": np.array([1, 2, 3])} - result = _lookup_array(arrays, "layer_0", "my_key", True) - np.testing.assert_array_equal(result, [1, 2, 3]) - - def test_lookup_array_concat_layers_prefix_match(self): - """Test prefix match with concat_layers.""" - arrays = {"my_key/Lcat": np.array([4, 5, 6])} - result = _lookup_array(arrays, "layer_0", "my_key", True) - np.testing.assert_array_equal(result, [4, 5, 6]) - - def test_lookup_scalar_value_concat_layers_exact(self): - """Test scalar lookup with concat_layers exact match.""" - scalars = {"my_scalar": 0.5} - result = _lookup_scalar_value(scalars, "layer_0", "my_scalar", True) - assert result == 0.5 - - def test_lookup_scalar_value_concat_layers_prefix(self): - """Test scalar lookup with concat_layers prefix match.""" - scalars = {"my_scalar/Lcat": 0.7} - result = _lookup_scalar_value(scalars, "layer_0", "my_scalar", True) - assert result == 0.7 - - def test_lookup_scalar_value_not_found(self): - """Test that missing scalar raises error.""" - with pytest.raises(ConfigValidationError, match="not available for layer"): - _lookup_scalar_value({"other/layer_0": 1.0}, "layer_0", "missing", False) - - def test_maybe_component_1d_with_component(self): - """Test that 1D array with component raises error.""" - with pytest.raises(ConfigValidationError, match="invalid for 1D"): - _maybe_component(np.array([1, 2, 3]), 0) - - def test_maybe_component_wrong_dim(self): - 
"""Test that 3D array raises error.""" - with pytest.raises(ConfigValidationError, match="must be 1D or 2D"): - _maybe_component(np.ones((2, 3, 4)), None) - - def test_maybe_component_2d_no_component(self): - """Test that 2D array without component raises error.""" - with pytest.raises(ConfigValidationError, match="must specify `component`"): - _maybe_component(np.ones((3, 4)), None) - - def test_maybe_component_out_of_bounds(self): - """Test that out of bounds component raises error.""" - with pytest.raises(ConfigValidationError, match="out of bounds"): - _maybe_component(np.ones((3, 4)), 10) - - def test_resolve_belief_states_wrong_dim(self): - """Test that 1D belief states raise error.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - with pytest.raises(ConfigValidationError, match="must be 2D or 3D"): - _resolve_belief_states(np.array([1, 2, 3]), ref) - - def test_resolve_belief_states_3d_no_factor(self): - """Test that 3D beliefs without factor raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=None) - with pytest.raises(ConfigValidationError, match="no `factor` was specified"): - _resolve_belief_states(np.ones((5, 3, 4)), ref) - - def test_resolve_belief_states_2d_with_factor(self): - """Test that 2D beliefs with factor raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=0) - with pytest.raises(ConfigValidationError, match="Factor selection requires 3D"): - _resolve_belief_states(np.ones((5, 4)), ref) - - def test_resolve_belief_states_factor_out_of_bounds(self): - """Test that out of bounds factor raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=10) - with pytest.raises(ConfigValidationError, match="out of bounds"): - _resolve_belief_states(np.ones((5, 3, 4)), ref) - - def test_resolve_belief_states_component_out_of_bounds(self): - """Test that out of bounds component raises error.""" - ref = 
ActivationVisualizationFieldRef(source="belief_states", component=10) - with pytest.raises(ConfigValidationError, match="out of bounds"): - _resolve_belief_states(np.ones((5, 4)), ref) - - def test_resolve_field_metadata_existing_key(self): - """Test metadata source with existing key.""" - ref = ActivationVisualizationFieldRef(source="metadata", key="sample_index") - metadata = {"sample_index": np.array([0, 1, 2])} - result = _resolve_field(ref, "layer_0", {}, {}, None, False, 3, metadata) - np.testing.assert_array_equal(result, [0, 1, 2]) - - def test_resolve_field_metadata_layer(self): - """Test metadata source with layer key.""" - ref = ActivationVisualizationFieldRef(source="metadata", key="layer") - result = _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) - assert list(result) == ["layer_0", "layer_0", "layer_0"] - - def test_resolve_field_metadata_missing_key(self): - """Test metadata source with missing key.""" - ref = ActivationVisualizationFieldRef(source="metadata", key="missing") - with pytest.raises(ConfigValidationError, match="not available"): - _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) - - def test_resolve_field_weights_missing(self): - """Test weights source when not available.""" - ref = ActivationVisualizationFieldRef(source="weights") - with pytest.raises(ConfigValidationError, match="unavailable"): - _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) - - def test_resolve_field_belief_states_missing(self): - """Test belief_states source when not available.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - with pytest.raises(ConfigValidationError, match="were not retained"): - _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) - - def test_resolve_field_scalars_success(self): - """Test scalars source returns repeated value.""" - ref = ActivationVisualizationFieldRef(source="scalars", key="my_scalar") - scalars = {"my_scalar/layer_0": 0.42} - result = _resolve_field(ref, "layer_0", {}, 
scalars, None, False, 3, {}) - np.testing.assert_array_equal(result, [0.42, 0.42, 0.42]) - - def test_resolve_field_unsupported_source(self): - """Test unsupported source raises error.""" - ref = ActivationVisualizationFieldRef(source=cast(Any, "unknown")) - with pytest.raises(ConfigValidationError, match="Unsupported field source"): - _resolve_field(ref, "layer_0", {}, {}, None, False, 3, {}) - - -# pylint: disable=too-many-public-methods -class TestPatternExpansion: - """Tests for pattern_expansion.py functions.""" - - def test_parse_component_spec_invalid_range_parts(self): - """Test that malformed range raises error.""" - with pytest.raises(ConfigValidationError, match="Invalid range"): - _parse_component_spec("1...2...3") - - def test_parse_component_spec_range_not_ascending(self): - """Test that descending range raises error.""" - with pytest.raises(ConfigValidationError, match="start must be < end"): - _parse_component_spec("5...3") - - def test_parse_component_spec_non_numeric_range(self): - """Test that non-numeric range raises error.""" - with pytest.raises(ConfigValidationError, match="Invalid range"): - _parse_component_spec("a...b") - - def test_parse_component_spec_unrecognized(self): - """Test that unrecognized pattern raises error.""" - with pytest.raises(ConfigValidationError, match="Unrecognized component pattern"): - _parse_component_spec("invalid") - - def test_expand_pattern_to_indices_no_pattern(self): - """Test that pattern without wildcards raises error.""" - with pytest.raises(ConfigValidationError, match="has no wildcard or range"): - _expand_pattern_to_indices("plain_key", ["key_0", "key_1"]) - - def test_expand_pattern_to_indices_no_matches(self): - """Test that no matches raises error.""" - with pytest.raises(ConfigValidationError, match="No keys found"): - _expand_pattern_to_indices("missing_*", ["key_0", "key_1"]) - - def test_expand_pattern_to_indices_non_numeric_ignored(self): - """Test that non-numeric matches are ignored.""" - 
keys = ["item_0", "item_1", "item_abc"] - result = _expand_pattern_to_indices("item_*", keys) - assert result == [0, 1] - - def test_get_component_count_projection_success(self): - """Test getting component count from 2D projection.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="proj", component="*") - arrays = {"proj/layer_0": np.ones((10, 5))} - result = _get_component_count(ref, "layer_0", arrays, None, False) - assert result == 5 - - def test_get_component_count_1d_projection(self): - """Test that 1D projection raises error for expansion.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="proj") - arrays = {"proj/layer_0": np.array([1, 2, 3])} - with pytest.raises(ConfigValidationError, match="Cannot expand 1D"): - _get_component_count(ref, "layer_0", arrays, None, False) - - def test_get_component_count_belief_states_missing(self): - """Test that missing belief states raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - with pytest.raises(ConfigValidationError, match="not available"): - _get_component_count(ref, "layer_0", {}, None, False) - - def test_get_component_count_belief_states_wrong_dim(self): - """Test that non-2D belief states raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states") - with pytest.raises(ConfigValidationError, match="must be 2D"): - _get_component_count(ref, "layer_0", {}, np.ones((2, 3, 4)), False) - - def test_get_component_count_unsupported_source(self): - """Test that unsupported source raises error.""" - ref = ActivationVisualizationFieldRef(source="metadata", key="test") - with pytest.raises(ConfigValidationError, match="not supported"): - _get_component_count(ref, "layer_0", {}, None, False) - - def test_expand_array_key_pattern_invalid(self): - """Test that invalid key pattern raises error.""" - with pytest.raises(ConfigValidationError, match="Invalid key pattern"): - _expand_array_key_pattern("plain_key", "layer_0", {}, False) - - def 
test_expand_array_key_pattern_invalid_range(self): - """Test that invalid range in key pattern raises error.""" - with pytest.raises(ConfigValidationError, match="Invalid range"): - _expand_array_key_pattern("key_5...3", "layer_0", {}, False) - - def test_expand_array_key_pattern_no_matches(self): - """Test that no matching arrays raises error.""" - arrays = {"other/layer_0": np.ones((3, 4))} - with pytest.raises(ConfigValidationError, match="No array keys found"): - _expand_array_key_pattern("key_*", "layer_0", arrays, False) - - def test_expand_belief_factor_mapping_wrong_dim(self): - """Test that non-3D beliefs for factor expansion raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, component=0) - # Manually set factor to pattern string to bypass validation - object.__setattr__(ref, "factor", "*") - with pytest.raises(ConfigValidationError, match="require 3D beliefs"): - _expand_belief_factor_mapping("field_*", ref, np.ones((5, 4))) - - def test_expand_belief_factor_mapping_invalid_factor(self): - """Test that invalid factor pattern raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor=0, component=0) - # Manually set factor to invalid string to bypass validation - object.__setattr__(ref, "factor", "invalid") - with pytest.raises(ConfigValidationError, match="Invalid factor pattern"): - _expand_belief_factor_mapping("field_*", ref, np.ones((5, 3, 4))) - - def test_expand_belief_factor_mapping_factor_out_of_bounds(self): - """Test that out of bounds factor range raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor="0...10", group_as="factor") - with pytest.raises(ConfigValidationError, match="exceeds available factors"): - _expand_belief_factor_mapping("field_*", ref, np.ones((5, 3, 4))) - - def test_expand_belief_factor_mapping_component_out_of_bounds(self): - """Test that out of bounds component range raises error.""" - ref = 
ActivationVisualizationFieldRef(source="belief_states", factor="*", component="0...10", group_as="factor") - with pytest.raises(ConfigValidationError, match="exceeds states"): - _expand_belief_factor_mapping("f_*_c_*", ref, np.ones((5, 2, 4))) - - def test_expand_scalar_pattern_ranges_invalid(self): - """Test that invalid range in scalar pattern raises error.""" - with pytest.raises(ConfigValidationError, match="Invalid range pattern"): - _expand_scalar_pattern_ranges("metric_5...3") - - def test_expand_field_mapping_projection_no_field_pattern(self): - """Test projection key pattern without field pattern raises error.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="factor_*", group_as="factor") - with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("plain_field", ref, "layer_0", {}, {}, None, False) - - def test_expand_field_mapping_projection_too_many_patterns(self): - """Test projection with too many field patterns raises error.""" - ref = ActivationVisualizationFieldRef(source="arrays", key="factor_*", group_as="factor") - with pytest.raises(ConfigValidationError, match="too many patterns"): - _expand_field_mapping("f_*_g_*_h_*", ref, "layer_0", {}, {}, None, False) - - def test_expand_field_mapping_belief_no_field_pattern(self): - """Test belief factor pattern without field pattern raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor="*", group_as="factor") - beliefs = np.ones((5, 3, 4)) - with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("plain_field", ref, "layer_0", {}, {}, beliefs, False) - - def test_expand_field_mapping_belief_too_many_patterns(self): - """Test belief with too many field patterns raises error.""" - ref = ActivationVisualizationFieldRef(source="belief_states", factor="*", group_as="factor") - beliefs = np.ones((5, 3, 4)) - with pytest.raises(ConfigValidationError, match="too many 
patterns"): - _expand_field_mapping("f_*_g_*_h_*", ref, "layer_0", {}, {}, beliefs, False) - - def test_expand_field_mapping_scalar_field_pattern_no_key_pattern(self): - """Test scalar with field pattern but no key pattern raises error.""" - ref = ActivationVisualizationFieldRef(source="scalars", key="plain_key") - with pytest.raises(ConfigValidationError, match="has pattern but scalar key has no pattern"): - _expand_field_mapping("field_*", ref, "layer_0", {}, {"plain_key": 1.0}, None, False) - - def test_expand_field_mapping_scalar_key_pattern_no_field_pattern(self): - """Test scalar with key pattern but no field pattern raises error.""" - ref = ActivationVisualizationFieldRef(source="scalars", key="metric_*") - with pytest.raises(ConfigValidationError, match="requires field name pattern"): - _expand_field_mapping("plain_field", ref, "layer_0", {}, {"metric_0": 1.0}, None, False) - - -class TestPreprocessing: - """Tests for preprocessing.py functions.""" - - def test_expand_preprocessing_fields_no_matches(self): - """Test that wildcard with no matches raises error.""" - with pytest.raises(ConfigValidationError, match="did not match any columns"): - _expand_preprocessing_fields(["missing_*"], ["col_a", "col_b"]) - - def test_expand_preprocessing_fields_range_missing_column(self): - """Test that range expanding to missing column raises error.""" - with pytest.raises(ConfigValidationError, match="column not found"): - _expand_preprocessing_fields(["col_0...3"], ["col_0", "col_1"]) - - def test_apply_preprocessing_output_pattern_error(self): - """Test that output field with pattern raises error.""" - df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}) - step = ActivationVisualizationPreprocessStep( - type="project_to_simplex", input_fields=["a", "b", "c"], output_fields=["out_*", "out_y"] - ) - with pytest.raises(ConfigValidationError, match="cannot contain patterns"): - _apply_preprocessing(df, [step]) - - def 
test_apply_preprocessing_output_range_pattern_error(self): - """Test that output field with range pattern raises error.""" - df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}) - step = ActivationVisualizationPreprocessStep( - type="project_to_simplex", input_fields=["a", "b", "c"], output_fields=["out_0...3", "out_y"] - ) - with pytest.raises(ConfigValidationError, match="cannot contain patterns"): - _apply_preprocessing(df, [step]) - - def test_project_to_simplex_missing_column(self): - """Test that missing column raises error.""" - df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) - step = ActivationVisualizationPreprocessStep( - type="project_to_simplex", input_fields=["a", "b", "missing"], output_fields=["x", "y"] - ) - with pytest.raises(ConfigValidationError, match="missing from the dataframe"): - _project_to_simplex(df, step) - - def test_project_to_simplex_success(self): - """Test successful simplex projection.""" - df = pd.DataFrame({"p0": [0.5, 0.3], "p1": [0.3, 0.4], "p2": [0.2, 0.3]}) - step = ActivationVisualizationPreprocessStep( - type="project_to_simplex", input_fields=["p0", "p1", "p2"], output_fields=["x", "y"] - ) - result = _project_to_simplex(df, step) - assert "x" in result.columns - assert "y" in result.columns - # x = p1 + 0.5 * p2 - np.testing.assert_allclose(result["x"], [0.3 + 0.1, 0.4 + 0.15]) - # y = sqrt(3)/2 * p2 - np.testing.assert_allclose(result["y"], [0.2 * np.sqrt(3) / 2, 0.3 * np.sqrt(3) / 2]) - - def test_combine_rgb_wrong_output_count(self): - """Test that combine_rgb with wrong output count raises error.""" - df = pd.DataFrame({"r": [0.5], "g": [0.5], "b": [0.5]}) - # Create step manually to bypass validation - step = ActivationVisualizationPreprocessStep.__new__(ActivationVisualizationPreprocessStep) - object.__setattr__(step, "type", "combine_rgb") - object.__setattr__(step, "input_fields", ["r", "g", "b"]) - object.__setattr__(step, "output_fields", ["color1", "color2"]) - with 
pytest.raises(ConfigValidationError, match="exactly one output_field"): - _combine_rgb(df, step) - - def test_combine_rgb_too_few_inputs(self): - """Test that combine_rgb with <3 inputs raises error.""" - df = pd.DataFrame({"r": [0.5], "g": [0.5]}) - # Create step manually to bypass validation - step = ActivationVisualizationPreprocessStep.__new__(ActivationVisualizationPreprocessStep) - object.__setattr__(step, "type", "combine_rgb") - object.__setattr__(step, "input_fields", ["r", "g"]) - object.__setattr__(step, "output_fields", ["color"]) - with pytest.raises(ConfigValidationError, match="at least three"): - _combine_rgb(df, step) - - def test_combine_rgb_missing_column(self): - """Test that missing column raises error.""" - df = pd.DataFrame({"r": [0.5], "g": [0.5]}) - step = ActivationVisualizationPreprocessStep( - type="combine_rgb", input_fields=["r", "g", "missing"], output_fields=["color"] - ) - with pytest.raises(ConfigValidationError, match="missing from the dataframe"): - _combine_rgb(df, step) - - def test_combine_rgb_3_inputs(self): - """Test combine_rgb with exactly 3 inputs. - - Note: combine_rgb performs per-column min-max normalization, so to get - expected colors we need data where each column spans [0, 1]. 
- """ - df = pd.DataFrame({"r": [0.0, 1.0, 0.5], "g": [0.0, 1.0, 0.5], "b": [0.0, 1.0, 0.5]}) - step = ActivationVisualizationPreprocessStep( - type="combine_rgb", input_fields=["r", "g", "b"], output_fields=["color"] - ) - result = _combine_rgb(df, step) - assert result["color"].iloc[0] == "#000000" # black - assert result["color"].iloc[1] == "#ffffff" # white - assert result["color"].iloc[2] == "#808080" # gray - - def test_combine_rgb_more_than_3_inputs_pca(self): - """Test combine_rgb with >3 inputs triggers PCA path.""" - # Create data with 4 features - np.random.seed(42) - df = pd.DataFrame( - {"f0": np.random.rand(10), "f1": np.random.rand(10), "f2": np.random.rand(10), "f3": np.random.rand(10)} - ) - step = ActivationVisualizationPreprocessStep( - type="combine_rgb", input_fields=["f0", "f1", "f2", "f3"], output_fields=["color"] - ) - result = _combine_rgb(df, step) - assert "color" in result.columns - # All colors should be valid hex colors - for color in result["color"]: - assert color.startswith("#") - assert len(color) == 7 - - def test_combine_rgb_pca_few_samples(self): - """Test combine_rgb PCA path with fewer samples than components.""" - # Create 2 samples with 4 features - PCA will have <3 components - df = pd.DataFrame({"f0": [0.1, 0.9], "f1": [0.2, 0.8], "f2": [0.3, 0.7], "f3": [0.4, 0.6]}) - step = ActivationVisualizationPreprocessStep( - type="combine_rgb", input_fields=["f0", "f1", "f2", "f3"], output_fields=["color"] - ) - result = _combine_rgb(df, step) - assert "color" in result.columns - assert len(result) == 2 - - def test_apply_preprocessing_project_to_simplex(self): - """Test full preprocessing pipeline with project_to_simplex.""" - df = pd.DataFrame({"p0": [0.5, 0.3], "p1": [0.3, 0.4], "p2": [0.2, 0.3]}) - steps = [ - ActivationVisualizationPreprocessStep( - type="project_to_simplex", input_fields=["p0", "p1", "p2"], output_fields=["x", "y"] - ) - ] - result = _apply_preprocessing(df, steps) - assert "x" in result.columns - assert "y" 
in result.columns - - def test_apply_preprocessing_combine_rgb(self): - """Test full preprocessing pipeline with combine_rgb.""" - df = pd.DataFrame({"r": [0.5], "g": [0.5], "b": [0.5]}) - steps = [ - ActivationVisualizationPreprocessStep( - type="combine_rgb", input_fields=["r", "g", "b"], output_fields=["color"] - ) - ] - result = _apply_preprocessing(df, steps) - assert "color" in result.columns - - def test_apply_preprocessing_with_pattern_expansion(self): - """Test preprocessing with pattern expansion in input fields.""" - df = pd.DataFrame({"val_0": [0.2], "val_1": [0.3], "val_2": [0.5]}) - steps = [ - ActivationVisualizationPreprocessStep( - type="project_to_simplex", input_fields=["val_*"], output_fields=["x", "y"] - ) - ] - result = _apply_preprocessing(df, steps) - assert "x" in result.columns - assert "y" in result.columns - - -# pylint: disable=too-many-public-methods -class TestDataframeBuilders: - """Tests for dataframe_builders.py functions.""" - - def test_extract_base_column_name_with_group_pattern(self): - """Test extracting base column name with group value pattern.""" - result = _extract_base_column_name("factor_0_projected", "0") - assert result == "projected" - - def test_extract_base_column_name_no_pattern(self): - """Test extracting base column name when no pattern.""" - result = _extract_base_column_name("my_column", "0") - assert result == "my_column" - - def test_extract_base_column_name_no_match(self): - """Test extracting base column name when pattern doesn't match.""" - result = _extract_base_column_name("other_column", "0") - assert result == "other_column" - - def test_scalar_series_metadata_with_arrays(self): - """Test extracting metadata from arrays.""" - metadata = {"step": np.array([10]), "name": np.array(["test"])} - result = _scalar_series_metadata(metadata) - assert result["step"] == 10 - assert result["name"] == "test" - - def test_scalar_series_metadata_with_empty_array(self): - """Test that empty arrays are skipped.""" - 
metadata = {"step": np.array([10]), "empty": np.array([])} - result = _scalar_series_metadata(metadata) - assert result["step"] == 10 - assert "empty" not in result - - def test_scalar_series_metadata_with_scalar(self): - """Test extracting metadata from scalar values.""" - metadata = {"step": 10, "name": "test"} - result = _scalar_series_metadata(metadata) - assert result["step"] == 10 - assert result["name"] == "test" - - def test_infer_scalar_series_indices_success(self): - """Test inferring scalar series indices from available keys.""" - mapping = ScalarSeriesMapping( - key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" - ) - scalars = { - "analysis/cumvar_0/layer_0": 0.5, - "analysis/cumvar_1/layer_0": 0.7, - "analysis/cumvar_2/layer_0": 0.9, - } - result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") - assert result == [0, 1, 2] - - def test_infer_scalar_series_indices_empty_body(self): - """Test that empty body between prefix and suffix is skipped.""" - mapping = ScalarSeriesMapping( - key_template="pc{index}_var/{layer}", index_field="component", value_field="variance" - ) - # Key that matches prefix and suffix but has empty body - scalars = { - "analysis/pc_var/layer_0": 0.5, # Empty between pc and _var - "analysis/pc0_var/layer_0": 0.3, - } - result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") - assert result == [0] # Only numeric index included - - def test_infer_scalar_series_indices_no_matches(self): - """Test that no matching indices raises error.""" - mapping = ScalarSeriesMapping( - key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" - ) - scalars = {"analysis/other_metric": 1.0} - with pytest.raises(ConfigValidationError, match="could not infer indices"): - _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") - - def test_infer_scalar_series_indices_with_suffix(self): - """Test inferring indices when template has 
suffix after index.""" - mapping = ScalarSeriesMapping( - key_template="pc{index}_var/{layer}", index_field="component", value_field="variance" - ) - scalars = { - "analysis/pc0_var/layer_0": 0.5, - "analysis/pc1_var/layer_0": 0.3, - "analysis/pc2_var/layer_0": 0.2, - "analysis/other/layer_0": 1.0, # Should not match - } - result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") - assert result == [0, 1, 2] - - def test_infer_scalar_series_indices_non_numeric_skipped(self): - """Test that non-numeric values are skipped.""" - mapping = ScalarSeriesMapping(key_template="item_{index}/{layer}", index_field="idx", value_field="val") - scalars = { - "analysis/item_0/layer_0": 0.5, - "analysis/item_abc/layer_0": 0.7, # Non-numeric, should be skipped - "analysis/item_1/layer_0": 0.9, - } - result = _infer_scalar_series_indices(mapping, scalars, "layer_0", "analysis") - assert result == [0, 1] - - def test_build_scalar_series_dataframe_success(self): - """Test building scalar series dataframe.""" - mapping = ScalarSeriesMapping( - key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" - ) - metadata = {"step": np.array([10]), "analysis": np.array(["pca"])} - scalars = { - "analysis/cumvar_0/layer_0": 0.5, - "analysis/cumvar_1/layer_0": 0.7, - "analysis/cumvar_0/layer_1": 0.6, - } - result = _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0", "layer_1"], "analysis") - assert len(result) == 3 - assert "component" in result.columns - assert "cumvar" in result.columns - assert "layer" in result.columns - - def test_build_scalar_series_dataframe_no_matches(self): - """Test that no matching scalars raises error.""" - mapping = ScalarSeriesMapping( - key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" - ) - metadata = {"step": np.array([10])} - scalars = {"analysis/other_metric": 1.0} - # Error comes from _infer_scalar_series_indices when no indices are found - with 
pytest.raises(ConfigValidationError, match="could not infer indices"): - _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0"], "analysis") - - def test_build_scalar_series_dataframe_with_explicit_indices(self): - """Test building scalar series dataframe with explicit index_values.""" - mapping = ScalarSeriesMapping( - key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar", index_values=[0, 1] - ) - metadata = {"step": np.array([10])} - scalars = { - "analysis/cumvar_0/layer_0": 0.5, - "analysis/cumvar_1/layer_0": 0.7, - "analysis/cumvar_2/layer_0": 0.9, # Not in index_values, should be skipped - } - result = _build_scalar_series_dataframe(mapping, metadata, scalars, ["layer_0"], "analysis") - assert len(result) == 2 - assert list(result["component"]) == [0, 1] - - def test_build_scalar_dataframe_scalar_pattern(self): - """Test building scalar dataframe with scalar_pattern source.""" - mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="rmse/layer_*")} - scalars = { - "analysis/rmse/layer_0": 0.1, - "analysis/rmse/layer_1": 0.2, - } - result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) - assert len(result) == 2 - assert "step" in result.columns - assert "rmse" in result.columns - assert all(result["step"] == 5) - - def test_build_scalar_dataframe_scalar_history(self): - """Test building scalar dataframe with scalar_history source.""" - mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_history", key="metric")} - scalars = {} - scalar_history = {"analysis/metric": [(0, 0.5), (10, 0.3), (20, 0.1)]} - result = _build_scalar_dataframe(mappings, scalars, scalar_history, "analysis", 20) - assert len(result) == 3 - assert list(result["step"]) == [0, 10, 20] - - def test_build_scalar_dataframe_scalar_history_fallback(self): - """Test scalar_history falls back to current scalars when no history.""" - mappings = {"rmse": 
ActivationVisualizationFieldRef(source="scalar_history", key="metric")} - scalars = {"analysis/metric": 0.42} - scalar_history = {} - result = _build_scalar_dataframe(mappings, scalars, scalar_history, "analysis", 5) - assert len(result) == 1 - assert result["step"].iloc[0] == 5 - assert result["rmse"].iloc[0] == 0.42 - - def test_build_scalar_dataframe_no_matches(self): - """Test that no matching scalars raises error.""" - mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="missing_*")} - scalars = {"analysis/other": 1.0} - with pytest.raises(ConfigValidationError, match="No scalar pattern keys found"): - _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) - - def test_build_scalar_dataframe_non_scalar_source_skipped(self): - """Test that non-scalar sources are skipped.""" - mappings = { - "proj": ActivationVisualizationFieldRef(source="arrays", key="my_proj"), - "rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="rmse/layer_*"), - } - scalars = {"analysis/rmse/layer_0": 0.1} - result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) - # Only scalar_pattern should be in result - assert "rmse" in result.columns - assert len(result) == 1 - - def test_build_scalar_dataframe_simple_key(self): - """Test scalar_pattern with non-pattern key.""" - # Use field name "value" to avoid conflict with hardcoded "metric" column - mappings = {"value": ActivationVisualizationFieldRef(source="scalar_pattern", key="my_metric")} - scalars = {"analysis/my_metric": 0.42} - result = _build_scalar_dataframe(mappings, scalars, {}, "analysis", 10) - assert len(result) == 1 - assert result["value"].iloc[0] == 0.42 - assert result["metric"].iloc[0] == "analysis/my_metric" # Check the metric key column - - def test_build_scalar_dataframe_key_none(self): - """Test that scalar_pattern with key=None raises error.""" - ref = ActivationVisualizationFieldRef(source="scalar_pattern", key="placeholder") - # Bypass validation to 
set key to None - object.__setattr__(ref, "key", None) - mappings = {"value": ref} - with pytest.raises(ConfigValidationError, match="must specify a key"): - _build_scalar_dataframe(mappings, {"analysis/test": 1.0}, {}, "analysis", 5) - - def test_build_scalar_dataframe_no_matching_values(self): - """Test that no matching values raises error with pattern.""" - mappings = {"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="missing/layer_*")} - # Scalars exist but don't match the pattern - scalars = {"analysis/other/layer_0": 0.1, "analysis/something_else": 0.2} - with pytest.raises(ConfigValidationError, match="No scalar pattern keys found"): - _build_scalar_dataframe(mappings, scalars, {}, "analysis", 5) - - def test_build_metadata_columns(self): - """Test building metadata columns.""" - - sequences: list[tuple[int, ...]] = [(1, 2, 3), (4, 5)] - steps = np.array([3, 2]) - metadata = PreparedMetadata(sequences=sequences, steps=steps, select_last_token=False) - weights = np.array([1.0, 0.5]) - result = _build_metadata_columns("my_analysis", metadata, weights) - assert "analysis" in result - assert "step" in result - assert "sequence_length" in result - assert "sequence" in result - assert "sample_index" in result - assert "weight" in result - assert list(result["analysis"]) == ["my_analysis", "my_analysis"] - assert list(result["step"]) == [3, 2] - assert list(result["weight"]) == [1.0, 0.5] - - def test_build_dataframe_for_mappings_simple(self): - """Test _build_dataframe_for_mappings with simple array mapping.""" - mappings = {"x": ActivationVisualizationFieldRef(source="arrays", key="pca", component=0)} - metadata = {"step": np.array([1, 2]), "analysis": np.array(["test", "test"])} - arrays = {"pca/layer_0": np.array([[0.1, 0.2], [0.3, 0.4]])} - result = _build_dataframe_for_mappings(mappings, metadata, arrays, {}, None, False, ["layer_0"]) - assert "x" in result.columns - assert "layer" in result.columns - assert len(result) == 2 - - def 
test_build_dataframe_for_mappings_belief_only(self): - """Test _build_dataframe_for_mappings with belief_states only (no layer iteration).""" - mappings = {"belief": ActivationVisualizationFieldRef(source="belief_states", component=0)} - metadata = {"step": np.array([1, 2])} - beliefs = np.array([[0.8, 0.2], [0.6, 0.4]]) - result = _build_dataframe_for_mappings(mappings, metadata, {}, {}, beliefs, False, ["layer_0"]) - assert "belief" in result.columns - assert len(result) == 2 - # Belief-only mode uses "_no_layer_" placeholder - assert result["layer"].iloc[0] == "_no_layer_" - - def test_build_dataframe_for_mappings_with_groups(self): - """Test _build_dataframe_for_mappings with group expansion.""" - # Use belief_states with factor pattern to trigger group expansion - # field_name has one *, factor has one *, so component expansion happens - mappings = { - "prob_*": ActivationVisualizationFieldRef( - source="belief_states", factor="*", component=0, group_as="factor" - ) - } - metadata = {"step": np.array([1])} - # 3D beliefs: (samples, factors, states) - beliefs = np.array([[[0.8, 0.2], [0.6, 0.4]]]) # 1 sample, 2 factors, 2 states - result = _build_dataframe_for_mappings(mappings, metadata, {}, {}, beliefs, False, ["layer_0"]) - assert "factor" in result.columns - # Factor expansion creates separate prob_0 and prob_1 columns - assert "prob_0" in result.columns or "prob_1" in result.columns - # Should have 2 rows (one per factor group) - assert len(result) == 2 - - def test_build_dataframe_for_mappings_error_wrapping(self): - """Test that errors from _expand_field_mapping are wrapped with context.""" - # Create a mapping with a key pattern that will fail expansion due to no matching arrays - # The key "factor_*" is a pattern that needs expansion, which fails when no arrays match - mappings = {"x_*": ActivationVisualizationFieldRef(source="arrays", key="factor_*", group_as="factor")} - metadata = {"step": np.array([1])} - with pytest.raises(ConfigValidationError, 
match="Error expanding 'x_\\*' for layer"): - _build_dataframe_for_mappings(mappings, metadata, {}, {}, None, False, ["layer_0"]) - - def test_build_dataframe_with_scalar_pattern(self): - """Test _build_dataframe with scalar_pattern source.""" - data_mapping = ActivationVisualizationDataMapping( - mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="rmse/layer_*")} - ) - viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) - metadata = {"step": np.array([1]), "analysis": np.array(["test"])} - scalars = {"test/rmse/layer_0": 0.1, "test/rmse/layer_1": 0.2} - result = _build_dataframe(viz_cfg, metadata, {}, scalars, {}, 10, None, False, ["layer_0", "layer_1"]) - assert "rmse" in result.columns - assert len(result) == 2 - - def test_build_dataframe_with_scalar_series(self): - """Test _build_dataframe with scalar_series source.""" - scalar_series = ScalarSeriesMapping( - key_template="cumvar_{index}/{layer}", index_field="component", value_field="cumvar" - ) - data_mapping = ActivationVisualizationDataMapping(mappings={}, scalar_series=scalar_series) - viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) - metadata = {"step": np.array([1]), "analysis": np.array(["test"])} - scalars = {"test/cumvar_0/layer_0": 0.5, "test/cumvar_1/layer_0": 0.7} - result = _build_dataframe(viz_cfg, metadata, {}, scalars, {}, None, None, False, ["layer_0"]) - assert "component" in result.columns - assert "cumvar" in result.columns - - def test_build_dataframe_combined_mappings(self): - """Test _build_dataframe with combined mappings.""" - combined = [ - CombinedMappingSection( - label="projected", - mappings={"x": ActivationVisualizationFieldRef(source="arrays", key="pca", component=0)}, - ), - CombinedMappingSection( - label="raw", - mappings={"x": ActivationVisualizationFieldRef(source="arrays", key="raw", component=0)}, - ), - ] - data_mapping = ActivationVisualizationDataMapping(mappings={}, 
combined=combined, combine_as="source") - viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) - metadata = {"step": np.array([1])} - arrays = { - "pca/layer_0": np.array([[0.1, 0.2]]), - "raw/layer_0": np.array([[0.5, 0.6]]), - } - result = _build_dataframe(viz_cfg, metadata, arrays, {}, {}, None, None, False, ["layer_0"]) - assert "source" in result.columns - assert set(result["source"]) == {"projected", "raw"} - assert len(result) == 2 - - def test_build_dataframe_scalar_pattern_no_step(self): - """Test that scalar_pattern without step raises error.""" - data_mapping = ActivationVisualizationDataMapping( - mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="metric")} - ) - viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) - metadata = {"step": np.array([1]), "analysis": np.array(["test"])} - with pytest.raises(ConfigValidationError, match="without the `step` parameter"): - _build_dataframe(viz_cfg, metadata, {}, {"test/metric": 0.1}, {}, None, None, False, []) - - def test_build_dataframe_scalar_pattern_no_analysis(self): - """Test that scalar_pattern without analysis metadata raises error.""" - data_mapping = ActivationVisualizationDataMapping( - mappings={"rmse": ActivationVisualizationFieldRef(source="scalar_pattern", key="metric")} - ) - viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) - metadata = {"step": np.array([1])} # No "analysis" key - with pytest.raises(ConfigValidationError, match="requires 'analysis'"): - _build_dataframe(viz_cfg, metadata, {}, {"test/metric": 0.1}, {}, 10, None, False, []) - - def test_build_dataframe_scalar_series_no_analysis(self): - """Test that scalar_series without analysis metadata raises error.""" - scalar_series = ScalarSeriesMapping( - key_template="{layer}_cumvar_{index}", index_field="component", value_field="cumvar" - ) - data_mapping = ActivationVisualizationDataMapping(mappings={}, 
scalar_series=scalar_series) - viz_cfg = ActivationVisualizationConfig(name="test", data_mapping=data_mapping) - metadata = {"step": np.array([1])} # No "analysis" key - with pytest.raises(ConfigValidationError, match="requires 'analysis'"): - _build_dataframe(viz_cfg, metadata, {}, {}, {}, None, None, False, ["layer_0"]) - - -class TestSampling: - """Tests for DataFrame sampling functionality.""" - - def test_sampling_reduces_size_no_facets(self): - """Test that sampling reduces DataFrame size when no facet columns present.""" - df = pd.DataFrame({"a": range(100), "b": range(100)}) - config = SamplingConfig(max_points=20, seed=42) - result = _apply_sampling(df, config, facet_columns=[]) - assert len(result) == 20 - - def test_sampling_no_reduction_when_under_limit(self): - """Test that sampling returns original DataFrame when size <= max_points.""" - df = pd.DataFrame({"a": range(10), "b": range(10)}) - config = SamplingConfig(max_points=20, seed=42) - result = _apply_sampling(df, config, facet_columns=[]) - assert len(result) == 10 - pd.testing.assert_frame_equal(result, df) - - def test_sampling_per_facet_group(self): - """Test that sampling applies per facet group.""" - df = pd.DataFrame( - { - "factor": ["0"] * 50 + ["1"] * 50 + ["2"] * 50, - "value": range(150), - } - ) - config = SamplingConfig(max_points=10, seed=42) - result = _apply_sampling(df, config, facet_columns=["factor"]) - - assert len(result) == 30 # 10 per factor * 3 factors - for factor in ["0", "1", "2"]: - factor_count = len(result[result["factor"] == factor]) - assert factor_count == 10 - - def test_sampling_multiple_facet_columns(self): - """Test sampling with multiple facet columns.""" - df = pd.DataFrame( - { - "layer": ["layer_0"] * 40 + ["layer_1"] * 40, - "factor": (["0"] * 20 + ["1"] * 20) * 2, - "value": range(80), - } - ) - config = SamplingConfig(max_points=5, seed=42) - result = _apply_sampling(df, config, facet_columns=["layer", "factor"]) - - # Should have 4 groups (2 layers * 2 
factors), each with max 5 points - assert len(result) == 20 - for layer in ["layer_0", "layer_1"]: - for factor in ["0", "1"]: - group_count = len(result[(result["layer"] == layer) & (result["factor"] == factor)]) - assert group_count == 5 - - def test_sampling_ignores_missing_facet_columns(self): - """Test that non-existent facet columns are ignored.""" - df = pd.DataFrame({"a": range(100), "value": range(100)}) - config = SamplingConfig(max_points=20, seed=42) - # facet_columns includes "factor" which doesn't exist - result = _apply_sampling(df, config, facet_columns=["factor", "layer"]) - # Should sample globally since no facet columns exist - assert len(result) == 20 - - def test_sampling_seed_reproducibility(self): - """Test that seed produces reproducible results.""" - df = pd.DataFrame({"a": range(100), "b": range(100)}) - config = SamplingConfig(max_points=20, seed=42) - - result1 = _apply_sampling(df, config, facet_columns=[]) - result2 = _apply_sampling(df, config, facet_columns=[]) - - pd.testing.assert_frame_equal(result1.reset_index(drop=True), result2.reset_index(drop=True)) - - def test_sampling_none_max_points_returns_original(self): - """Test that None max_points returns DataFrame unchanged.""" - df = pd.DataFrame({"a": range(100), "b": range(100)}) - config = SamplingConfig(max_points=None) - result = _apply_sampling(df, config, facet_columns=[]) - pd.testing.assert_frame_equal(result, df) - - def test_sampling_config_validation_negative(self): - """Test that negative max_points raises error.""" - with pytest.raises(ConfigValidationError, match="positive integer"): - SamplingConfig(max_points=-1) - - def test_sampling_config_validation_zero(self): - """Test that zero max_points raises error.""" - with pytest.raises(ConfigValidationError, match="positive integer"): - SamplingConfig(max_points=0) diff --git a/tests/activations/test_visualization_persistence.py b/tests/activations/test_visualization_persistence.py deleted file mode 100644 index 
ae3881ee..00000000 --- a/tests/activations/test_visualization_persistence.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Tests for visualization persistence helpers.""" - -from __future__ import annotations - -import pandas as pd - -from simplexity.activations.activation_visualizations import ( - ActivationVisualizationPayload, - VisualizationControlDetail, - VisualizationControlsState, - render_visualization, -) -from simplexity.activations.visualization_persistence import save_visualization_payloads -from simplexity.visualization.history import history_paths -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - DataConfig, - GeometryConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - - -def _plot_config() -> PlotConfig: - layer = LayerConfig( - geometry=GeometryConfig(type="line"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="step", type="quantitative"), - y=ChannelAestheticsConfig(field="value", type="quantitative"), - ), - ) - return PlotConfig( - backend="altair", - data=DataConfig(source="main"), - layers=[layer], - size=PlotSizeConfig(), - guides=PlotLevelGuideConfig(), - ) - - -def _payload(dataframe: pd.DataFrame) -> ActivationVisualizationPayload: - cfg = _plot_config() - controls = VisualizationControlsState( - slider=VisualizationControlDetail( - type="slider", - field="step", - options=list(pd.unique(dataframe["step"])) if "step" in dataframe else [], - ) - ) - figure = render_visualization(cfg, dataframe, controls) - return ActivationVisualizationPayload( - analysis="analysis", - name="viz", - backend="altair", - figure=figure, - dataframe=dataframe, - controls=controls, - plot_config=cfg, - ) - - -def test_save_visualization_payloads_accumulates_step_history(tmp_path): - """Test that visualization payloads accumulate history across steps.""" - df_first = pd.DataFrame({"step": [0, 0], "value": [0.1, 0.2]}) - payload_one = _payload(df_first) - - 
save_visualization_payloads({"analysis/viz": payload_one}, tmp_path, step=1) - - data_path, _ = history_paths(tmp_path, "analysis_viz") - assert data_path.exists() - history_df = pd.read_json(data_path, orient="records", lines=True) - assert len(history_df) == len(df_first) - assert set(history_df["step"]) == {1} - assert set(history_df["sequence_step"]) == {0} - assert (tmp_path / "analysis" / "accumulated" / "viz.html").exists() - - df_second = pd.DataFrame({"step": [1], "value": [0.5]}) - payload_two = _payload(df_second) - - save_visualization_payloads({"analysis/viz": payload_two}, tmp_path, step=2) - - history_df = pd.read_json(data_path, orient="records", lines=True) - assert len(history_df) == len(df_first) + len(df_second) - assert set(history_df["step"]) == {1, 2} - assert set(history_df["sequence_step"]) == {0, 1} - assert (tmp_path / "analysis" / "accumulated" / "viz.html").exists() diff --git a/tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml b/tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml deleted file mode 100644 index 784c45ff..00000000 --- a/tests/end_to_end/configs/activation_tracker/rmse_over_time_example.yaml +++ /dev/null @@ -1,37 +0,0 @@ -# Example: Plot RMSE value over training steps, split by layer -# This demonstrates the scalar_pattern feature for temporal metric visualization - -name: tracker_with_rmse_tracking -instance: - _target_: simplexity.activations.activation_tracker.ActivationTracker - analyses: - regression: - instance: - _target_: simplexity.activations.activation_analyses.LinearRegressionSVDAnalysis - last_token_only: false - concat_layers: false - use_probs_as_weights: true - rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] - visualizations: - # Temporal visualization: RMSE over training steps - - name: rmse_over_time - controls: - accumulate_steps: true - dropdown: layer # User can filter by layer in UI - data_mapping: - mappings: - rmse: {source: scalar_pattern, key: 
"blocks.*.hook_resid_post_rmse"} - backend: altair - layer: - geometry: - type: line - props: {} - aesthetics: - x: {field: step, type: quantitative, title: "Training Step"} - y: {field: rmse, type: quantitative, title: "RMSE"} - color: {field: layer, type: nominal, title: "Layer"} - strokeDash: {field: layer, type: nominal} - size: {width: 800, height: 400} - guides: - title: "RMSE Evolution Across Training" - subtitle: "Tracking model convergence by layer" diff --git a/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml deleted file mode 100644 index 1529c9ff..00000000 --- a/tests/end_to_end/configs/activation_tracker/with_factor_visuals.yaml +++ /dev/null @@ -1,162 +0,0 @@ -name: tracker_with_factor_arrays -instance: - _target_: simplexity.activations.activation_tracker.ActivationTracker - analyses: - # Linear regression with to_factors=true produces per-factor projections - # Keys are namespaced as: {layer}_factor_{idx}/projected - regression_factored: - instance: - _target_: simplexity.activations.activation_analyses.LinearRegressionAnalysis - last_token_only: false - concat_layers: false - use_probs_as_weights: false - to_factors: true # Enable per-factor projections - skip_first_token: true - visualizations: - # 2x5 grid: Top row = projections, Bottom row = ground truth beliefs - # Uses combined mappings to merge two data sources with a data_type column - - name: prediction_vs_truth_grid - controls: - slider: step - dropdown: layer - data_mapping: - # Combined mappings allow merging projections + belief states - sampling: - max_points: 2000 - seed: 42 - combined: - - label: prediction - mappings: - factor_*_prob_0: - source: arrays - key: "factor_*/projected" - component: 0 - group_as: factor - factor_*_prob_1: - source: arrays - key: "factor_*/projected" - component: 1 - group_as: factor - factor_*_prob_2: - source: arrays - key: "factor_*/projected" - component: 2 - 
group_as: factor - - label: ground_truth - mappings: - # Belief state factor patterns expand across factors (3D beliefs) - factor_*_prob_0: - source: belief_states - factor: "*" - component: 0 - group_as: factor - factor_*_prob_1: - source: belief_states - factor: "*" - component: 1 - group_as: factor - factor_*_prob_2: - source: belief_states - factor: "*" - component: 2 - group_as: factor - combine_as: data_type - preprocessing: - - type: project_to_simplex - input_fields: [prob_0, prob_1, prob_2] - output_fields: [simplex_x, simplex_y] - - type: combine_rgb - input_fields: [prob_0, prob_1, prob_2] - output_fields: [point_color] - backend: plotly - plot: - facet: - column: factor # 5 columns (one per factor) - row: data_type # 2 rows (prediction vs ground_truth) - layers: - - geometry: {type: point } - aesthetics: - x: {field: simplex_x, type: quantitative, title: "Simplex X"} - y: {field: simplex_y, type: quantitative, title: "Simplex Y"} - color: {field: point_color, type: nominal} - size: {value: 3} - size: {width: 200, height: 200} - guides: - title: "Prediction vs Ground Truth (Per Factor)" - subtitle: "Top: Ground truth | Bottom: Model belief states" - # 3D scatter plot of belief states (no simplex projection) - - name: belief_states_3d - controls: - slider: step - dropdown: layer - data_mapping: - sampling: - max_points: 2000 - seed: 42 - combined: - - label: prediction - mappings: - factor_*_prob_0: - source: arrays - key: "factor_*/projected" - component: 0 - group_as: factor - factor_*_prob_1: - source: arrays - key: "factor_*/projected" - component: 1 - group_as: factor - factor_*_prob_2: - source: arrays - key: "factor_*/projected" - component: 2 - group_as: factor - - label: ground_truth - mappings: - factor_*_prob_0: - source: belief_states - factor: "*" - component: 0 - group_as: factor - factor_*_prob_1: - source: belief_states - factor: "*" - component: 1 - group_as: factor - factor_*_prob_2: - source: belief_states - factor: "*" - component: 2 - 
group_as: factor - combine_as: data_type - preprocessing: - - type: combine_rgb - input_fields: [prob_0, prob_1, prob_2] - output_fields: [point_color] - backend: plotly - plot: - facet: - column: factor # 5 columns (one per factor) - row: data_type # 2 rows (prediction vs ground_truth) - layers: - - geometry: {type: point} - aesthetics: - # scale.domain sets axis ranges to [0, 1] for probability space - # This prevents auto-scaling artifacts when data is planar - x: {field: prob_0, type: quantitative, title: "State 0", scale: {domain: [0, 1]}} - y: {field: prob_1, type: quantitative, title: "State 1"} - z: {field: prob_2, type: quantitative, title: "State 2"} - color: {field: point_color, type: nominal} - size: {value: 3} - size: {width: 200, height: 200} - guides: - title: "3D Belief States (Prediction vs Ground Truth)" - subtitle: "Top: Prediction | Bottom: Ground Truth" - # PCA analysis for visualizing first 3 principal components in 3D - pca_analysis: - instance: - _target_: simplexity.activations.activation_analyses.PcaAnalysis - n_components: 10 - last_token_only: false - concat_layers: false - use_probs_as_weights: false diff --git a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml b/tests/end_to_end/configs/activation_tracker/with_visuals.yaml deleted file mode 100644 index aece8498..00000000 --- a/tests/end_to_end/configs/activation_tracker/with_visuals.yaml +++ /dev/null @@ -1,207 +0,0 @@ -name: tracker_with_arrays -instance: - _target_: simplexity.activations.activation_tracker.ActivationTracker - analyses: - pca_all_tokens: # <- THIS RETURNS SCALARS AND PROJECTIONS - instance: - _target_: simplexity.activations.activation_analyses.PcaAnalysis - n_components: null - last_token_only: false - concat_layers: false - use_probs_as_weights: true - variance_thresholds: [0.80, 0.90, 0.95, 0.99] - visualizations: # <- THIS IS OPTIONAL - - name: pca_3d_scatter # <- NEW VISUALIZATION - controls: - slider: step - dropdown: layer - cumulative: false - 
data_mapping: - mappings: - pc_*: {source: arrays, key: pca, component: "*"} - belief_*: {source: belief_states, component: "*"} - preprocessing: - - type: combine_rgb - input_fields: [belief_*] - output_fields: [belief_color] - backend: plotly - layer: - geometry: - type: point - props: {size: 3} - aesthetics: - x: {field: pc_0, type: quantitative, title: "PC 1"} - y: {field: pc_1, type: quantitative, title: "PC 2"} - z: {field: pc_2, type: quantitative, title: "PC 3"} - color: - field: belief_color - type: nominal - opacity: {value: 0.85} - size: {width: 800, height: 600} - guides: - title: "PCA Projection (3D)" - subtitle: "All tokens, weighted by prefix probability" - - regression: # <- THIS RETURNS SCALARS AND PROJECTIONS - instance: - _target_: simplexity.activations.activation_analyses.LinearRegressionAnalysis - last_token_only: false - concat_layers: false - use_probs_as_weights: true - # rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] - visualizations: # <- THIS IS OPTIONAL - - name: regression_3d # <- NEW VISUALIZATION - controls: - slider: step - dropdown: layer - cumulative: false - data_mapping: - mappings: - prob_0: {source: arrays, key: projected, component: 0} - prob_1: {source: arrays, key: projected, component: 1} - prob_2: {source: arrays, key: projected, component: 2} - belief_r: {source: belief_states, component: 0} - belief_g: {source: belief_states, component: 1} - belief_b: {source: belief_states, component: 2} - preprocessing: - - type: combine_rgb - input_fields: [belief_r, belief_g, belief_b] - output_fields: [belief_color] - backend: plotly - layer: - geometry: - type: point - props: {size: 4} - aesthetics: - x: {field: prob_0, type: quantitative, title: "P(State 0)"} - y: {field: prob_1, type: quantitative, title: "P(State 1)"} - z: {field: prob_2, type: quantitative, title: "P(State 2)"} - color: - field: belief_color - type: nominal - opacity: {value: 0.7} - size: {width: 800, height: 600} - guides: - title: "Regression 
Projection (3D)" - - name: regression_to_simplex # <- NEW VISUALIZATION - controls: - slider: step - dropdown: layer - cumulative: false - data_mapping: - mappings: - prob_0: {source: arrays, key: projected, component: 0} - prob_1: {source: arrays, key: projected, component: 1} - prob_2: {source: arrays, key: projected, component: 2} - preprocessing: - - type: combine_rgb - input_fields: [prob_0, prob_1, prob_2] - output_fields: [prediction_color] - - type: project_to_simplex - input_fields: [prob_0, prob_1, prob_2] - output_fields: [simplex_x, simplex_y] - backend: plotly - layer: - geometry: - type: point - props: {size: 4} - aesthetics: - x: {field: simplex_x, type: quantitative, title: "Simplex X"} - y: {field: simplex_y, type: quantitative, title: "Simplex Y"} - color: - field: prediction_color - type: nominal - opacity: {value: 0.7} - size: {width: 800, height: 600} - guides: - title: "Regression Projection (Simplex)" - - name: ground_truth_simplex - data_mapping: - mappings: - belief_r: {source: belief_states, component: 0} - belief_g: {source: belief_states, component: 1} - belief_b: {source: belief_states, component: 2} - preprocessing: - - type: combine_rgb - input_fields: [belief_r, belief_g, belief_b] - output_fields: [belief_color] - - type: project_to_simplex - input_fields: [belief_r, belief_g, belief_b] - output_fields: [simplex_x, simplex_y] - backend: plotly - layer: - geometry: - type: point - props: {size: 4} - aesthetics: - x: {field: simplex_x, type: quantitative, title: "Simplex X"} - y: {field: simplex_y, type: quantitative, title: "Simplex Y"} - color: - field: belief_color - type: nominal - opacity: {value: 0.7} - size: {width: 800, height: 600} - guides: - title: "Regression Projection (Simplex)" - # Temporal visualization: RMSE over training steps - - name: rmse_over_time - controls: - accumulate_steps: true - dropdown: layer # User can filter by layer in UI - data_mapping: - mappings: - rmse: {source: scalar_pattern, key: 
"rmse/L*.resid.post"} # Wildcard expands to all layers - backend: altair - layer: - geometry: - type: line - props: {} - aesthetics: - x: {field: step, type: quantitative, title: "Training Step"} - y: {field: rmse, type: quantitative, title: "RMSE"} - color: {field: layer, type: nominal, title: "Layer"} - strokeDash: {field: layer, type: nominal} - size: {width: 800, height: 400} - guides: - title: "RMSE Evolution Across Training" - subtitle: "Tracking model convergence by layer" - regression_concat: # <- THIS RETURNS SCALARS AND PROJECTIONS - instance: - _target_: simplexity.activations.activation_analyses.LinearRegressionAnalysis - last_token_only: false - concat_layers: true - use_probs_as_weights: true - # rcond_values: [1e-15, 1e-10, 1e-8, 1e-6, 1e-4, 1e-2] - visualizations: # <- THIS IS OPTIONAL - - name: regression_to_simplex_concat # <- NEW VISUALIZATION - controls: - slider: step - dropdown: layer - cumulative: false - data_mapping: - mappings: - prob_0: {source: arrays, key: projected, component: 0} - prob_1: {source: arrays, key: projected, component: 1} - prob_2: {source: arrays, key: projected, component: 2} - preprocessing: - - type: combine_rgb - input_fields: [prob_0, prob_1, prob_2] - output_fields: [prediction_color] - - type: project_to_simplex - input_fields: [prob_0, prob_1, prob_2] - output_fields: [simplex_x, simplex_y] - backend: plotly - layer: - geometry: - type: point - props: {size: 4} - aesthetics: - x: {field: simplex_x, type: quantitative, title: "Simplex X"} - y: {field: simplex_y, type: quantitative, title: "Simplex Y"} - color: - field: prediction_color - type: nominal - opacity: {value: 0.7} - size: {width: 800, height: 600} - guides: - title: "Regression Projection (Simplex)" \ No newline at end of file diff --git a/tests/end_to_end/configs/visualization/3d_scatter.yaml b/tests/end_to_end/configs/visualization/3d_scatter.yaml deleted file mode 100644 index c28699a3..00000000 --- 
a/tests/end_to_end/configs/visualization/3d_scatter.yaml +++ /dev/null @@ -1,5 +0,0 @@ -defaults: - - data: synthetic_cloud - - plot: scatter3d - -output_html: scatter3d_demo.html diff --git a/tests/end_to_end/configs/visualization/plot/scatter3d.yaml b/tests/end_to_end/configs/visualization/plot/scatter3d.yaml deleted file mode 100644 index d5d2d7ae..00000000 --- a/tests/end_to_end/configs/visualization/plot/scatter3d.yaml +++ /dev/null @@ -1,26 +0,0 @@ -backend: plotly -data: - source: cloud -layers: - - name: cluster_cloud - geometry: - type: point - props: - size: 8 - aesthetics: - x: { field: x, type: quantitative, title: "X position" } - y: { field: y, type: quantitative, title: "Y position" } - z: { field: z, type: quantitative, title: "Z position" } - color: { field: cluster, type: nominal, title: Cluster } - size: { field: magnitude, type: quantitative } - opacity: { value: 0.85 } - tooltip: - - { field: cluster, type: nominal, title: Cluster } - - { field: magnitude, type: quantitative, title: Magnitude } -size: - width: 800 - height: 600 -guides: - title: "Synthetic 3D Scatter" - subtitle: "Points sampled from multivariate Gaussians" - caption: "Configured entirely via Hydra YAML" diff --git a/tests/end_to_end/visualization_3d_demo.py b/tests/end_to_end/visualization_3d_demo.py deleted file mode 100644 index 06766886..00000000 --- a/tests/end_to_end/visualization_3d_demo.py +++ /dev/null @@ -1,201 +0,0 @@ -"""Hydra-powered demo that renders a 3D scatter plot via PlotConfig YAML.""" - -from __future__ import annotations - -import types -from dataclasses import dataclass, field, fields, is_dataclass -from pathlib import Path -from typing import Any, Union, cast, get_args, get_origin, get_type_hints - -import hydra -import numpy as np -import pandas as pd -from hydra.utils import get_original_cwd -from omegaconf import DictConfig, OmegaConf - -from simplexity.visualization.altair_renderer import build_altair_chart -from 
simplexity.visualization.data_registry import DictDataRegistry -from simplexity.visualization.plotly_renderer import build_plotly_figure -from simplexity.visualization.structured_configs import PlotConfig - - -@dataclass -class SyntheticDataConfig: - """Configuration for generating synthetic 3D clusters.""" - - source_name: str = "cloud" - num_points: int = 600 - clusters: int = 4 - cluster_spread: float = 0.8 - seed: int = 11 - - -@dataclass -class Scatter3DDemoConfig: - """Root Hydra config for the demo.""" - - data: SyntheticDataConfig = field(default_factory=SyntheticDataConfig) - plot: PlotConfig = field(default_factory=PlotConfig) - output_html: str = "scatter3d_demo.html" - - -@hydra.main(version_base=None, config_path="configs/visualization", config_name="3d_scatter") -def main(cfg: DictConfig) -> None: - """Main entry point for the demo.""" - data_cfg = _convert_cfg(cfg.data, SyntheticDataConfig) - plot_cfg = _convert_cfg(cfg.plot, PlotConfig) - output_html = cast(str, cfg.get("output_html", "scatter3d_demo.html")) - dataframe = _generate_dataset(data_cfg) - registry = DictDataRegistry({data_cfg.source_name: dataframe}) - - if plot_cfg.backend == "plotly": - figure = build_plotly_figure(plot_cfg, registry) - _save_plotly_figure(figure, output_html) - else: - chart = build_altair_chart(plot_cfg, registry) - _save_altair_chart(chart, output_html) - - print(f"Saved interactive plot to {output_html}") # noqa: T201 - demo script output - - -def _generate_dataset(cfg: SyntheticDataConfig) -> pd.DataFrame: - rng = np.random.default_rng(cfg.seed) - points_per_cluster = max(1, cfg.num_points // cfg.clusters) - remainder = cfg.num_points % cfg.clusters - records: list[dict[str, float | int | str]] = [] - for cluster_idx in range(cfg.clusters): - center = rng.normal(0.0, cfg.cluster_spread * 3.0, size=3) - count = points_per_cluster + (1 if cluster_idx < remainder else 0) - for _ in range(count): - noise = rng.normal(0.0, cfg.cluster_spread, size=3) - x, y, z = 
center + noise - magnitude = float(np.sqrt(x**2 + y**2 + z**2)) - records.append( - { - "cluster": f"C{cluster_idx + 1}", - "x": float(x), - "y": float(y), - "z": float(z), - "magnitude": magnitude, - } - ) - return pd.DataFrame.from_records(records) - - -def _convert_cfg[T](cfg_section: DictConfig, schema: type[T]) -> T: - """Convert DictConfig to dataclass instance, handling nested dataclasses recursively.""" - # Convert DictConfig to plain dict to avoid OmegaConf's Union/Literal type validation issues - cfg_dict = OmegaConf.to_container(cfg_section, resolve=True) or {} - return _dict_to_dataclass(cfg_dict, schema) - - -def _convert_value_by_type(value: Any, field_type: Any) -> Any: - """Convert a value based on its expected type (handles lists, dataclasses, etc.).""" - origin = get_origin(field_type) - - # Handle list types - if origin is list: - args = get_args(field_type) - if isinstance(value, list) and args: - item_type = args[0] - if is_dataclass(item_type): - return [ - _dict_to_dataclass(item, item_type) if isinstance(item, dict) else item # type: ignore[arg-type] - for item in value - ] - return value - # Handle dataclass types - if isinstance(value, dict) and is_dataclass(field_type): - return _dict_to_dataclass(value, field_type) # type: ignore[arg-type] - - return value - - -def _dict_to_dataclass(data: dict[str, Any] | Any, schema: type[Any]) -> Any: # pylint: disable=too-many-branches - """Recursively convert dict to dataclass instance, handling nested structures.""" - if not isinstance(data, dict): - return data - - if not is_dataclass(schema): - return data - - # Get field types from the dataclass schema, resolving string annotations - try: - field_types = get_type_hints(schema) - except (TypeError, NameError): - # Fallback to field.type if get_type_hints fails (e.g., forward references) - field_types = {f.name: f.type for f in fields(schema)} - - # Convert nested dicts to their corresponding dataclass types - converted: dict[str, Any] = {} - for 
key, value in data.items(): - if key not in field_types: - converted[key] = value - continue - - field_type = field_types[key] - origin = get_origin(field_type) - - # Handle Optional types (Union[X, None] or X | None) - if origin is Union or origin is types.UnionType: - args = get_args(field_type) - # Handle Optional[X] -> Union[X, None] - if args and len(args) == 2 and types.NoneType in args: - if value is None: - converted[key] = None - else: - non_none_type = next((t for t in args if t is not types.NoneType), None) - if non_none_type: - # Recursively handle the non-None type (could be a list, dict, etc.) - converted[key] = _convert_value_by_type(value, non_none_type) - else: - converted[key] = value - elif args and isinstance(value, dict): - # For other Union types, try to find a dataclass type that matches - dataclass_type = next((t for t in args if is_dataclass(t)), None) - if dataclass_type: - converted[key] = _dict_to_dataclass(value, dataclass_type) # type: ignore[arg-type] - else: - converted[key] = value - else: - # For other Union types, try to convert based on the first non-None type - non_none_types = [t for t in args if t is not types.NoneType] if args else [] - if non_none_types and value is not None: - converted[key] = _convert_value_by_type(value, non_none_types[0]) - else: - converted[key] = value - # Handle list types - elif origin is list: - args = get_args(field_type) - if isinstance(value, list) and args: - item_type = args[0] - if is_dataclass(item_type): - converted[key] = [ - _dict_to_dataclass(item, item_type) if isinstance(item, dict) else item # type: ignore[arg-type] - for item in value - ] - else: - converted[key] = value - else: - converted[key] = value - # Handle direct dataclass types - elif isinstance(value, dict) and is_dataclass(field_type): - converted[key] = _dict_to_dataclass(value, field_type) # type: ignore[arg-type] - else: - converted[key] = value - - return schema(**converted) - - -def _save_plotly_figure(figure, 
filename: str) -> None: - output_path = Path(get_original_cwd()) / filename - figure.write_html(str(output_path), include_plotlyjs="cdn") - - -def _save_altair_chart(chart, filename: str) -> None: - output_path = Path(get_original_cwd()) / filename - chart.save(str(output_path)) - - -if __name__ == "__main__": - main() # pylint: disable=no-value-for-parameter diff --git a/tests/end_to_end/visualization_demo.py b/tests/end_to_end/visualization_demo.py deleted file mode 100644 index 2da72bb8..00000000 --- a/tests/end_to_end/visualization_demo.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Standalone demo that renders a layered Altair chart via visualization configs.""" - -from __future__ import annotations - -from pathlib import Path - -import numpy as np -import pandas as pd - -from simplexity.visualization.altair_renderer import build_altair_chart -from simplexity.visualization.data_registry import DictDataRegistry -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - DataConfig, - GeometryConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, - TransformConfig, -) - - -def main() -> None: - """Generate a toy dataset, build a PlotConfig, and save the rendered chart.""" - df = _create_demo_dataframe() - registry = DictDataRegistry({"metrics": df}) - plot_cfg = _build_plot_config() - chart = build_altair_chart(plot_cfg, registry) - - output_path = Path(__file__).with_name("visualization_demo.html") - chart.save(str(output_path)) - print(f"Wrote visualization demo to {output_path}") # noqa: T201 - simple example harness - - -def _create_demo_dataframe() -> pd.DataFrame: - rng = np.random.default_rng(7) - records: list[dict[str, float | str | int]] = [] - for run_idx in range(3): - run_id = f"run_{run_idx + 1}" - for epoch in range(1, 51): - base_loss = np.exp(-epoch / 25.0) + 0.1 * run_idx - jitter = rng.normal(0.0, 0.02) - loss = max(base_loss + jitter, 1e-4) - accuracy = 0.55 + 0.008 * epoch + 
rng.normal(0.0, 0.01) - records.append( - { - "run_id": run_id, - "epoch": epoch, - "loss": loss, - "accuracy": accuracy, - } - ) - return pd.DataFrame(records) - - -def _build_plot_config() -> PlotConfig: - log_transform = TransformConfig(op="calculate", as_field="log_loss", expr="log(loss)") - base_aesthetics = AestheticsConfig( - x=ChannelAestheticsConfig(field="epoch", type="quantitative", title="Epoch"), - y=ChannelAestheticsConfig(field="log_loss", type="quantitative", title="log(loss)"), - tooltip=[ - ChannelAestheticsConfig(field="run_id", type="nominal", title="Run"), - ChannelAestheticsConfig(field="epoch", type="quantitative", title="Epoch"), - ChannelAestheticsConfig(field="log_loss", type="quantitative", title="log(loss)"), - ], - ) - raw_layer = LayerConfig( - name="raw_runs", - geometry=GeometryConfig(type="line", props={"opacity": 0.4}), - aesthetics=AestheticsConfig( - x=base_aesthetics.x, - y=base_aesthetics.y, - color=ChannelAestheticsConfig(field="run_id", type="nominal", title="Run"), - tooltip=base_aesthetics.tooltip, - ), - ) - mean_layer = LayerConfig( - name="mean_line", - geometry=GeometryConfig(type="line", props={"strokeWidth": 3, "color": "#111111"}), - aesthetics=AestheticsConfig( - x=base_aesthetics.x, - y=ChannelAestheticsConfig( - field="log_loss", - type="quantitative", - aggregate="mean", - title="Mean log(loss)", - ), - ), - ) - return PlotConfig( - data=DataConfig(source="metrics"), - transforms=[log_transform], - layers=[raw_layer, mean_layer], - size=PlotSizeConfig(width=600, height=400), - guides=PlotLevelGuideConfig( - title="Training loss over epochs", - subtitle="Each line is a synthetic training run built from random noise.", - ), - ) - - -if __name__ == "__main__": - main() diff --git a/tests/visualization/test_altair_renderer.py b/tests/visualization/test_altair_renderer.py deleted file mode 100644 index 6c9bae29..00000000 --- a/tests/visualization/test_altair_renderer.py +++ /dev/null @@ -1,330 +0,0 @@ -"""Tests for 
altair renderer.""" - -import pandas as pd -import pytest - -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.altair_renderer import ( - _apply_geometry, - _build_layer_chart, - _encode_aesthetics, - build_altair_chart, -) -from simplexity.visualization.data_registry import DictDataRegistry -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - DataConfig, - FacetConfig, - GeometryConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - -try: - import altair as alt -except ImportError: - pytest.skip("Altair not installed", allow_module_level=True) - - -class TestBuildAltairChart: - """Tests for build_altair_chart function.""" - - def test_raises_when_no_layers(self): - """Test that empty layers raises error.""" - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[]) - registry = DictDataRegistry({"main": pd.DataFrame()}) - with pytest.raises(ConfigValidationError, match="at least one layer"): - build_altair_chart(plot_cfg, registry) - - def test_builds_simple_point_chart(self): - """Test building a simple point chart.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_builds_line_chart(self): - """Test building a line chart.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="line"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - 
plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_builds_bar_chart(self): - """Test building a bar chart.""" - df = pd.DataFrame({"category": ["a", "b", "c"], "value": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="bar"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="category", type="nominal"), - y=ChannelAestheticsConfig(field="value", type="quantitative"), - ), - ) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_applies_color_encoding(self): - """Test that color encoding is applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "cat": ["a", "b", "a"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - color=ChannelAestheticsConfig(field="cat", type="nominal"), - ), - ) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_applies_size(self): - """Test that chart size is applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - size = PlotSizeConfig(width=800, height=600) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], size=size) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not 
None - - def test_applies_guides(self): - """Test that plot guides are applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - guides = PlotLevelGuideConfig(title="My Chart") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], guides=guides) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_multiple_layers(self): - """Test building chart with multiple layers.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer1 = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - layer2 = LayerConfig( - geometry=GeometryConfig(type="line"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer1, layer2]) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - -class TestApplyGeometry: - """Tests for _apply_geometry function.""" - - def test_point_geometry(self): - """Test point geometry application.""" - chart = alt.Chart(pd.DataFrame({"x": [1]})) - geometry = GeometryConfig(type="point") - result = _apply_geometry(chart, geometry) - assert result is not None - - def test_line_geometry(self): - """Test line geometry application.""" - chart = alt.Chart(pd.DataFrame({"x": [1]})) - geometry = GeometryConfig(type="line") - result = _apply_geometry(chart, geometry) - assert result is not None - - def test_bar_geometry(self): - """Test 
bar geometry application.""" - chart = alt.Chart(pd.DataFrame({"x": [1]})) - geometry = GeometryConfig(type="bar") - result = _apply_geometry(chart, geometry) - assert result is not None - - def test_area_geometry(self): - """Test area geometry application.""" - chart = alt.Chart(pd.DataFrame({"x": [1]})) - geometry = GeometryConfig(type="area") - result = _apply_geometry(chart, geometry) - assert result is not None - - def test_invalid_geometry_raises(self): - """Test that invalid geometry type raises error.""" - chart = alt.Chart(pd.DataFrame({"x": [1]})) - geometry = GeometryConfig(type="invalid_type") - with pytest.raises(ConfigValidationError, match="does not support geometry"): - _apply_geometry(chart, geometry) - - -class TestEncodeAesthetics: - """Tests for _encode_aesthetics function.""" - - def test_basic_x_y_encoding(self): - """Test basic x and y encoding.""" - aes = AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ) - encoding = _encode_aesthetics(aes) - assert "x" in encoding - assert "y" in encoding - - def test_color_encoding(self): - """Test color encoding.""" - aes = AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - color=ChannelAestheticsConfig(field="cat", type="nominal"), - ) - encoding = _encode_aesthetics(aes) - assert "color" in encoding - - def test_size_encoding(self): - """Test size encoding.""" - aes = AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - size=ChannelAestheticsConfig(field="size", type="quantitative"), - ) - encoding = _encode_aesthetics(aes) - assert "size" in encoding - - def test_opacity_encoding(self): - """Test opacity encoding.""" - aes = AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - opacity=ChannelAestheticsConfig(field="opacity", type="quantitative"), - ) - encoding = _encode_aesthetics(aes) - assert "opacity" in encoding - - def 
test_empty_aesthetics_returns_empty(self): - """Test that empty aesthetics returns empty dict.""" - aes = AestheticsConfig() - encoding = _encode_aesthetics(aes) - assert not encoding - - -class TestFaceting: - """Tests for faceted charts.""" - - def test_column_facet(self): - """Test column faceting.""" - df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", "b"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - facet = FacetConfig(column="group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_row_facet(self): - """Test row faceting.""" - df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", "b"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - facet = FacetConfig(row="group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - def test_row_and_column_facet(self): - """Test both row and column faceting.""" - df = pd.DataFrame( - { - "x": [1, 2, 3, 4, 5, 6, 7, 8], - "y": [4, 5, 6, 7, 8, 9, 10, 11], - "row_group": ["a", "a", "a", "a", "b", "b", "b", "b"], - "col_group": ["x", "x", "y", "y", "x", "x", "y", "y"], - } - ) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - 
), - ) - facet = FacetConfig(row="row_group", column="col_group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - chart = build_altair_chart(plot_cfg, registry) - assert chart is not None - - -class TestBuildLayerChart: - """Tests for _build_layer_chart function.""" - - def test_builds_chart_from_layer(self): - """Test building a chart from layer config.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - chart = _build_layer_chart(layer, df) - assert chart is not None - - def test_applies_geometry_props(self): - """Test that geometry props are applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point", props={"size": 100}), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - chart = _build_layer_chart(layer, df) - assert chart is not None diff --git a/tests/visualization/test_data_pipeline.py b/tests/visualization/test_data_pipeline.py deleted file mode 100644 index 08c50730..00000000 --- a/tests/visualization/test_data_pipeline.py +++ /dev/null @@ -1,317 +0,0 @@ -"""Tests for data pipeline transforms and materialization.""" - -import pandas as pd -import pytest - -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.data_pipeline import ( - _apply_transform, - _derive_fold_names, - _parse_function_expr, - apply_filters, - apply_transforms, - build_plot_level_dataframe, - materialize_data, - normalize_expression, - resolve_layer_dataframe, -) -from simplexity.visualization.data_registry import DictDataRegistry -from 
simplexity.visualization.structured_configs import ( - DataConfig, - LayerConfig, - TransformConfig, -) - - -class TestNormalizeExpression: - """Tests for normalize_expression.""" - - def test_removes_datum_prefix(self): - """Test that datum. prefix is removed.""" - assert normalize_expression("datum.x > 5") == "x > 5" - - def test_strips_whitespace(self): - """Test that whitespace is stripped.""" - assert normalize_expression(" x > 5 ") == "x > 5" - - -class TestApplyFilters: - """Tests for apply_filters.""" - - def test_single_filter(self): - """Test applying a single filter.""" - df = pd.DataFrame({"x": [1, 2, 3, 4, 5], "y": [10, 20, 30, 40, 50]}) - result = apply_filters(df, ["x > 2"]) - assert list(result["x"]) == [3, 4, 5] - - def test_multiple_filters(self): - """Test applying multiple filters.""" - df = pd.DataFrame({"x": [1, 2, 3, 4, 5], "y": [10, 20, 30, 40, 50]}) - result = apply_filters(df, ["x > 2", "y < 50"]) - assert list(result["x"]) == [3, 4] - - def test_filter_with_datum_prefix(self): - """Test that datum. 
prefix is normalized.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - result = apply_filters(df, ["datum.x > 1"]) - assert list(result["x"]) == [2, 3] - - -class TestMaterializeData: - """Tests for materialize_data.""" - - def test_basic_materialization(self): - """Test basic data materialization.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - registry = DictDataRegistry({"main": df}) - data_cfg = DataConfig(source="main") - result = materialize_data(data_cfg, registry) - assert list(result.columns) == ["x", "y"] - assert len(result) == 3 - - def test_with_filters(self): - """Test materialization with filters.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - registry = DictDataRegistry({"main": df}) - data_cfg = DataConfig(source="main", filters=["x > 1"]) - result = materialize_data(data_cfg, registry) - assert len(result) == 2 - - def test_with_column_selection(self): - """Test materialization with column selection.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9]}) - registry = DictDataRegistry({"main": df}) - data_cfg = DataConfig(source="main", columns=["x", "z"]) - result = materialize_data(data_cfg, registry) - assert list(result.columns) == ["x", "z"] - - def test_missing_column_raises(self): - """Test that missing columns raise an error.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - registry = DictDataRegistry({"main": df}) - data_cfg = DataConfig(source="main", columns=["x", "missing"]) - with pytest.raises(ConfigValidationError, match="not present"): - materialize_data(data_cfg, registry) - - -class TestBuildPlotLevelDataframe: - """Tests for build_plot_level_dataframe.""" - - def test_with_transforms(self): - """Test building dataframe with transforms.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - registry = DictDataRegistry({"main": df}) - data_cfg = DataConfig(source="main") - transforms = [TransformConfig(op="calculate", expr="x * 2", as_field="x2")] - result = build_plot_level_dataframe(data_cfg, 
transforms, registry) - assert "x2" in result.columns - assert list(result["x2"]) == [2, 4, 6] - - def test_without_transforms(self): - """Test building dataframe without transforms.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - registry = DictDataRegistry({"main": df}) - data_cfg = DataConfig(source="main") - result = build_plot_level_dataframe(data_cfg, [], registry) - assert list(result.columns) == ["x", "y"] - assert len(result) == 3 - - -class TestResolveLayerDataframe: - """Tests for resolve_layer_dataframe.""" - - def test_uses_plot_df_when_no_layer_data(self): - """Test that plot dataframe is used when layer has no data config.""" - plot_df = pd.DataFrame({"x": [1, 2, 3]}) - layer = LayerConfig() - result = resolve_layer_dataframe(layer, plot_df, {}) - assert list(result["x"]) == [1, 2, 3] - - def test_uses_layer_data_when_specified(self): - """Test that layer data config is used when specified.""" - plot_df = pd.DataFrame({"x": [1, 2, 3]}) - layer_df = pd.DataFrame({"y": [4, 5, 6]}) - registry = DictDataRegistry({"layer_data": layer_df}) - layer = LayerConfig(data=DataConfig(source="layer_data")) - result = resolve_layer_dataframe(layer, plot_df, registry) - assert "y" in result.columns - assert "x" not in result.columns - - def test_applies_layer_transforms(self): - """Test that layer transforms are applied.""" - plot_df = pd.DataFrame({"x": [1, 2, 3]}) - layer = LayerConfig(transforms=[TransformConfig(op="filter", filter="x > 1")]) - result = resolve_layer_dataframe(layer, plot_df, {}) - assert len(result) == 2 - - -class TestApplyTransform: - """Tests for individual transform operations.""" - - def test_filter_transform(self): - """Test filter transform.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="filter", filter="x > 1") - result = _apply_transform(df, transform) - assert len(result) == 2 - - def test_filter_requires_expression(self): - """Test that filter transform requires filter expression.""" - with 
pytest.raises(ConfigValidationError, match="filter"): - TransformConfig(op="filter") - - def test_calculate_transform(self): - """Test calculate transform.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="calculate", expr="x * 2", as_field="x2") - result = _apply_transform(df, transform) - assert list(result["x2"]) == [2, 4, 6] - - def test_calculate_requires_as_field(self): - """Test that calculate transform requires as_field.""" - with pytest.raises(ConfigValidationError, match="as_field"): - TransformConfig(op="calculate", expr="x * 2") - - def test_aggregate_transform(self): - """Test aggregate transform.""" - df = pd.DataFrame({"group": ["a", "a", "b"], "value": [1, 2, 3]}) - transform = TransformConfig(op="aggregate", groupby=["group"], aggregations={"total": "sum(value)"}) - result = _apply_transform(df, transform) - assert len(result) == 2 - assert "total" in result.columns - - def test_aggregate_requires_groupby_and_aggregations(self): - """Test that aggregate transform requires groupby and aggregations.""" - with pytest.raises(ConfigValidationError, match="groupby"): - TransformConfig(op="aggregate") - - def test_bin_transform(self): - """Test bin transform.""" - df = pd.DataFrame({"x": [1, 5, 10, 15, 20]}) - transform = TransformConfig(op="bin", field="x", binned_as="x_bin", maxbins=5) - result = _apply_transform(df, transform) - assert "x_bin" in result.columns - - def test_bin_requires_field_and_binned_as(self): - """Test that bin transform requires field and binned_as.""" - with pytest.raises(ConfigValidationError, match="field"): - TransformConfig(op="bin") - - def test_window_transform_rank(self): - """Test window transform with rank function.""" - df = pd.DataFrame({"x": [3, 1, 2]}) - transform = TransformConfig(op="window", window={"x_rank": "rank(x)"}) - result = _apply_transform(df, transform) - assert "x_rank" in result.columns - - def test_window_transform_cumsum(self): - """Test window transform with cumsum 
function.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="window", window={"x_cumsum": "cumsum(x)"}) - result = _apply_transform(df, transform) - assert list(result["x_cumsum"]) == [1, 3, 6] - - def test_window_unsupported_function(self): - """Test that unsupported window function raises error.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="window", window={"x_bad": "unsupported(x)"}) - with pytest.raises(ConfigValidationError, match="not supported"): - _apply_transform(df, transform) - - def test_window_requires_window_mapping(self): - """Test that window transform requires window mapping.""" - with pytest.raises(ConfigValidationError, match="window"): - TransformConfig(op="window") - - def test_fold_transform(self): - """Test fold transform.""" - df = pd.DataFrame({"a": [1, 2], "b": [3, 4], "c": [5, 6]}) - transform = TransformConfig(op="fold", fold_fields=["a", "b"]) - result = _apply_transform(df, transform) - assert "key" in result.columns - assert "value" in result.columns - assert len(result) == 4 - - def test_fold_requires_fold_fields(self): - """Test that fold transform requires fold_fields.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="fold") - with pytest.raises(ConfigValidationError, match="fold_fields"): - _apply_transform(df, transform) - - def test_pivot_not_implemented(self): - """Test that pivot transform is not implemented.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="pivot") - with pytest.raises(ConfigValidationError, match="not implemented"): - _apply_transform(df, transform) - - def test_unsupported_op_raises(self): - """Test that unsupported operation raises error.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - transform = TransformConfig(op="unknown") - with pytest.raises(ConfigValidationError, match="Unsupported"): - _apply_transform(df, transform) - - -class TestApplyTransforms: - """Tests for apply_transforms.""" - - def 
test_applies_multiple_transforms(self): - """Test that multiple transforms are applied sequentially.""" - df = pd.DataFrame({"x": [1, 2, 3, 4, 5]}) - transforms = [ - TransformConfig(op="filter", filter="x > 2"), - TransformConfig(op="calculate", expr="x * 10", as_field="x10"), - ] - result = apply_transforms(df, transforms) - assert len(result) == 3 - assert list(result["x10"]) == [30, 40, 50] - - def test_empty_transforms_returns_original(self): - """Test that empty transforms list returns original dataframe.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - result = apply_transforms(df, []) - assert list(result["x"]) == [1, 2, 3] - - -class TestParseFunctionExpr: - """Tests for _parse_function_expr.""" - - def test_parses_valid_expression(self): - """Test parsing a valid function expression.""" - func, field = _parse_function_expr("sum(value)", expected_arg=True) - assert func == "sum" - assert field == "value" - - def test_invalid_expression_raises(self): - """Test that invalid expression raises error.""" - with pytest.raises(ConfigValidationError, match="must be of the form"): - _parse_function_expr("invalid", expected_arg=True) - - def test_missing_arg_when_expected_raises(self): - """Test that missing argument raises error when expected.""" - with pytest.raises(ConfigValidationError, match="must supply an argument"): - _parse_function_expr("func()", expected_arg=True) - - -class TestDeriveFoldNames: - """Tests for _derive_fold_names.""" - - def test_default_names(self): - """Test default names when as_fields is None.""" - var_name, value_name = _derive_fold_names(None) - assert var_name == "key" - assert value_name == "value" - - def test_single_as_field(self): - """Test with single as_field.""" - var_name, value_name = _derive_fold_names(["custom_key"]) - assert var_name == "custom_key" - assert value_name == "value" - - def test_two_as_fields(self): - """Test with two as_fields.""" - var_name, value_name = _derive_fold_names(["custom_key", "custom_value"]) - 
assert var_name == "custom_key" - assert value_name == "custom_value" diff --git a/tests/visualization/test_history.py b/tests/visualization/test_history.py deleted file mode 100644 index a3455982..00000000 --- a/tests/visualization/test_history.py +++ /dev/null @@ -1,155 +0,0 @@ -"""Tests for visualization history persistence utilities.""" - -from __future__ import annotations - -import copy - -import pandas as pd - -from simplexity.visualization.history import ( - history_paths, - load_history_dataframe, - plot_config_signature, - save_history_dataframe, -) -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - DataConfig, - GeometryConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - - -def _simple_plot_config() -> PlotConfig: - layer = LayerConfig( - geometry=GeometryConfig(type="line"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="step", type="quantitative"), - y=ChannelAestheticsConfig(field="value", type="quantitative"), - ), - ) - return PlotConfig( - backend="altair", - data=DataConfig(source="main"), - layers=[layer], - size=PlotSizeConfig(width=400, height=200), - guides=PlotLevelGuideConfig(), - ) - - -def test_plot_config_signature_changes_with_config_mutation(): - """Test that plot config signature changes when config is mutated.""" - cfg = _simple_plot_config() - clone = copy.deepcopy(cfg) - clone.size.width = 800 - - assert plot_config_signature(cfg) != plot_config_signature(clone) - - -def test_history_round_trip(tmp_path): - """Test saving and loading history dataframe preserves data.""" - cfg = _simple_plot_config() - signature = plot_config_signature(cfg) - data_path, meta_path = history_paths(tmp_path, "demo") - df = pd.DataFrame({"step": [0, 1], "value": [0.1, 0.2]}) - - save_history_dataframe( - df, - data_path, - meta_path, - signature=signature, - analysis="analysis", - name="viz", - backend="altair", - ) - - loaded = 
load_history_dataframe(data_path, meta_path, expected_signature=signature) - pd.testing.assert_frame_equal(loaded, df) - - -def test_load_returns_empty_when_files_missing(tmp_path): - """Test that missing files return empty dataframe.""" - data_path, meta_path = history_paths(tmp_path, "nonexistent") - loaded = load_history_dataframe(data_path, meta_path, expected_signature="any") - assert loaded.empty - - -def test_load_returns_empty_when_metadata_corrupted(tmp_path): - """Test that corrupted metadata returns empty dataframe.""" - data_path, meta_path = history_paths(tmp_path, "corrupted") - data_path.parent.mkdir(parents=True, exist_ok=True) - data_path.write_text('{"step": 0, "value": 0.1}\n') - meta_path.write_text("not valid json {{{") - loaded = load_history_dataframe(data_path, meta_path, expected_signature="any") - assert loaded.empty - - -def test_load_returns_empty_when_signature_mismatched(tmp_path): - """Test that mismatched signature returns empty dataframe.""" - cfg = _simple_plot_config() - signature = plot_config_signature(cfg) - data_path, meta_path = history_paths(tmp_path, "mismatched") - df = pd.DataFrame({"step": [0], "value": [0.1]}) - - save_history_dataframe( - df, - data_path, - meta_path, - signature=signature, - analysis="analysis", - name="viz", - backend="altair", - ) - - loaded = load_history_dataframe(data_path, meta_path, expected_signature="different_signature") - assert loaded.empty - - -def test_load_returns_empty_when_data_corrupted(tmp_path): - """Test that corrupted data file returns empty dataframe.""" - cfg = _simple_plot_config() - signature = plot_config_signature(cfg) - data_path, meta_path = history_paths(tmp_path, "data_corrupted") - df = pd.DataFrame({"step": [0], "value": [0.1]}) - - save_history_dataframe( - df, - data_path, - meta_path, - signature=signature, - analysis="analysis", - name="viz", - backend="altair", - ) - - # Corrupt the data file - data_path.write_text("not valid jsonl {{{") - - loaded = 
load_history_dataframe(data_path, meta_path, expected_signature=signature) - assert loaded.empty - - -def test_plot_config_signature_handles_path_values(): - """Test that plot config signature can serialize Path objects.""" - layer = LayerConfig( - geometry=GeometryConfig(type="line"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="step", type="quantitative"), - y=ChannelAestheticsConfig(field="value", type="quantitative"), - ), - ) - cfg = PlotConfig( - backend="altair", - data=DataConfig(source="main"), - layers=[layer], - size=PlotSizeConfig(width=400, height=200), - guides=PlotLevelGuideConfig(), - ) - # This should not raise even with complex nested objects - sig = plot_config_signature(cfg) - assert isinstance(sig, str) - assert len(sig) == 64 # SHA256 hex length diff --git a/tests/visualization/test_plotly_renderer.py b/tests/visualization/test_plotly_renderer.py deleted file mode 100644 index f427af61..00000000 --- a/tests/visualization/test_plotly_renderer.py +++ /dev/null @@ -1,411 +0,0 @@ -"""Tests for plotly renderer.""" - -import pandas as pd -import pytest - -from simplexity.exceptions import ConfigValidationError -from simplexity.visualization.data_registry import DictDataRegistry -from simplexity.visualization.plotly_renderer import ( - _axis_title, - _build_scatter2d, - _build_scatter3d, - _require_field, - _resolve_layer_dropdown, - _resolve_slider_control, - build_plotly_figure, -) -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - DataConfig, - FacetConfig, - GeometryConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - - -class TestHelperFunctions: - """Tests for helper functions.""" - - def test_axis_title_from_config(self): - """Test axis title extraction from config.""" - config = ChannelAestheticsConfig(field="x", type="quantitative", title="X Axis") - assert _axis_title(config) == "X Axis" - - def 
test_axis_title_none_when_no_config(self): - """Test axis title is None when no config.""" - assert _axis_title(None) is None - - def test_axis_title_uses_field_when_no_title(self): - """Test axis title falls back to field name when no title.""" - config = ChannelAestheticsConfig(field="x", type="quantitative") - assert _axis_title(config) == "x" - - def test_require_field_extracts_field(self): - """Test that require_field extracts the field name.""" - config = ChannelAestheticsConfig(field="my_field", type="quantitative") - assert _require_field(config, "x") == "my_field" - - def test_require_field_raises_when_none(self): - """Test that require_field raises when config is None.""" - with pytest.raises(ConfigValidationError, match="requires"): - _require_field(None, "x") - - def test_require_field_raises_when_no_field(self): - """Test that require_field raises when field is None.""" - config = ChannelAestheticsConfig(field=None, type="quantitative") - with pytest.raises(ConfigValidationError, match="requires"): - _require_field(config, "x") - - -class TestResolveControls: - """Tests for control resolution functions.""" - - def test_resolve_slider_control_none_when_no_controls(self): - """Test slider returns None when no controls.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - result = _resolve_slider_control(df, None) - assert result is None - - def test_resolve_layer_dropdown_none_when_no_controls(self): - """Test layer dropdown returns None when no controls.""" - df = pd.DataFrame({"x": [1, 2, 3]}) - result = _resolve_layer_dropdown(df, None) - assert result is None - - -class TestBuildScatter2D: - """Tests for 2D scatter plot building.""" - - def test_basic_scatter2d(self): - """Test basic 2D scatter plot building.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", 
type="quantitative"), - ), - ) - fig = _build_scatter2d(layer, df, None) - assert fig is not None - assert len(fig.data) > 0 - - def test_scatter2d_with_color(self): - """Test 2D scatter with color encoding.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "category": ["a", "b", "a"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - color=ChannelAestheticsConfig(field="category", type="nominal"), - ), - ) - fig = _build_scatter2d(layer, df, None) - assert fig is not None - - -class TestBuildScatter3D: - """Tests for 3D scatter plot building.""" - - def test_basic_scatter3d(self): - """Test basic 3D scatter plot building.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - z=ChannelAestheticsConfig(field="z", type="quantitative"), - ), - ) - fig = _build_scatter3d(layer, df, None) - assert fig is not None - assert len(fig.data) > 0 - - def test_scatter3d_with_color(self): - """Test 3D scatter with color encoding.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9], "cat": ["a", "b", "a"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - z=ChannelAestheticsConfig(field="z", type="quantitative"), - color=ChannelAestheticsConfig(field="cat", type="nominal"), - ), - ) - fig = _build_scatter3d(layer, df, None) - assert fig is not None - - -class TestBuildPlotlyFigure: - """Tests for the main build_plotly_figure function.""" - - def test_raises_when_no_layers(self): - """Test 
that empty layers raises error.""" - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[]) - registry = DictDataRegistry({"main": pd.DataFrame()}) - with pytest.raises(ConfigValidationError, match="at least one layer"): - build_plotly_figure(plot_cfg, registry) - - def test_raises_when_multiple_layers(self): - """Test that multiple layers raises error (currently unsupported).""" - layer1 = LayerConfig(geometry=GeometryConfig(type="point")) - layer2 = LayerConfig(geometry=GeometryConfig(type="point")) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer1, layer2]) - registry = DictDataRegistry({"main": pd.DataFrame({"x": [1], "y": [2]})}) - with pytest.raises(ConfigValidationError, match="exactly one layer"): - build_plotly_figure(plot_cfg, registry) - - def test_raises_when_non_point_geometry(self): - """Test that non-point geometry raises error.""" - layer = LayerConfig(geometry=GeometryConfig(type="line")) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": pd.DataFrame({"x": [1], "y": [2]})}) - with pytest.raises(ConfigValidationError, match="point geometry"): - build_plotly_figure(plot_cfg, registry) - - def test_builds_2d_figure(self): - """Test building a basic 2D figure.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None - - def test_builds_3d_figure(self): - """Test building a basic 3D figure.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": [7, 8, 9]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( 
- x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - z=ChannelAestheticsConfig(field="z", type="quantitative"), - ), - ) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer]) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None - - def test_applies_guides(self): - """Test that plot guides are applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - guides = PlotLevelGuideConfig(title="My Plot", subtitle="My Subtitle") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], guides=guides) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - layout = fig.to_dict()["layout"] - assert "My Plot" in layout["title"]["text"] - - def test_applies_size(self): - """Test that plot size is applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - size = PlotSizeConfig(width=800, height=600) - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], size=size) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - layout = fig.to_dict()["layout"] - assert layout["width"] == 800 - assert layout["height"] == 600 - - -class TestFacetedFigures: - """Tests for faceted figure building.""" - - def test_builds_column_faceted_figure(self): - """Test building a column-faceted 2D figure.""" - df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", 
"b", "b"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - facet = FacetConfig(column="group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None - - def test_builds_row_faceted_figure(self): - """Test building a row-faceted 2D figure.""" - df = pd.DataFrame({"x": [1, 2, 3, 4], "y": [4, 5, 6, 7], "group": ["a", "a", "b", "b"]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - facet = FacetConfig(row="group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None - - def test_builds_3d_faceted_figure(self): - """Test building a 3D faceted figure.""" - df = pd.DataFrame( - { - "x": [1, 2, 3, 4], - "y": [4, 5, 6, 7], - "z": [7, 8, 9, 10], - "group": ["a", "a", "b", "b"], - } - ) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - z=ChannelAestheticsConfig(field="z", type="quantitative"), - ), - ) - facet = FacetConfig(column="group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None - - def test_builds_row_and_column_faceted_figure(self): - """Test building figure with both row and column facets.""" - df = 
pd.DataFrame( - { - "x": [1, 2, 3, 4, 5, 6, 7, 8], - "y": [1, 2, 3, 4, 5, 6, 7, 8], - "row_grp": ["r1", "r1", "r1", "r1", "r2", "r2", "r2", "r2"], - "col_grp": ["c1", "c1", "c2", "c2", "c1", "c1", "c2", "c2"], - } - ) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - facet = FacetConfig(row="row_grp", column="col_grp") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None - - -class TestScatterWithEncodings: - """Tests for scatter plots with various encodings.""" - - def test_scatter2d_with_size_encoding(self): - """Test 2D scatter with size encoding.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "size_val": [10, 20, 30]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - size=ChannelAestheticsConfig(field="size_val", type="quantitative"), - ), - ) - fig = _build_scatter2d(layer, df, None) - assert fig is not None - - def test_scatter2d_with_opacity(self): - """Test 2D scatter with opacity encoding.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - opacity=ChannelAestheticsConfig(field=None, type="quantitative", value=0.5), - ), - ) - fig = _build_scatter2d(layer, df, None) - assert fig is not None - - def test_scatter3d_with_size_encoding(self): - """Test 3D scatter with size encoding.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6], "z": 
[7, 8, 9], "size_val": [10, 20, 30]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - z=ChannelAestheticsConfig(field="z", type="quantitative"), - size=ChannelAestheticsConfig(field="size_val", type="quantitative"), - ), - ) - fig = _build_scatter3d(layer, df, None) - assert fig is not None - - def test_figure_with_background_color(self): - """Test that background color is applied.""" - df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]}) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - ), - ) - plot_cfg = PlotConfig( - data=DataConfig(source="main"), - layers=[layer], - background="#f0f0f0", - ) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - layout = fig.to_dict()["layout"] - assert layout["plot_bgcolor"] == "#f0f0f0" - - def test_faceted_figure_with_color_encoding(self): - """Test faceted figure with color encoding.""" - df = pd.DataFrame( - { - "x": [1, 2, 3, 4], - "y": [4, 5, 6, 7], - "group": ["a", "a", "b", "b"], - "category": ["cat1", "cat2", "cat1", "cat2"], - } - ) - layer = LayerConfig( - geometry=GeometryConfig(type="point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - color=ChannelAestheticsConfig(field="category", type="nominal"), - ), - ) - facet = FacetConfig(column="group") - plot_cfg = PlotConfig(data=DataConfig(source="main"), layers=[layer], facet=facet) - registry = DictDataRegistry({"main": df}) - fig = build_plotly_figure(plot_cfg, registry) - assert fig is not None diff --git a/tests/visualization/test_renderer_controls.py 
b/tests/visualization/test_renderer_controls.py deleted file mode 100644 index 13d28184..00000000 --- a/tests/visualization/test_renderer_controls.py +++ /dev/null @@ -1,234 +0,0 @@ -"""Tests for renderer support of interactive controls.""" - -from __future__ import annotations - -import pandas as pd - -from simplexity.activations.activation_visualizations import ( - VisualizationControlDetail, - VisualizationControlsState, -) -from simplexity.visualization.altair_renderer import build_altair_chart -from simplexity.visualization.data_registry import DictDataRegistry -from simplexity.visualization.plotly_renderer import build_plotly_figure -from simplexity.visualization.structured_configs import ( - AestheticsConfig, - ChannelAestheticsConfig, - DataConfig, - GeometryConfig, - LayerConfig, - PlotConfig, - PlotLevelGuideConfig, - PlotSizeConfig, -) - - -def _base_plot_config(backend: str = "altair") -> PlotConfig: - layer = LayerConfig( - geometry=GeometryConfig(type="line" if backend == "altair" else "point"), - aesthetics=AestheticsConfig( - x=ChannelAestheticsConfig(field="x", type="quantitative"), - y=ChannelAestheticsConfig(field="y", type="quantitative"), - z=ChannelAestheticsConfig(field="z", type="quantitative") if backend == "plotly" else None, - ), - ) - return PlotConfig( - backend=backend, - data=DataConfig(source="main"), - layers=[layer], - size=PlotSizeConfig(), - guides=PlotLevelGuideConfig(), - ) - - -def _layer_controls(values: list[str]) -> VisualizationControlsState: - dropdown = VisualizationControlDetail(type="dropdown", field="layer", options=values) - return VisualizationControlsState(dropdown=dropdown) - - -def _slider_controls(values: list[int]) -> VisualizationControlsState: - slider = VisualizationControlDetail(type="slider", field="step", options=values) - return VisualizationControlsState(slider=slider) - - -def test_altair_renderer_adds_dropdown_selection(): - """Altair renderer should add a dropdown param when controls include layer 
dropdown.""" - df = pd.DataFrame( - { - "x": [0, 1, 0, 1], - "y": [0, 1, 1, 0], - "layer": ["layer_0", "layer_0", "layer_1", "layer_1"], - } - ) - plot_cfg = _base_plot_config(backend="altair") - registry = DictDataRegistry({"main": df}) - controls = _layer_controls(["layer_0", "layer_1"]) - - chart = build_altair_chart(plot_cfg, registry, controls=controls) - spec = chart.to_dict() - - assert "params" in spec - assert spec["params"][0]["name"] == "layer_dropdown" - assert spec["params"][0]["bind"]["options"] == ["layer_0", "layer_1"] - - -def test_altair_renderer_adds_slider_binding(): - """Test that Altair renderer adds slider binding from controls.""" - df = pd.DataFrame( - { - "x": [0, 1, 0, 1], - "y": [0, 1, 1, 0], - "step": [0, 0, 1, 1], - } - ) - plot_cfg = _base_plot_config(backend="altair") - registry = DictDataRegistry({"main": df}) - controls = _slider_controls([0, 1]) - - chart = build_altair_chart(plot_cfg, registry, controls=controls) - spec = chart.to_dict() - - assert any(param["name"].endswith("_slider") for param in spec.get("params", [])) - slider_param = next(param for param in spec["params"] if param["name"].endswith("_slider")) - assert slider_param["bind"]["input"] in {"range", "select"} - - -def test_altair_renderer_skips_slider_when_accumulating(): - """Test that slider binding is skipped when accumulate_steps is enabled.""" - df = pd.DataFrame( - { - "x": [0, 1, 0, 1], - "y": [0, 1, 1, 0], - "step": [0, 0, 1, 1], - } - ) - plot_cfg = _base_plot_config(backend="altair") - registry = DictDataRegistry({"main": df}) - controls = VisualizationControlsState( - slider=VisualizationControlDetail(type="slider", field="step", options=[0, 1]), - accumulate_steps=True, - ) - - chart = build_altair_chart(plot_cfg, registry, controls=controls) - spec = chart.to_dict() - - assert all(not param["name"].endswith("_slider") for param in spec.get("params", [])) - - -def test_altair_renderer_injects_detail_when_accumulating(): - """Test that detail encoding 
is added when accumulate_steps is enabled.""" - df = pd.DataFrame( - { - "x": [0, 1, 0, 1], - "y": [0, 1, 1, 0], - "step": [0, 0, 1, 1], - } - ) - plot_cfg = _base_plot_config(backend="altair") - registry = DictDataRegistry({"main": df}) - controls = VisualizationControlsState(accumulate_steps=True) - - chart = build_altair_chart(plot_cfg, registry, controls=controls) - spec = chart.to_dict() - - assert "detail" in spec.get("encoding", {}) - detail_encoding = spec["encoding"]["detail"] - if isinstance(detail_encoding, list): - detail_encoding = detail_encoding[0] - assert detail_encoding["field"] == "step" - - -def test_altair_renderer_skips_detail_when_step_axis_used(): - """Test that detail encoding is skipped when step is already used as an axis.""" - df = pd.DataFrame( - { - "step": [0, 1, 2, 3], - "y": [0.1, 0.2, 0.3, 0.4], - } - ) - plot_cfg = _base_plot_config(backend="altair") - assert plot_cfg.layers[0].aesthetics.x is not None - assert plot_cfg.layers[0].aesthetics.y is not None - plot_cfg.layers[0].aesthetics.x.field = "step" - plot_cfg.layers[0].aesthetics.y.field = "y" - registry = DictDataRegistry({"main": df}) - controls = VisualizationControlsState(accumulate_steps=True) - - chart = build_altair_chart(plot_cfg, registry, controls=controls) - spec = chart.to_dict() - - assert "detail" not in spec.get("encoding", {}) - - -def test_plotly_renderer_adds_layer_dropdown_menu(): - """Plotly renderer should add a dropdown menu that toggles layer visibility.""" - df = pd.DataFrame( - { - "layer": ["layer_0"] * 5 + ["layer_1"] * 5, - "x": list(range(10)), - "y": [value * 0.5 for value in range(10)], - "z": [1.0] * 10, - } - ) - plot_cfg = _base_plot_config(backend="plotly") - registry = DictDataRegistry({"main": df}) - controls = _layer_controls(["layer_0", "layer_1"]) - - figure = build_plotly_figure(plot_cfg, registry, controls=controls) - - layout = figure.to_dict()["layout"] - assert layout["updatemenus"] - menu = layout["updatemenus"][0] - assert 
len(menu["buttons"]) == 2 - assert [button["label"] for button in menu["buttons"]] == ["layer_0", "layer_1"] - # First trace should be visible initially, remaining traces hidden until selected. - figure_dict = figure.to_dict() - traces = figure_dict["data"] - assert traces[0]["visible"] is True - assert all(trace.get("visible") is False for trace in traces[1:]) - - -def test_plotly_renderer_adds_step_slider(): - """Test that Plotly renderer adds a slider for step-based animation.""" - df = pd.DataFrame( - { - "layer": ["layer_0"] * 6 + ["layer_1"] * 6, - "x": list(range(12)), - "y": [value * 0.5 for value in range(12)], - "z": [1.0] * 12, - "step": [0, 0, 1, 1, 2, 2] * 2, - } - ) - plot_cfg = _base_plot_config(backend="plotly") - registry = DictDataRegistry({"main": df}) - controls = VisualizationControlsState( - dropdown=VisualizationControlDetail(type="dropdown", field="layer", options=["layer_0", "layer_1"]), - slider=VisualizationControlDetail(type="slider", field="step", options=[0, 1, 2]), - ) - - figure = build_plotly_figure(plot_cfg, registry, controls=controls) - - layout = figure.to_dict()["layout"] - assert layout["sliders"] - assert len(figure.frames) == 3 - - -def test_plotly_renderer_preserves_literal_colors(): - """Test that Plotly renderer preserves literal color values from data.""" - df = pd.DataFrame( - { - "x": [0, 1], - "y": [0, 1], - "z": [0, 1], - "literal_color": ["#00ff00", "#ff0000"], - } - ) - plot_cfg = _base_plot_config(backend="plotly") - plot_cfg.layers[0].aesthetics.color = ChannelAestheticsConfig(field="literal_color", type="nominal") - registry = DictDataRegistry({"main": df}) - - figure = build_plotly_figure(plot_cfg, registry) - - traces = figure.to_dict()["data"] - assert traces - assert list(traces[0]["marker"]["color"]) == ["#00ff00", "#ff0000"] From 368a1f4355038c4d6eb24b5d1aefb7b34ed1692c Mon Sep 17 00:00:00 2001 From: Casper Lutzhoft Christensen Date: Wed, 4 Mar 2026 10:29:40 -0800 Subject: [PATCH 31/35] re-simplify 
docstrings --- simplexity/cli/run_parallel.py | 72 ++------------ .../factored_generative_process.py | 95 ++----------------- .../structures/conditional_transitions.py | 52 +--------- .../structures/fully_conditional.py | 20 +--- simplexity/utils/factoring_utils.py | 92 ++---------------- 5 files changed, 28 insertions(+), 303 deletions(-) diff --git a/simplexity/cli/run_parallel.py b/simplexity/cli/run_parallel.py index b0c160f0..2d9d7f36 100644 --- a/simplexity/cli/run_parallel.py +++ b/simplexity/cli/run_parallel.py @@ -94,15 +94,7 @@ @dataclass(frozen=True) class Job: - """Represents a single experiment job to be executed. - - Attributes: - script: Path to the Python script to run. - config_name: Hydra config name. - overrides: Space-separated Hydra overrides. - gpu_id: GPU ID to assign via CUDA_VISIBLE_DEVICES, or None for CPU-only. - job_num: Job number for logging and identification. - """ + """Represents a single experiment job to be executed.""" script: str config_name: str @@ -111,11 +103,7 @@ class Job: job_num: int def to_cmd(self) -> list[str]: - """Render the full command list for this job. - - Returns: - List of command arguments suitable for subprocess execution. - """ + """Render the full command list for this job.""" cmd = [ "uv", "run", @@ -136,19 +124,7 @@ def device_str(self) -> str: def load_sweep_file(path: str) -> list[str]: - """Load sweep parameters from a YAML file. - - The file should contain parameter names as keys and lists of values: - - seed: [1, 2, 3, 4] - model.lr: [0.01, 0.001] - - Args: - path: Path to the sweep YAML file. 
- - Returns: - List of sweep strings like ['seed=1,2,3,4', 'model.lr=0.01,0.001'] - """ + """Load sweep parameters from a YAML file.""" cfg = OmegaConf.load(path) sweeps = [] for key, values in cfg.items(): @@ -168,14 +144,7 @@ def parse_sweep_param(sweep_str: str) -> tuple[str, list[str]]: def generate_override_combinations(sweeps: list[str]) -> list[str]: - """Generate all combinations of sweep parameters (cartesian product). - - Args: - sweeps: List of sweep strings like ['a=1,2', 'b=x,y'] - - Returns: - List of override strings like ['a=1 b=x', 'a=1 b=y', 'a=2 b=x', 'a=2 b=y'] - """ + """Generate all combinations of sweep parameters (cartesian product).""" if not sweeps: return [""] @@ -198,19 +167,7 @@ def generate_jobs( overrides: list[str], gpus: list[int] | None, ) -> list[Job]: - """Generate a list of jobs from sweep parameters and device configuration. - - Args: - script: Path to the Python script to run. - config_name: Hydra config name. - sweeps: List of sweep strings like ['a=1,2', 'b=x,y']. Should include - any sweeps loaded from sweep files. - overrides: Explicit override strings (alternative to sweeps). - gpus: List of GPU IDs for round-robin assignment, or None for CPU mode. - - Returns: - List of Job objects ready for dispatch. - """ + """Generate a list of jobs from sweep parameters and device configuration.""" if overrides: override_list = overrides elif sweeps: @@ -235,14 +192,7 @@ def generate_jobs( def _run_single_job(job: Job) -> dict: - """Run a single experiment job in a subprocess. - - Args: - job: The Job to execute. - - Returns: - Dict with job results including status, stdout, stderr. - """ + """Run a single experiment job in a subprocess.""" env = os.environ.copy() if job.gpu_id is not None: env["CUDA_VISIBLE_DEVICES"] = str(job.gpu_id) @@ -282,15 +232,7 @@ def _run_single_job(job: Job) -> dict: def dispatch_jobs(jobs: list[Job], max_parallel: int) -> list[dict]: - """Execute jobs in parallel with staggered starts. 
- - Args: - jobs: List of Job objects to execute. - max_parallel: Maximum number of jobs to run concurrently. - - Returns: - List of result dictionaries, one per job. - """ + """Execute jobs in parallel with staggered starts.""" results = [] with ProcessPoolExecutor(max_workers=max_parallel) as executor: diff --git a/simplexity/generative_processes/factored_generative_process.py b/simplexity/generative_processes/factored_generative_process.py index cbd54dc2..d4c0aab8 100644 --- a/simplexity/generative_processes/factored_generative_process.py +++ b/simplexity/generative_processes/factored_generative_process.py @@ -26,16 +26,7 @@ def _move_arrays_to_device( device: jax.Device, # type: ignore[valid-type] name: str, ) -> tuple[jax.Array, ...]: - """Move arrays to specified device with warning if needed. - - Args: - arrays: Sequence of arrays to move - device: Target device - name: Name for warning messages (e.g., "Transition matrices") - - Returns: - Tuple of arrays on target device - """ + """Move arrays to specified device with warning if needed.""" result = [] for i, arr in enumerate(arrays): if arr.device != device: @@ -52,20 +43,7 @@ def _move_arrays_to_device( class FactoredGenerativeProcess(GenerativeProcess[FactoredState]): - """Unified factored generative process with pluggable conditional structures. - - This class provides a single implementation of factored generative processes - that supports different conditional dependency patterns via the ConditionalStructure protocol. 
- - Attributes: - component_types: Type of each factor ("hmm" or "ghmm") - transition_matrices: Per-factor transition tensors (shape [K_i, V_i, S_i, S_i]) - normalizing_eigenvectors: Per-factor eigenvectors (shape [K_i, S_i]) - initial_states: Initial state per factor (shape [S_i]) - num_variants: Number of parameter variants per factor - structure: Conditional structure determining factor interactions - encoder: Token encoder for composite observations - """ + """Unified factored generative process with pluggable conditional structures.""" # Static structure component_types: tuple[ComponentType, ...] @@ -96,19 +74,6 @@ def __init__( device: str | None = None, noise_epsilon: float = 0.0, ) -> None: - """Initialize factored generative process. - - Args: - component_types: Type of each factor ("hmm" or "ghmm") - transition_matrices: Per-factor transition tensors. - transition_matrices[i] has shape [K_i, V_i, S_i, S_i] - normalizing_eigenvectors: Per-factor eigenvectors for GHMM. - normalizing_eigenvectors[i] has shape [K_i, S_i] - initial_states: Initial state per factor (shape [S_i]) - structure: Conditional structure defining factor interactions - device: Device to place arrays on (e.g., "cpu", "gpu") - noise_epsilon: Noisy channel epsilon value - """ if len(component_types) == 0: raise ValueError("Must provide at least one component") @@ -170,14 +135,7 @@ def initial_state(self) -> FactoredState: @eqx.filter_jit def observation_probability_distribution(self, state: FactoredState) -> jax.Array: - """Compute P(composite_token | state) under the conditional structure. 
- - Args: - state: Tuple of state vectors (one per factor) - - Returns: - Distribution over composite tokens, shape [prod(V_i)] - """ + """Compute P(composite_token | state) under the conditional structure.""" context = self._make_context(state) joint_dist = self.structure.compute_joint_distribution(context) @@ -188,44 +146,21 @@ def observation_probability_distribution(self, state: FactoredState) -> jax.Arra @eqx.filter_jit def log_observation_probability_distribution(self, log_belief_state: FactoredState) -> jax.Array: - """Compute log P(composite_token | state). - - Args: - log_belief_state: Tuple of log-state vectors - - Returns: - Log-distribution over composite tokens, shape [prod(V_i)] - """ + """Compute log P(composite_token | state).""" state = tuple(jnp.exp(s) for s in log_belief_state) probs = self.observation_probability_distribution(state) return jnp.log(probs) @eqx.filter_jit def emit_observation(self, state: FactoredState, key: jax.Array) -> jax.Array: - """Sample composite observation from current state. - - Args: - state: Tuple of state vectors - key: JAX random key - - Returns: - Composite observation (scalar token) - """ + """Sample a composite observation from the current state.""" probs = self.observation_probability_distribution(state) token_flat = jax.random.categorical(key, jnp.log(probs)) return token_flat @eqx.filter_jit def transition_states(self, state: FactoredState, obs: chex.Array) -> FactoredState: - """Update states given composite observation. 
- - Args: - state: Tuple of current state vectors - obs: Composite observation (scalar token) - - Returns: - Tuple of updated state vectors - """ + """Update states given a composite observation.""" # Decode composite observation to per-factor tokens obs_tuple = self.encoder.token_to_tuple(obs) @@ -245,14 +180,7 @@ def transition_states(self, state: FactoredState, obs: chex.Array) -> FactoredSt @eqx.filter_jit def probability(self, observations: jax.Array) -> jax.Array: - """Compute P(observations) by scanning through sequence. - - Args: - observations: Array of composite observations - - Returns: - Scalar probability - """ + """Compute P(observations) by scanning through the sequence.""" def step(carry: FactoredState, obs: jax.Array): state = carry @@ -266,14 +194,7 @@ def step(carry: FactoredState, obs: jax.Array): @eqx.filter_jit def log_probability(self, observations: jax.Array) -> jax.Array: - """Compute log P(observations) by scanning through sequence. - - Args: - observations: Array of composite observations - - Returns: - Scalar log-probability - """ + """Compute log P(observations) by scanning through the sequence.""" def step(carry: FactoredState, obs: jax.Array): state = carry diff --git a/simplexity/generative_processes/structures/conditional_transitions.py b/simplexity/generative_processes/structures/conditional_transitions.py index deaf81ef..a65f208c 100644 --- a/simplexity/generative_processes/structures/conditional_transitions.py +++ b/simplexity/generative_processes/structures/conditional_transitions.py @@ -22,25 +22,7 @@ class ConditionalTransitions(eqx.Module): - """Conditional transitions structure with flexible emission modes. - - Emissions can be: - - Independent (use_emission_chain=False): P(t) = ∏_i P_i(t_i | s_i, k_emit_i) - - Sequential (use_emission_chain=True): P(t) = P0(t0) * ∏_{i>0} P_i(t_i | t_0..t_{i-1}, s_i) - - Transitions are always mutually conditional: factor i selects transition variant based on - all other factors' tokens. 
- - Attributes: - control_maps_transition: Transition control maps. control_maps_transition[i] - has shape [prod(V_j for j!=i)] mapping other tokens to transition variant. - emission_variant_indices: Fixed emission variants per factor (shape [F]) - emission_control_maps: Optional sequential emission control maps - use_emission_chain: Whether to use sequential emissions - other_multipliers: Precomputed radix multipliers for other-factor indexing - prefix_multipliers: Precomputed radix multipliers for prefix indexing - vocab_sizes_py: Python int tuple of vocab sizes - """ + """Conditional transitions with flexible emission modes and mutually conditional transitions.""" control_maps_transition: tuple[jax.Array, ...] emission_variant_indices: jax.Array # shape [F] @@ -57,17 +39,6 @@ def __init__( vocab_sizes: jax.Array, emission_control_maps: tuple[jax.Array | None, ...] | None = None, ): - """Initialize conditional transitions structure. - - Args: - control_maps_transition: Transition control maps for each factor. - control_maps_transition[i] should have shape [prod(V_j for j!=i)]. - emission_variant_indices: Fixed emission variant per factor (shape [F]) - vocab_sizes: Vocabulary sizes per factor (shape [F]) - emission_control_maps: Optional sequential emission control maps. - If provided, emission_control_maps[i] should have shape - [prod(V_j for j0. - """ self.control_maps_transition = tuple(jnp.asarray(cm, dtype=jnp.int32) for cm in control_maps_transition) self.emission_variant_indices = jnp.asarray(emission_variant_indices, dtype=jnp.int32) self.vocab_sizes_py = tuple(int(v) for v in vocab_sizes) @@ -118,14 +89,7 @@ def _flatten_prev_tokens_index(self, tokens: jax.Array, i: int) -> jax.Array: return jnp.sum(tokens * mult) def compute_joint_distribution(self, context: ConditionalContext) -> jax.Array: - """Compute joint distribution based on emission mode. 
- - Args: - context: Conditional context with states and parameters - - Returns: - Flattened joint distribution of shape [prod(V_i)] - """ + """Compute joint distribution based on emission mode.""" num_factors = len(context.vocab_sizes) states = context.states component_types = context.component_types @@ -190,17 +154,7 @@ def select_variants( obs_tuple: tuple[jax.Array, ...], context: ConditionalContext, ) -> tuple[jax.Array, ...]: - """Select transition variants based on other factors' tokens. - - Note: This returns TRANSITION variants, not emission variants. - - Args: - obs_tuple: Tuple of observed tokens (one per factor) - context: Conditional context (unused) - - Returns: - Tuple of transition variant indices (one per factor) - """ + """Select transition variants based on other factors' tokens.""" tokens_arr = jnp.array(obs_tuple) variants = [] for i in range(len(obs_tuple)): diff --git a/simplexity/generative_processes/structures/fully_conditional.py b/simplexity/generative_processes/structures/fully_conditional.py index 38c55eb5..ece4f92e 100644 --- a/simplexity/generative_processes/structures/fully_conditional.py +++ b/simplexity/generative_processes/structures/fully_conditional.py @@ -104,15 +104,7 @@ def __init__( self.perms_py = tuple(perms_py) def _flatten_other_tokens_index(self, tokens: jax.Array, i: int) -> jax.Array: - """Flatten other-factor tokens to control map index. - - Args: - tokens: Array of shape [F] with all tokens - i: Factor index to exclude - - Returns: - Scalar index for control_maps[i] - """ + """Flatten other-factor tokens to control map index.""" mult = self.other_multipliers[i] return flatten_index(tokens, mult) @@ -182,15 +174,7 @@ def select_variants( obs_tuple: tuple[jax.Array, ...], context: ConditionalContext, ) -> tuple[jax.Array, ...]: - """Select variants based on all other factors' tokens. 
- - Args: - obs_tuple: Tuple of observed tokens (one per factor) - context: Conditional context (unused for fully conditional structure) - - Returns: - Tuple of variant indices (one per factor) - """ + """Select variants based on all other factors' tokens.""" tokens_arr = jnp.array(obs_tuple) variants = [] for i in range(len(obs_tuple)): diff --git a/simplexity/utils/factoring_utils.py b/simplexity/utils/factoring_utils.py index 2a3f8b0d..ecf04af0 100644 --- a/simplexity/utils/factoring_utils.py +++ b/simplexity/utils/factoring_utils.py @@ -22,17 +22,7 @@ def compute_obs_dist_for_variant( transition_matrix: jax.Array, normalizing_eigenvector: jax.Array | None = None, ) -> jax.Array: - """Compute observation distribution for a single factor variant. - - Args: - component_type: "hmm" or "ghmm" - state: State vector of shape [S] - transition_matrix: Transition tensor of shape [V, S, S] - normalizing_eigenvector: For GHMM only, shape [S]. Ignored for HMM. - - Returns: - Distribution over observations, shape [V] - """ + """Compute observation distribution for a single factor variant.""" if component_type == "hmm": # HMM: normalize by sum obs_state = state @ transition_matrix # [V, S] @@ -57,18 +47,7 @@ def transition_with_obs( obs: jax.Array, normalizing_eigenvector: jax.Array | None = None, ) -> jax.Array: - """Update state after observing a token. - - Args: - component_type: "hmm" or "ghmm" - state: Current state vector of shape [S] - transition_matrix: Transition tensor of shape [V, S, S] - obs: Observed token (scalar int) - normalizing_eigenvector: For GHMM only, shape [S]. Ignored for HMM. 
- - Returns: - New normalized state vector of shape [S] - """ + """Update state after observing a token.""" new_state = state @ transition_matrix[obs] # [S] if component_type == "hmm": @@ -88,17 +67,7 @@ def _radix_multipliers(vs: jax.Array) -> jax.Array: def compute_other_multipliers(vocab_sizes: tuple[int, ...]) -> tuple[jax.Array, ...]: - """Compute radix multipliers for other-factor indexing. - - For each factor i, computes the radix multipliers over all other factors, - with a zero inserted at position i. - - Args: - vocab_sizes: Tuple of vocabulary sizes, one per factor. - - Returns: - Tuple of arrays, one per factor, each of shape [num_factors]. - """ + """Compute radix multipliers for other-factor indexing.""" vs = jnp.array(vocab_sizes) num_factors = len(vocab_sizes) result = [] @@ -113,17 +82,7 @@ def compute_other_multipliers(vocab_sizes: tuple[int, ...]) -> tuple[jax.Array, def compute_prefix_multipliers(vocab_sizes: tuple[int, ...]) -> tuple[jax.Array, ...]: - """Compute radix multipliers for prefix-factor indexing. - - For each factor i, computes the radix multipliers over factors [0, i), - padded with zeros for the remaining positions. - - Args: - vocab_sizes: Tuple of vocabulary sizes, one per factor. - - Returns: - Tuple of arrays, one per factor, each of shape [num_factors]. - """ + """Compute radix multipliers for prefix-factor indexing.""" vs = jnp.array(vocab_sizes) num_factors = len(vocab_sizes) result = [] @@ -137,26 +96,12 @@ def compute_prefix_multipliers(vocab_sizes: tuple[int, ...]) -> tuple[jax.Array, class TokenEncoder(eqx.Module): - """Encodes/decodes composite observations from per-factor tokens. - - Uses radix encoding: given vocab sizes [V_0, V_1, ..., V_{F-1}], - a tuple (t_0, t_1, ..., t_{F-1}) maps to: - composite = t_0 * (V_1 * V_2 * ... * V_{F-1}) + t_1 * (V_2 * ... * V_{F-1}) + ... 
+ t_{F-1} - - Attributes: - vocab_sizes: Array of shape [F] with vocabulary size per factor - radix_multipliers: Array of shape [F] with multipliers for encoding - """ + """Encodes/decodes composite observations from per-factor tokens using radix encoding.""" vocab_sizes: jax.Array # shape [F] radix_multipliers: jax.Array # shape [F] def __init__(self, vocab_sizes: jax.Array): - """Initialize encoder with vocab sizes. - - Args: - vocab_sizes: Array of shape [F] with vocabulary size per factor - """ self.vocab_sizes = jnp.asarray(vocab_sizes) self.radix_multipliers = _radix_multipliers(self.vocab_sizes) @@ -171,14 +116,7 @@ def composite_vocab_size(self) -> int: return int(jnp.prod(self.vocab_sizes)) def tuple_to_token(self, token_tuple: tuple[jax.Array, ...]) -> jax.Array: - """Convert per-factor tokens to composite token. - - Args: - token_tuple: Tuple of f scalar arrays, each in [0, V_i) - - Returns: - Scalar array with composite token in [0, prod(V_i)) - """ + """Convert per-factor tokens to a composite token.""" token = jnp.array(0) multiplier = jnp.array(1) for i in reversed(range(len(token_tuple))): @@ -187,14 +125,7 @@ def tuple_to_token(self, token_tuple: tuple[jax.Array, ...]) -> jax.Array: return token def token_to_tuple(self, token: chex.Array) -> tuple[jax.Array, ...]: - """Convert composite token to per-factor tokens. - - Args: - token: Scalar array with composite token - - Returns: - Tuple of f scalar arrays with per-factor tokens - """ + """Convert a composite token to per-factor tokens.""" result = [] remaining = jnp.array(token) for i in reversed(range(self.num_factors)): @@ -205,13 +136,6 @@ def token_to_tuple(self, token: chex.Array) -> tuple[jax.Array, ...]: return tuple(reversed(result)) def extract_factors_vectorized(self, tokens: jax.Array) -> jax.Array: - """Extract per-factor tokens from batch of composite tokens. 
- - Args: - tokens: Array of shape [n] with composite tokens - - Returns: - Array of shape [n, f] with per-factor tokens - """ + """Extract per-factor tokens from a batch of composite tokens.""" tokens = jnp.atleast_1d(tokens) return (tokens[:, None] // self.radix_multipliers[None, :]) % self.vocab_sizes[None, :] From d5413a1d695551342351563bc89f3dd48fcbb4de Mon Sep 17 00:00:00 2001 From: Casper Lutzhoft Christensen Date: Wed, 4 Mar 2026 13:37:39 -0800 Subject: [PATCH 32/35] remove dependency --- pyproject.toml | 1 - uv.lock | 140 ------------------------------------------------- 2 files changed, 141 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fc728e1b..3f47c89c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,6 @@ dependencies = [ "transformer-lens>=2.15.4", "treescope", "pydantic>=2.12.0", - "altair>=5.3.0", ] [project.optional-dependencies] diff --git a/uv.lock b/uv.lock index f2932715..a98d77e4 100644 --- a/uv.lock +++ b/uv.lock @@ -164,22 +164,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554 }, ] -[[package]] -name = "altair" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jinja2" }, - { name = "jsonschema" }, - { name = "narwhals" }, - { name = "packaging" }, - { name = "typing-extensions", marker = "python_full_version < '3.15'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f7/c0/184a89bd5feba14ff3c41cfaf1dd8a82c05f5ceedbc92145e17042eb08a4/altair-6.0.0.tar.gz", hash = "sha256:614bf5ecbe2337347b590afb111929aa9c16c9527c4887d96c9bc7f6640756b4", size = 763834 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/33/ef2f2409450ef6daa61459d5de5c08128e7d3edb773fefd0a324d1310238/altair-6.0.0-py3-none-any.whl", hash = 
"sha256:09ae95b53d5fe5b16987dccc785a7af8588f2dca50de1e7a156efa8a461515f8", size = 795410 }, -] - [[package]] name = "annotated-doc" version = "0.0.4" @@ -1528,33 +1512,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396 }, ] -[[package]] -name = "jsonschema" -version = "4.26.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "jsonschema-specifications" }, - { name = "referencing" }, - { name = "rpds-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630 }, -] - -[[package]] -name = "jsonschema-specifications" -version = "2025.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "referencing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437 }, -] - [[package]] name = "kiwisolver" version = "1.4.9" @@ -3150,20 +3107,6 @@ wheels 
= [ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, ] -[[package]] -name = "referencing" -version = "0.37.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "rpds-py" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766 }, -] - [[package]] name = "regex" version = "2025.11.3" @@ -3282,87 +3225,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/80/97b6f357ac458d9ad9872cc3183ca09ef7439ac89e030ea43053ba1294b6/rich_argparse-1.7.2-py3-none-any.whl", hash = "sha256:0559b1f47a19bbeb82bf15f95a057f99bcbbc98385532f57937f9fc57acc501a", size = 25476 }, ] -[[package]] -name = "rpds-py" -version = "0.30.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086 }, 
- { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053 }, - { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763 }, - { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951 }, - { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622 }, - { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492 }, - { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080 }, - { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 
408680 }, - { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589 }, - { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289 }, - { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737 }, - { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120 }, - { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782 }, - { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463 }, - { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868 }, - { url = 
"https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887 }, - { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904 }, - { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945 }, - { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783 }, - { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021 }, - { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589 }, - { url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025 }, - { 
url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895 }, - { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799 }, - { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731 }, - { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027 }, - { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020 }, - { url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139 }, - { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224 }, - { url = 
"https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645 }, - { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443 }, - { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375 }, - { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850 }, - { url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812 }, - { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841 }, - { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149 }, - { url = 
"https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843 }, - { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507 }, - { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949 }, - { url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790 }, - { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217 }, - { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806 }, - { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341 }, - { url = 
"https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768 }, - { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099 }, - { url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192 }, - { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080 }, - { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841 }, - { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670 }, - { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005 }, - { url = 
"https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112 }, - { url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049 }, - { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661 }, - { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606 }, - { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126 }, - { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371 }, - { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298 }, - { url = 
"https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604 }, - { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391 }, - { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868 }, - { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747 }, - { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795 }, - { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330 }, - { url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194 }, - { url = 
"https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340 }, - { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765 }, - { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834 }, - { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470 }, - { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630 }, - { url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148 }, - { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030 }, - { url = 
"https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570 }, - { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532 }, -] - [[package]] name = "rsa" version = "4.9.1" @@ -3640,7 +3502,6 @@ name = "simplexity" version = "0.1" source = { editable = "." } dependencies = [ - { name = "altair" }, { name = "chex" }, { name = "dotenv" }, { name = "equinox" }, @@ -3693,7 +3554,6 @@ penzai = [ [package.metadata] requires-dist = [ - { name = "altair", specifier = ">=5.3.0" }, { name = "boto3", marker = "extra == 'aws'", specifier = ">=1.37.24" }, { name = "chex" }, { name = "diff-cover", marker = "extra == 'dev'" }, From 80ebd19f76da606e15ce7acd8003d5418a13dc26 Mon Sep 17 00:00:00 2001 From: Eric Alt <13019253+ealt@users.noreply.github.com> Date: Mon, 16 Mar 2026 13:59:01 -0700 Subject: [PATCH 33/35] Refactor vocab map handling in NonErgodicGenerativeProcess and improve error checking. Update _build_prefix_vocab_maps for clarity and efficiency. Add tests for mismatched vocab maps and duplicate entries. 
--- simplexity/generative_processes/builder.py | 27 ++- .../nonergodic_generative_process.py | 18 +- .../test_nonergodic_generative_process.py | 22 +++ walkthroughs/pr-172.json | 157 ++++++++++++++++++ 4 files changed, 202 insertions(+), 22 deletions(-) create mode 100644 walkthroughs/pr-172.json diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 0dbb8f3a..49c85c9b 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -807,14 +807,10 @@ def _build_prefix_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: C0 gets [0..V-1]. Ci>0 gets shared [0..n_shared-1] + unique tokens above V. """ - vocab_maps: list[list[int]] = [] - for i in range(n_components): - if i == 0: - vocab_maps.append(list(range(v))) - else: - unique_start = v + (i - 1) * n_unique - vocab_maps.append(list(range(n_shared)) + list(range(unique_start, unique_start + n_unique))) - return vocab_maps + return [list(range(v))] + [ + list(range(n_shared)) + list(range(v + i * n_unique, v + (i + 1) * n_unique)) + for i in range(n_components - 1) + ] def _build_sliding_vocab_maps(n_components: int, v: int, n_unique: int) -> list[list[int]]: @@ -826,17 +822,15 @@ def _build_sliding_vocab_maps(n_components: int, v: int, n_unique: int) -> list[ return [list(range(i * offset, i * offset + v)) for i in range(n_components)] -def _build_random_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: int, seed: int) -> list[list[int]]: +def _build_random_vocab_maps(n_components: int, v: int, n_unique: int, seed: int) -> list[list[int]]: """Build vocab maps by having each component randomly sample V tokens from the global pool. - The global vocab size is the same as in prefix mode (V + (n_components - 1) * n_unique), - and each component independently samples V tokens without replacement. + The global vocab size is the same as in prefix mode: + V + (n_components - 1) * n_unique. 
""" - prefix_maps = _build_prefix_vocab_maps(n_components, v, n_shared, n_unique) - global_vocab_size = max(max(vm) for vm in prefix_maps) + 1 + global_vocab_size = v + (n_components - 1) * n_unique rng = random.Random(seed) - global_tokens = list(range(global_vocab_size)) - return [sorted(rng.sample(global_tokens, v)) for _ in range(n_components)] + return [sorted(rng.sample(range(global_vocab_size), v)) for _ in range(n_components)] def build_nonergodic_partial_overlap( @@ -884,8 +878,7 @@ def build_nonergodic_partial_overlap( elif mode == "sliding": vocab_maps = _build_sliding_vocab_maps(n_components, v, n_unique) elif mode == "random": - assert seed is not None - vocab_maps = _build_random_vocab_maps(n_components, v, n_shared, n_unique, seed) + vocab_maps = _build_random_vocab_maps(n_components, v, n_unique, seed) else: raise ValueError(f"Unknown mode '{mode}'. Must be 'prefix', 'sliding', or 'random'.") diff --git a/simplexity/generative_processes/nonergodic_generative_process.py b/simplexity/generative_processes/nonergodic_generative_process.py index 0be9f1a4..8e1b939b 100644 --- a/simplexity/generative_processes/nonergodic_generative_process.py +++ b/simplexity/generative_processes/nonergodic_generative_process.py @@ -138,7 +138,9 @@ def __init__( device: Device to place arrays on (e.g., "cpu", "gpu"). Raises: - ValueError: If components is empty or weights don't match component count. + ValueError: If components is empty, weights don't match component count, + vocab map count doesn't match component count, or a component + vocab_map contains duplicate global token indices. 
""" if len(components) == 0: raise ValueError("Must provide at least one component") @@ -158,6 +160,12 @@ def __init__( if vocab_maps is None: vocab_maps = [list(range(c.vocab_size)) for c in components] + elif len(vocab_maps) != len(self.components): + raise ValueError("Length of vocab maps must equal length of components.") + + for i, vm in enumerate(vocab_maps): + if len(set(vm)) != len(vm): + raise ValueError(f"vocab_maps[{i}] must not contain duplicate global token indices") self.vocab_maps = tuple(jax.device_put(jnp.array(vm, dtype=jnp.int32), self.device) for vm in vocab_maps) self._vocab_size = max(max(vm) for vm in vocab_maps) + 1 @@ -197,11 +205,11 @@ def observation_probability_distribution(self, state: NonErgodicState) -> jax.Ar """ global_dist = jnp.zeros(self._vocab_size) - for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=False)): + for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=True)): comp_state = state.component_states[i] local_dist = component.observation_probability_distribution(comp_state) component_contrib = jnp.zeros(self._vocab_size).at[vm].add(local_dist) - global_dist = global_dist + state.component_beliefs[i] * component_contrib + global_dist += state.component_beliefs[i] * component_contrib return global_dist @@ -215,7 +223,7 @@ def log_observation_probability_distribution(self, log_belief_state: NonErgodicS """ log_probs = [] - for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=False)): + for i, (component, vm) in enumerate(zip(self.components, self.vocab_maps, strict=True)): comp_log_state = log_belief_state.component_states[i] comp_log_belief = log_belief_state.component_beliefs[i] @@ -263,7 +271,7 @@ def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgod new_component_states = [] likelihoods = [] - for i, (component, inv_map) in enumerate(zip(self.components, self._inverse_vocab_maps, strict=False)): + for i, 
(component, inv_map) in enumerate(zip(self.components, self._inverse_vocab_maps, strict=True)): comp_state = state.component_states[i] local_obs = inv_map[obs] diff --git a/tests/generative_processes/test_nonergodic_generative_process.py b/tests/generative_processes/test_nonergodic_generative_process.py index 51e93e47..7b72edfa 100644 --- a/tests/generative_processes/test_nonergodic_generative_process.py +++ b/tests/generative_processes/test_nonergodic_generative_process.py @@ -352,6 +352,28 @@ def test_mismatched_weights_raises(self): component_weights=[1.0], # Only 1 weight for 2 components ) + def test_mismatched_vocab_maps_raises(self): + """Should raise if vocab map count doesn't match component count.""" + coin = build_hidden_markov_model("coin", {"p": 0.5}) + + with pytest.raises(ValueError, match="Length of vocab maps"): + NonErgodicGenerativeProcess( + components=[coin, coin], + component_weights=[0.5, 0.5], + vocab_maps=[[0, 1]], + ) + + def test_duplicate_vocab_map_entries_raise(self): + """Should raise if a component vocab map reuses a global token index.""" + coin = build_hidden_markov_model("coin", {"p": 0.5}) + + with pytest.raises(ValueError, match="must not contain duplicate"): + NonErgodicGenerativeProcess( + components=[coin], + component_weights=[1.0], + vocab_maps=[[0, 0]], + ) + class TestGenerateReturnAllStates: """Tests for generate with return_all_states=True.""" diff --git a/walkthroughs/pr-172.json b/walkthroughs/pr-172.json new file mode 100644 index 00000000..2fe60f54 --- /dev/null +++ b/walkthroughs/pr-172.json @@ -0,0 +1,157 @@ +{ + "title": "PR #172: NonErgodicGenerativeProcess & InflatedVocabularyProcess", + "description": "Walkthrough of two new generative process types: a block-diagonal nonergodic mixture model and a vocabulary inflation wrapper, plus builder functions and comprehensive tests.", + "repository": { + "remote": "https://github.com/Astera-org/simplexity.git", + "commit": "HEAD" + }, + "metadata": { + "pr": 172, + 
"recommendation": "approve" + }, + "steps": [ + { + "id": 1, + "title": "Overview: Two new generative process abstractions", + "body": "This PR introduces two new `GenerativeProcess` subclasses:\n\n1. **NonErgodicGenerativeProcess** — A block-diagonal mixture model that composes multiple `GenerativeProcess` components with weighted probabilities. No transitions occur between components; beliefs are updated via Bayesian filtering.\n\n2. **InflatedVocabularyProcess** — A wrapper that multiplies vocabulary size by a factor K with uniform noise, increasing optimal per-token loss by exactly `log(K)` nats.\n\nThe PR also adds 9 builder functions for constructing these processes from YAML specs, supporting disjoint and partially-overlapping vocabulary configurations with three mapping strategies (prefix, sliding, random).\n\nKey design choices:\n- Does NOT materialize a full block-diagonal matrix — stores component processes directly for efficiency\n- Uses `IndependentFactoredGenerativeProcess` for independent structures to achieve O(sum V_i) sampling complexity\n- Handles heterogeneous state types (HMM vs Factored) via flatten/pad/unflatten for `jax.lax.switch` compatibility" + }, + { + "id": 2, + "title": "NonErgodicState: the composite state representation", + "body": "The state is a `NamedTuple` with two fields:\n- `component_beliefs`: a probability distribution over components, shape `[num_components]`. 
During generation this becomes one-hot after the first emission; during inference it's updated via Bayes rule.\n- `component_states`: a tuple of per-component states, where each element can be either a flat `jax.Array` (HMM) or a tuple of arrays (FactoredState).\n\nThis heterogeneous state design is central to the PR — it allows mixing different process types (HMM, GHMM, Factored) in a single mixture.", + "location": "simplexity/generative_processes/nonergodic_generative_process.py:78-90" + }, + { + "id": 3, + "title": "State flattening utilities for JAX compatibility", + "body": "Since `jax.lax.switch` requires all branches to return identically-shaped arrays, these three helper functions flatten heterogeneous component states into uniform 1D arrays:\n\n- `_get_flat_size`: counts total elements\n- `_flatten_state`: concatenates to 1D\n- `_unflatten_state`: reconstructs original structure using a template\n\nNote the use of `jax.lax.dynamic_slice` instead of Python slicing in `_unflatten_state` (line 71) — this avoids `ConcretizationTypeError` inside `jax.lax.switch` since template shapes are known at trace time.", + "location": "simplexity/generative_processes/nonergodic_generative_process.py:20-75" + }, + { + "id": 4, + "title": "Constructor: vocab maps and inverse maps", + "body": "The constructor normalizes component weights, builds forward vocab maps (local-to-global), and computes inverse maps (global-to-local) for efficient observation routing.\n\nKey details:\n- Weights are normalized to sum to 1 (line 156)\n- If no vocab maps provided, each component gets identity mapping `[0..V-1]` (line 160)\n- The unified vocab size is `max(all global tokens) + 1` (line 163)\n- Inverse maps use `-1` sentinel for unmapped tokens (line 167), which `transition_states` checks to determine if an observation belongs to a component", + "location": "simplexity/generative_processes/nonergodic_generative_process.py:123-171", + "comments": [ + { + "id": "mmtj452t4ne", + "author": 
"Eric Alt", + "body": "```python\nif vocab_maps is None:\n vocab_maps = [list(range(c.vocab_size)) for c in components]\nelif len(vocab_maps) != len(self.components):\n raise ValueError(\"Length of vocab maps must equal length of components.\")\n```" + }, + { + "id": "mmtjm7a5e79", + "author": "Eric Alt", + "body": "```python\n Raises:\n ValueError: If components is empty, weights don't match component count,\n or a component vocab_map contains duplicate global token indices\n\n...\n\n for i, vm in enumerate(vocab_maps):\n if len(set(vm)) != len(vm):\n raise ValueError(f\"vocab_maps[{i}] must not contain duplicate global token indices\")\n```\n\nCorresponding unit tests should also be added" + } + ] + }, + { + "id": 5, + "title": "Observation distribution: weighted mixture over components", + "body": "Computes `P(obs | state) = sum_i P(component_i | state) * P(obs | component_i, state_i)`.\n\nFor each component: gets the local distribution, scatters it into global vocab space via `vocab_maps[i]`, and weights by `component_beliefs[i]`. Tokens not in a component's vocab naturally get probability 0.\n\nThe log-space variant (line 208-228) uses `logsumexp` across stacked per-component log distributions for numerical stability, with `-inf` for unmapped tokens.", + "location": "simplexity/generative_processes/nonergodic_generative_process.py:186-228", + "comments": [ + { + "id": "mmtj5fpu16p", + "author": "Eric Alt", + "body": "`len(self.components)` should always equal `len(self.vocab_maps)` so `strict` should be `True` instead of `False`" + }, + { + "id": "mmtj8i7vy18", + "author": "Eric Alt", + "body": "```python\nglobal_dist += state.component_beliefs[i] * component_contrib\n```" + } + ] + }, + { + "id": 6, + "title": "Bayesian filtering in transition_states", + "body": "This is the core inference logic. For each observation:\n\n1. Map global token to each component's local space via inverse vocab maps (line 268)\n2. 
Compute likelihood `P(obs | component_i)` — 0 if token not in component's vocab (lines 271-275)\n3. Conditionally update each component's internal state only when likelihood > 0 (lines 278-284)\n4. Apply Bayes rule: `new_beliefs = beliefs * likelihoods / normalizer` (lines 288-294)\n5. Fall back to prior beliefs if all likelihoods are 0 (line 291-293)\n\nThe `jax.lax.cond` on line 278 avoids unnecessary state transitions for components that couldn't have generated the observation.", + "location": "simplexity/generative_processes/nonergodic_generative_process.py:253-299", + "comments": [ + { + "id": "mmtjcmycfty", + "author": "Eric Alt", + "body": "`strict=True`" + } + ] + }, + { + "id": 7, + "title": "Generation: sample one component, generate entire sequence", + "body": "Unlike inference (which tracks beliefs across all components), generation samples a single component at the start and generates entirely from it.\n\nThe implementation is notable for its complexity:\n- Cannot delegate to `component.generate()` because that method is also vmapped (line 363)\n- Uses flatten/pad to a common max size so `jax.lax.switch` can handle heterogeneous state types (lines 389-398)\n- `scan_step` (line 411) runs generation via `lax.switch` selecting the active component\n- Only the active component's state is updated per step (lines 424-431)\n- When `return_all_states=True`, a second inference scan reconstructs belief trajectories (lines 447-453)\n\nThis is the most intricate part of the PR — the flatten/pad/unflatten dance is the price paid for supporting mixed component types in a single JIT-compiled scan.", + "location": "simplexity/generative_processes/nonergodic_generative_process.py:347-459", + "comments": [ + { + "id": "mmtlfwdo7hn", + "author": "Eric Alt", + "body": "TODO: instead of having generative processes's `generate` function vmapped by default we should just have it function for a single sequence and define a separete `generate_batch` function that just wraps 
the generate function in a `vmap` (or just require the caller of generate to do that themselves) - outside the scope of this PR though" + } + ] + }, + { + "id": 8, + "title": "InflatedVocabularyProcess: controlled difficulty via noise", + "body": "A clean decorator pattern that wraps any `GenerativeProcess[State]` to inflate its vocabulary.\n\nToken encoding: `inflated_token = noise_prefix * V_base + base_token`\n- `emit_observation` samples a base token then adds a uniform random noise prefix (lines 68-71)\n- `transition_states` extracts the base token via modulo and discards noise (line 76)\n- Probability distributions are tiled K times and divided by K (line 83)\n- `probability` applies a `(1/K)^T` penalty for a sequence of length T (line 96)\n\nThis elegantly increases optimal per-token loss by exactly `log(K)` nats while preserving all state dynamics.", + "location": "simplexity/generative_processes/inflated_vocabulary_process.py:22-103" + }, + { + "id": 9, + "title": "Generator updates: slicing NonErgodicState belief trajectories", + "body": "The existing `generate_data_batch_with_full_history` function needed to handle `NonErgodicState` when slicing belief trajectories along the sequence dimension.\n\nA new `_slice_belief_states` helper (line 96) handles three state representations:\n- Plain arrays: slice directly\n- Tuples of arrays: slice each element\n- `NonErgodicState`: slice both `component_beliefs` and each entry in `component_states`, handling nested tuples for factored components (line 111)\n\nThis replaces the previous inline isinstance/tuple handling with a cleaner dispatch.", + "location": "simplexity/generative_processes/generator.py:96-118" + }, + { + "id": 10, + "title": "IndependentFactoredGenerativeProcess: noise_epsilon passthrough", + "body": "A small but important change: `noise_epsilon` is added as a constructor parameter (line 52) and forwarded to the parent `FactoredGenerativeProcess.__init__` (line 83).\n\nThis allows nonergodic 
processes to compose factored components that use noisy channels — previously the `IndependentFactoredGenerativeProcess` would silently ignore this parameter.", + "location": "simplexity/generative_processes/independent_factored_generative_process.py:43-84" + }, + { + "id": 11, + "title": "Builder: component factory and nonergodic process construction", + "body": "The private `_build_components_from_spec` helper (line 653) is the foundation for all nonergodic builders. It dispatches on `component_type` to build HMM, GHMM, or Factored processes.\n\n`build_nonergodic_process_from_spec` (line 700) is the main entry point, documented with a full YAML example showing how to compose HMM, GHMM, and factored components with explicit vocab maps.\n\nNote how vocab maps can be specified either per-component in the spec or globally as an override (lines 754-761).", + "location": "simplexity/generative_processes/builder.py:653-768" + }, + { + "id": 12, + "title": "Builder: vocabulary mapping strategies", + "body": "Three vocab map strategies for partially overlapping alphabets:\n\n1. **Prefix** (line 805): C0 gets `[0..V-1]`, subsequent components share a prefix of `n_shared` tokens plus unique tokens above V\n2. **Sliding** (line 820): Each component's vocab slides by `max(1, n_unique)` tokens — simple offset strategy\n3. **Random** (line 829): Each component independently samples V tokens from the global pool using a seeded RNG\n\n`build_nonergodic_partial_overlap` (line 842) orchestrates these, computing `n_shared = int(V * overlap_frac)` and `n_unique = V - n_shared` from the `overlap_frac` parameter. 
All components must have equal vocab size for this to work (validated on line 875-876).\n\n`build_nonergodic_disjoint_vocab` (line 771) is the simpler case: sequential non-overlapping ranges `[0..V0-1], [V0..V0+V1-1], ...`", + "location": "simplexity/generative_processes/builder.py:771-897", + "comments": [ + { + "id": "mmtncbw77fd", + "author": "Eric Alt", + "body": "```python\n def _build_prefix_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: int) -> list[list[int]]:\n \"\"\"Build vocab maps using the prefix strategy.\"\"\"\n return [list(range(v))] + [\n list(range(n_shared)) + list(range(v + i * n_unique, v + (i + 1) * n_unique))\n for i in range(n_components - 1)\n ]\n```" + }, + { + "id": "mmtngqqdqwf", + "author": "Eric Alt", + "body": "`assert seed is not None` is redundant with earlier check" + }, + { + "id": "mmtnjikp3yy", + "author": "Eric Alt", + "body": "```python\n def _build_random_vocab_maps(n_components: int, v: int, n_unique: int, seed: int) -> list[list[int]]:\n \"\"\"Build vocab maps by having each component randomly sample V tokens from the global pool.\n\n The global vocab size is the same as in prefix mode:\n V + (n_components - 1) * n_unique.\n \"\"\"\n global_vocab_size = v + (n_components - 1) * n_unique\n rng = random.Random(seed)\n\n return [\n sorted(rng.sample(range(global_vocab_size), v))\n for _ in range(n_components)\n ]\n```" + } + ] + }, + { + "id": 13, + "title": "Builder: InflatedVocabularyProcess construction", + "body": "Two builder functions for the inflation wrapper:\n\n- `build_inflated_process` (line 900): Simple wrapper taking an existing `GenerativeProcess` and inflation factor\n- `build_inflated_process_from_spec` (line 916): Builds the base process from a spec dict first, then wraps it — supports HMM, GHMM, and factored base types\n\nBoth are thin wrappers that delegate to the `InflatedVocabularyProcess` constructor.", + "location": "simplexity/generative_processes/builder.py:900-959" + }, + { + "id": 
14, + "title": "build_factored_process now returns IndependentFactoredGenerativeProcess", + "body": "A structural change in the existing `build_factored_process` function: when `structure_type == \"independent\"`, it now returns an `IndependentFactoredGenerativeProcess` directly (line 200) with early return, rather than falling through to the generic `FactoredGenerativeProcess` constructor.\n\nThis ensures that nonergodic processes composing independent factored components get the specialized subclass with per-factor sampling and frozen factor support, plus the new `noise_epsilon` passthrough.", + "location": "simplexity/generative_processes/builder.py:198-207" + }, + { + "id": 15, + "title": "Summary and recommendations", + "body": "**Strengths:**\n- Clean abstractions: both new classes implement the full `GenerativeProcess` protocol\n- The flatten/pad/unflatten approach for heterogeneous states in `jax.lax.switch` is well-documented and correct\n- Comprehensive builder functions with three vocab mapping strategies cover real research use cases\n- Excellent test coverage (21+ tests for NonErgodic, tests for Inflated, builder tests)\n- The `InflatedVocabularyProcess` is a particularly elegant design — stateless noise with provable loss increase\n\n**Architecture notes:**\n- The `generate` method in `NonErgodicGenerativeProcess` is the most complex piece — the re-implementation of the generate loop (rather than delegating to components) is necessary due to vmap constraints but adds maintenance burden\n- The `_slice_belief_states` helper in `generator.py` adds a third branch for `NonErgodicState` — if more state types emerge, this could benefit from a protocol-based dispatch\n\n**Recommendation: Approve** — Well-structured addition with solid test coverage and clear documentation of the tricky JAX compatibility patterns." 
+ } + ] +} \ No newline at end of file From 2d6a3568962213e4e348dd21245a1d257441a78b Mon Sep 17 00:00:00 2001 From: Eric Alt <13019253+ealt@users.noreply.github.com> Date: Mon, 16 Mar 2026 14:04:42 -0700 Subject: [PATCH 34/35] ruff format --- simplexity/generative_processes/builder.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 49c85c9b..2a0180b9 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -808,8 +808,7 @@ def _build_prefix_vocab_maps(n_components: int, v: int, n_shared: int, n_unique: C0 gets [0..V-1]. Ci>0 gets shared [0..n_shared-1] + unique tokens above V. """ return [list(range(v))] + [ - list(range(n_shared)) + list(range(v + i * n_unique, v + (i + 1) * n_unique)) - for i in range(n_components - 1) + list(range(n_shared)) + list(range(v + i * n_unique, v + (i + 1) * n_unique)) for i in range(n_components - 1) ] From 447d03d879b08e9231f91eb0aebb9f743d5e1f78 Mon Sep 17 00:00:00 2001 From: Eric Alt <13019253+ealt@users.noreply.github.com> Date: Mon, 16 Mar 2026 14:26:25 -0700 Subject: [PATCH 35/35] Add error handling for random mode in build_nonergodic_partial_overlap and introduce new utility functions for state management in NonErgodicGenerativeProcess --- simplexity/generative_processes/builder.py | 2 + .../nonergodic_generative_process.py | 227 +++++++++++------- 2 files changed, 145 insertions(+), 84 deletions(-) diff --git a/simplexity/generative_processes/builder.py b/simplexity/generative_processes/builder.py index 2a0180b9..ddcf1851 100644 --- a/simplexity/generative_processes/builder.py +++ b/simplexity/generative_processes/builder.py @@ -877,6 +877,8 @@ def build_nonergodic_partial_overlap( elif mode == "sliding": vocab_maps = _build_sliding_vocab_maps(n_components, v, n_unique) elif mode == "random": + if seed is None: + raise ValueError("seed is required when 
mode='random'") vocab_maps = _build_random_vocab_maps(n_components, v, n_unique, seed) else: raise ValueError(f"Unknown mode '{mode}'. Must be 'prefix', 'sliding', or 'random'.") diff --git a/simplexity/generative_processes/nonergodic_generative_process.py b/simplexity/generative_processes/nonergodic_generative_process.py index 8e1b939b..a45eabf7 100644 --- a/simplexity/generative_processes/nonergodic_generative_process.py +++ b/simplexity/generative_processes/nonergodic_generative_process.py @@ -17,6 +17,14 @@ ComponentState = jax.Array | tuple[jax.Array, ...] +class _GenerationLayout(NamedTuple): + """Static metadata needed to flatten and restore heterogeneous states.""" + + flat_sizes: tuple[int, ...] + state_templates: tuple[ComponentState, ...] + max_flat_size: int + + def _get_flat_size(state: ComponentState) -> int: """Get total number of elements in a component state. @@ -45,6 +53,12 @@ def _flatten_state(state: ComponentState) -> jax.Array: return state.ravel() +def _flatten_and_pad_state(state: ComponentState, max_flat_size: int) -> jax.Array: + """Flatten a state and pad it to the shared switch-compatible size.""" + flat = _flatten_state(state) + return jnp.pad(flat, (0, max_flat_size - flat.size)) + + def _unflatten_state(flat: jax.Array, template: ComponentState) -> ComponentState: """Restore original state structure from a flattened 1D array. @@ -75,6 +89,16 @@ def _unflatten_state(flat: jax.Array, template: ComponentState) -> ComponentStat return flat.reshape(template.shape) +def _unpad_and_unflatten_state(padded: jax.Array, original_size: int, template: ComponentState) -> ComponentState: + """Remove padding and restore the component state structure.""" + return _unflatten_state(padded[:original_size], template) + + +def _keep_state(state: ComponentState, _obs: chex.Array) -> ComponentState: + """Return the existing state unchanged.""" + return state + + class NonErgodicState(NamedTuple): """State for nonergodic generative process. 
@@ -258,6 +282,34 @@ def emit_from_component(i: int, k: chex.PRNGKey) -> chex.Array: return global_obs + def _update_component_for_observation( + self, + component: GenerativeProcess, + inv_map: jax.Array, + comp_state: ComponentState, + obs: chex.Array, + ) -> tuple[ComponentState, jax.Array]: + """Update one component state and return its observation likelihood.""" + local_obs = inv_map[obs] + local_dist = component.observation_probability_distribution(comp_state) + likelihood = jnp.where( + local_obs >= 0, + local_dist[jnp.clip(local_obs, 0, local_dist.shape[0] - 1)], + 0.0, + ) + + def transition_component(state: ComponentState, mapped_obs: chex.Array) -> ComponentState: + return component.transition_states(state, mapped_obs) + + new_comp_state = jax.lax.cond( + likelihood > 0, + transition_component, + _keep_state, + comp_state, + local_obs, + ) + return new_comp_state, likelihood + @eqx.filter_jit def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgodicState: """Update state given observation using Bayesian filtering. 
@@ -273,23 +325,8 @@ def transition_states(self, state: NonErgodicState, obs: chex.Array) -> NonErgod for i, (component, inv_map) in enumerate(zip(self.components, self._inverse_vocab_maps, strict=True)): comp_state = state.component_states[i] - local_obs = inv_map[obs] - - local_dist = component.observation_probability_distribution(comp_state) - likelihood = jnp.where( - local_obs >= 0, - local_dist[jnp.clip(local_obs, 0, local_dist.shape[0] - 1)], - 0.0, - ) + new_comp_state, likelihood = self._update_component_for_observation(component, inv_map, comp_state, obs) likelihoods.append(likelihood) - - new_comp_state = jax.lax.cond( - likelihood > 0, - lambda s, lo, c=component: c.transition_states(s, lo), - lambda s, lo, c=None: s, - comp_state, - local_obs, - ) new_component_states.append(new_comp_state) likelihoods_arr = jnp.array(likelihoods) @@ -318,9 +355,13 @@ def compute_component_prob(i: int) -> jax.Array: inv_map = self._inverse_vocab_maps[i] local_obs = inv_map[observations] all_valid = jnp.all(local_obs >= 0) + + def compute_prob(lo: jax.Array) -> jax.Array: + return component.probability(lo) + prob = jax.lax.cond( all_valid, - lambda lo: component.probability(lo), + compute_prob, lambda lo: jnp.array(0.0), local_obs, ) @@ -341,9 +382,13 @@ def compute_component_log_prob(i: int) -> jax.Array: inv_map = self._inverse_vocab_maps[i] local_obs = inv_map[observations] all_valid = jnp.all(local_obs >= 0) + + def compute_log_prob(lo: jax.Array) -> jax.Array: + return component.log_probability(lo) + log_prob = jax.lax.cond( all_valid, - lambda lo: component.log_probability(lo), + compute_log_prob, lambda lo: jnp.array(-jnp.inf), local_obs, ) @@ -352,6 +397,72 @@ def compute_component_log_prob(i: int) -> jax.Array: log_probs = jnp.array([compute_component_log_prob(i) for i in range(len(self.components))]) return jax.nn.logsumexp(log_probs) + def _generate_component_step( + self, + i: int, + padded_state: jax.Array, + step_key: chex.PRNGKey, + layout: 
_GenerationLayout, + ) -> tuple[jax.Array, chex.Array]: + """Advance one selected component by a single generation step.""" + real_state = _unpad_and_unflatten_state(padded_state, layout.flat_sizes[i], layout.state_templates[i]) + local_obs = self.components[i].emit_observation(real_state, step_key) + new_real_state = self.components[i].transition_states(real_state, local_obs) + new_padded_state = _flatten_and_pad_state(new_real_state, layout.max_flat_size) + global_obs = self.vocab_maps[i][local_obs] + return new_padded_state, global_obs + + def _scan_component_generation( + self, + component_idx: jax.Array, + padded_states: tuple[jax.Array, ...], + keys: jax.Array, + layout: _GenerationLayout, + ) -> tuple[tuple[jax.Array, ...], chex.Array]: + """Generate observations while updating only the sampled component state.""" + num_components = len(self.components) + + def scan_step( + carry: tuple[jax.Array, tuple[jax.Array, ...]], step_key: chex.PRNGKey + ) -> tuple[tuple[jax.Array, tuple[jax.Array, ...]], chex.Array]: + idx, padded_comp_states = carry + + new_padded_state, global_obs = jax.lax.switch( + idx, + [ + partial( + self._generate_component_step, + i, + padded_comp_states[i], + step_key, + layout, + ) + for i in range(num_components) + ], + ) + + new_padded_comp_states = tuple( + jax.lax.select(idx == i, new_padded_state, padded_comp_states[i]) for i in range(num_components) + ) + + return (idx, new_padded_comp_states), global_obs + + init_carry = (component_idx, padded_states) + (_, final_padded_states), observations = jax.lax.scan(scan_step, init_carry, keys) + return final_padded_states, observations + + def _generate_state_trajectory( + self, state: NonErgodicState, observations: chex.Array + ) -> tuple[NonErgodicState, chex.Array]: + """Reconstruct the per-token belief trajectory from generated observations.""" + + def inference_step(carry_state: NonErgodicState, obs: chex.Array) -> tuple[NonErgodicState, NonErgodicState]: + new_state = 
self.transition_states(carry_state, obs) + return new_state, carry_state + + _, state_trajectory = jax.lax.scan(inference_step, state, observations) + return state_trajectory, observations + @eqx.filter_vmap(in_axes=(None, 0, 0, None, None)) def generate( self, @@ -391,77 +502,25 @@ def generate( keys = jax.random.split(key2, sequence_len) component_idx = jax.random.categorical(key1, jnp.log(state.component_beliefs)) - - num_components = len(self.components) - state_templates = state.component_states - flat_sizes = [_get_flat_size(s) for s in state_templates] - max_flat_size = max(flat_sizes) - - def flatten_and_pad(s: ComponentState) -> jax.Array: - flat = _flatten_state(s) - return jnp.pad(flat, (0, max_flat_size - flat.size)) - - def unpad_and_unflatten(padded: jax.Array, original_size: int, template: ComponentState) -> ComponentState: - return _unflatten_state(padded[:original_size], template) - - padded_states = tuple(flatten_and_pad(s) for s in state.component_states) - - def gen_step_for_component( - i: int, padded_state: jax.Array, step_key: chex.PRNGKey - ) -> tuple[jax.Array, chex.Array]: - real_state = unpad_and_unflatten(padded_state, flat_sizes[i], state_templates[i]) - local_obs = self.components[i].emit_observation(real_state, step_key) - new_real_state = self.components[i].transition_states(real_state, local_obs) - new_padded_state = flatten_and_pad(new_real_state) - global_obs = self.vocab_maps[i][local_obs] - return new_padded_state, global_obs - - def scan_step( - carry: tuple[jax.Array, tuple[jax.Array, ...]], step_key: chex.PRNGKey - ) -> tuple[tuple[jax.Array, tuple[jax.Array, ...]], chex.Array]: - idx, padded_comp_states = carry - - def gen_from_i(i: int) -> tuple[jax.Array, chex.Array]: - return gen_step_for_component(i, padded_comp_states[i], step_key) - - new_padded_state, global_obs = jax.lax.switch( - idx, - [partial(gen_from_i, i) for i in range(num_components)], - ) - - new_padded_comp_states = tuple( - jax.lax.cond( - idx == i, - 
lambda ns=new_padded_state: ns, - lambda ps=padded_comp_states[i]: ps, - ) - for i in range(num_components) - ) - - return (idx, new_padded_comp_states), global_obs - - init_carry = (component_idx, padded_states) - (_, final_padded_states), observations = jax.lax.scan(scan_step, init_carry, keys) + layout = _GenerationLayout( + flat_sizes=tuple(_get_flat_size(s) for s in state.component_states), + state_templates=state.component_states, + max_flat_size=max(_get_flat_size(s) for s in state.component_states), + ) + padded_states = tuple(_flatten_and_pad_state(s, layout.max_flat_size) for s in state.component_states) + final_padded_states, observations = self._scan_component_generation(component_idx, padded_states, keys, layout) final_comp_states = tuple( - unpad_and_unflatten(final_padded_states[i], flat_sizes[i], state_templates[i]) - for i in range(num_components) + _unpad_and_unflatten_state(final_padded_states[i], layout.flat_sizes[i], layout.state_templates[i]) + for i in range(len(self.components)) ) one_hot_beliefs = jax.nn.one_hot(component_idx, len(self.components), dtype=self.component_weights.dtype) if return_all_states: + return self._generate_state_trajectory(state, observations) - def inference_step( - carry_state: NonErgodicState, obs: chex.Array - ) -> tuple[NonErgodicState, NonErgodicState]: - new_state = self.transition_states(carry_state, obs) - return new_state, carry_state - - _, state_trajectory = jax.lax.scan(inference_step, state, observations) - return state_trajectory, observations - else: - return NonErgodicState( - component_beliefs=one_hot_beliefs, - component_states=final_comp_states, - ), observations + return NonErgodicState( + component_beliefs=one_hot_beliefs, + component_states=final_comp_states, + ), observations