From e0cb092980b6785902d808df72645e8bf059b7a3 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 16:31:06 +0300 Subject: [PATCH 01/17] feat: add machine charm config --- .github/workflows/deploy.yaml | 28 +- {charm => k8s-charm}/.gitignore | 0 {charm => k8s-charm}/charmcraft.yaml | 0 .../data_platform_libs/v0/data_interfaces.py | 0 .../lib/charms/data_platform_libs/v0/s3.py | 0 .../grafana_k8s/v0/grafana_dashboard.py | 0 k8s-charm/lib/charms/hydra/v0/oauth.py | 808 +++ .../lib/charms/loki_k8s/v1/loki_push_api.py | 0 .../observability_libs/v0/juju_topology.py | 0 .../lib/charms/openfga_k8s/v1/openfga.py | 0 .../prometheus_k8s/v0/prometheus_scrape.py | 0 .../lib/charms/redis_k8s/v0/redis.py | 0 .../lib/charms/saml_integrator/v0/saml.py | 0 .../lib/charms/smtp_integrator/v0/smtp.py | 0 .../tempo_coordinator_k8s/v0/tracing.py | 0 .../lib/charms/traefik_k8s/v2/ingress.py | 0 {charm => k8s-charm}/pyproject.toml | 0 {charm => k8s-charm}/requirements.txt | 0 {charm => k8s-charm}/src/charm.py | 0 {charm => k8s-charm}/tox.ini | 0 machine-charm/.charmignore | 31 + machine-charm/.gitignore | 14 + machine-charm/CONTRIBUTING.md | 35 + machine-charm/LICENSE | 202 + machine-charm/README.md | 26 + machine-charm/charmcraft.yaml | 64 + .../data_platform_libs/v0/data_interfaces.py | 5782 +++++++++++++++++ machine-charm/pyproject.toml | 80 + machine-charm/src/charm.py | 99 + machine-charm/src/workload.py | 155 + machine-charm/tests/integration/conftest.py | 47 + machine-charm/tests/integration/test_charm.py | 29 + machine-charm/tests/unit/test_charm.py | 26 + machine-charm/tox.ini | 80 + machine-charm/uv.lock | 617 ++ 35 files changed, 8120 insertions(+), 3 deletions(-) rename {charm => k8s-charm}/.gitignore (100%) rename {charm => k8s-charm}/charmcraft.yaml (100%) rename {charm => k8s-charm}/lib/charms/data_platform_libs/v0/data_interfaces.py (100%) rename {charm => k8s-charm}/lib/charms/data_platform_libs/v0/s3.py (100%) rename {charm => k8s-charm}/lib/charms/grafana_k8s/v0/grafana_dashboard.py (100%) create mode 100644 k8s-charm/lib/charms/hydra/v0/oauth.py rename {charm => k8s-charm}/lib/charms/loki_k8s/v1/loki_push_api.py (100%) rename {charm => k8s-charm}/lib/charms/observability_libs/v0/juju_topology.py (100%) rename {charm => k8s-charm}/lib/charms/openfga_k8s/v1/openfga.py (100%) rename {charm => k8s-charm}/lib/charms/prometheus_k8s/v0/prometheus_scrape.py (100%) rename {charm => k8s-charm}/lib/charms/redis_k8s/v0/redis.py (100%) rename {charm => k8s-charm}/lib/charms/saml_integrator/v0/saml.py (100%) rename {charm => k8s-charm}/lib/charms/smtp_integrator/v0/smtp.py (100%) rename {charm => k8s-charm}/lib/charms/tempo_coordinator_k8s/v0/tracing.py (100%) rename {charm => k8s-charm}/lib/charms/traefik_k8s/v2/ingress.py (100%) rename {charm => k8s-charm}/pyproject.toml (100%) rename {charm => k8s-charm}/requirements.txt (100%) rename {charm => k8s-charm}/src/charm.py (100%) rename {charm => k8s-charm}/tox.ini (100%) create mode 100644 machine-charm/.charmignore create mode 100644 machine-charm/.gitignore create mode 100644 machine-charm/CONTRIBUTING.md create mode 100644 machine-charm/LICENSE create mode 100644 machine-charm/README.md create mode 100644 machine-charm/charmcraft.yaml create mode 100644 machine-charm/lib/charms/data_platform_libs/v0/data_interfaces.py create mode 100644 machine-charm/pyproject.toml create mode 100755 machine-charm/src/charm.py create mode 100644 machine-charm/src/workload.py create mode 100644 machine-charm/tests/integration/conftest.py create mode 100644 
machine-charm/tests/integration/test_charm.py create mode 100644 machine-charm/tests/unit/test_charm.py create mode 100644 machine-charm/tox.ini create mode 100644 machine-charm/uv.lock diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index 4a4064b1..7f3c0b1b 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -11,7 +11,7 @@ env: ROCKCRAFT_ENABLE_EXPERIMENTAL_EXTENSIONS: true jobs: - pack-charm: + pack-k8s-charm: runs-on: ubuntu-latest steps: - name: Checkout Code @@ -25,15 +25,37 @@ jobs: - name: Pack charm run: | - cd charm + cd k8s-charm charmcraft pack -v --project-dir ./ - name: Upload charm uses: actions/upload-artifact@v4 with: name: ubuntu-security-api-charm - path: ./charm/*.charm + path: ./k8s-charm/*.charm + pack-machine-charm: + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@v3 + + - name: Setup LXD + uses: canonical/setup-lxd@main + + - name: Setup Charmcraft + run: sudo snap install charmcraft --classic --channel=latest/edge + + - name: Pack charm + run: | + cd machine-charm + charmcraft pack -v --project-dir ./ + + - name: Upload charm + uses: actions/upload-artifact@v4 + with: + name: ubuntu-security-api-machine-charm + path: ./machine-charm/*.charm pack-rock: runs-on: ubuntu-latest steps: diff --git a/charm/.gitignore b/k8s-charm/.gitignore similarity index 100% rename from charm/.gitignore rename to k8s-charm/.gitignore diff --git a/charm/charmcraft.yaml b/k8s-charm/charmcraft.yaml similarity index 100% rename from charm/charmcraft.yaml rename to k8s-charm/charmcraft.yaml diff --git a/charm/lib/charms/data_platform_libs/v0/data_interfaces.py b/k8s-charm/lib/charms/data_platform_libs/v0/data_interfaces.py similarity index 100% rename from charm/lib/charms/data_platform_libs/v0/data_interfaces.py rename to k8s-charm/lib/charms/data_platform_libs/v0/data_interfaces.py diff --git a/charm/lib/charms/data_platform_libs/v0/s3.py b/k8s-charm/lib/charms/data_platform_libs/v0/s3.py similarity index 100% rename from charm/lib/charms/data_platform_libs/v0/s3.py rename to k8s-charm/lib/charms/data_platform_libs/v0/s3.py diff --git a/charm/lib/charms/grafana_k8s/v0/grafana_dashboard.py b/k8s-charm/lib/charms/grafana_k8s/v0/grafana_dashboard.py similarity index 100% rename from charm/lib/charms/grafana_k8s/v0/grafana_dashboard.py rename to k8s-charm/lib/charms/grafana_k8s/v0/grafana_dashboard.py diff --git a/k8s-charm/lib/charms/hydra/v0/oauth.py b/k8s-charm/lib/charms/hydra/v0/oauth.py new file mode 100644 index 00000000..c0b35a3a --- /dev/null +++ b/k8s-charm/lib/charms/hydra/v0/oauth.py @@ -0,0 +1,808 @@ +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""# Oauth Library. + +This library is designed to enable applications to register OAuth2/OIDC +clients with an OIDC Provider through the `oauth` interface. + +## Getting started + +To get started using this library, you just need to fetch the library using `charmcraft`. **Note +that you also need to add `jsonschema` to your charm's `requirements.txt`.** + +```shell +cd some-charm +charmcraft fetch-lib charms.hydra.v0.oauth +``` + +Then, to initialize the library: +```python +# ... +from charms.hydra.v0.oauth import ClientConfig, OAuthRequirer + +OAUTH = "oauth" +OAUTH_SCOPES = "openid email" +OAUTH_GRANT_TYPES = ["authorization_code"] + +class SomeCharm(CharmBase): + def __init__(self, *args): + # ...
+ self.oauth = OAuthRequirer(self, client_config, relation_name=OAUTH) + + self.framework.observe(self.oauth.on.oauth_info_changed, self._configure_application) + # ... + + def _on_ingress_ready(self, event): + self.external_url = "https://example.com" + self._set_client_config() + + def _set_client_config(self): + client_config = ClientConfig( + urljoin(self.external_url, "/oauth/callback"), + OAUTH_SCOPES, + OAUTH_GRANT_TYPES, + ) + self.oauth.update_client_config(client_config) +``` +""" + +import json +import logging +import re +from dataclasses import asdict, dataclass, field, fields +from typing import Dict, List, Mapping, Optional + +import jsonschema +from ops.charm import CharmBase, RelationBrokenEvent, RelationChangedEvent, RelationCreatedEvent +from ops.framework import EventBase, EventSource, Handle, Object, ObjectEvents +from ops.model import Relation, Secret, SecretNotFoundError, TooManyRelatedAppsError + +# The unique Charmhub library identifier, never change it +LIBID = "a3a301e325e34aac80a2d633ef61fe97" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 11 + +PYDEPS = ["jsonschema"] + + +logger = logging.getLogger(__name__) + +DEFAULT_RELATION_NAME = "oauth" +ALLOWED_GRANT_TYPES = [ + "authorization_code", + "refresh_token", + "client_credentials", + "urn:ietf:params:oauth:grant-type:device_code", +] +ALLOWED_CLIENT_AUTHN_METHODS = ["client_secret_basic", "client_secret_post"] +CLIENT_SECRET_FIELD = "secret" + +url_regex = re.compile( + r"(^http://)|(^https://)" # http:// or https:// + r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|" + r"[A-Z0-9-]{2,}\.?)|" # domain... + r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip + r"(?::\d+)?" 
# optional port + r"(?:/?|[/?]\S+)$", + re.IGNORECASE, +) + +OAUTH_PROVIDER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/oauth/schemas/provider.json", + "type": "object", + "properties": { + "issuer_url": { + "type": "string", + }, + "authorization_endpoint": { + "type": "string", + }, + "token_endpoint": { + "type": "string", + }, + "introspection_endpoint": { + "type": "string", + }, + "userinfo_endpoint": { + "type": "string", + }, + "jwks_endpoint": { + "type": "string", + }, + "scope": { + "type": "string", + }, + "client_id": { + "type": "string", + }, + "client_secret_id": { + "type": "string", + }, + "groups": {"type": "string", "default": None}, + "ca_chain": {"type": "array", "items": {"type": "string"}, "default": []}, + "jwt_access_token": {"type": "string", "default": "False"}, + }, + "required": [ + "issuer_url", + "authorization_endpoint", + "token_endpoint", + "introspection_endpoint", + "userinfo_endpoint", + "jwks_endpoint", + "scope", + ], +} +OAUTH_REQUIRER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://canonical.github.io/charm-relation-interfaces/interfaces/oauth/schemas/requirer.json", + "type": "object", + "properties": { + "redirect_uri": { + "type": "string", + "default": None, + }, + "audience": {"type": "array", "default": [], "items": {"type": "string"}}, + "scope": {"type": "string", "default": None}, + "grant_types": { + "type": "array", + "default": None, + "items": { + "enum": ALLOWED_GRANT_TYPES, + "type": "string", + }, + }, + "token_endpoint_auth_method": { + "type": "string", + "enum": ALLOWED_CLIENT_AUTHN_METHODS, + "default": "client_secret_basic", + }, + }, + "required": ["redirect_uri", "audience", "scope", "grant_types", "token_endpoint_auth_method"], +} + + +class ClientConfigError(Exception): + """Emitted when invalid client config is provided.""" + + +class DataValidationError(RuntimeError): + """Raised when data validation fails on relation data.""" + + +def _load_data(data: Mapping, schema: Optional[Dict] = None) -> Dict: + """Parses nested fields and checks whether `data` matches `schema`.""" + ret = {} + for k, v in data.items(): + try: + ret[k] = json.loads(v) + except json.JSONDecodeError: + ret[k] = v + + if schema: + _validate_data(ret, schema) + return ret + + +def _dump_data(data: Dict, schema: Optional[Dict] = None) -> Dict: + if schema: + _validate_data(data, schema) + + ret = {} + for k, v in data.items(): + if isinstance(v, (list, dict)): + try: + ret[k] = json.dumps(v) + except json.JSONDecodeError as e: + raise DataValidationError(f"Failed to encode relation json: {e}") + elif isinstance(v, bool): + ret[k] = str(v) + else: + ret[k] = v + return ret + + +def strtobool(val: str) -> bool: + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. 
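+
+    For example (illustrative calls, matching the rules above):
+
+    >>> strtobool("Yes")
+    True
+    >>> strtobool("off")
+    False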
+ """ + if not isinstance(val, str): + raise ValueError(f"invalid value type {type(val)}") + + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError(f"invalid truth value {val}") + + +class OAuthRelation(Object): + """A class containing helper methods for oauth relation.""" + + def _pop_relation_data(self, relation_id: Relation) -> None: + if not self.model.unit.is_leader(): + return + + if len(self.model.relations) == 0: + return + + relation = self.model.get_relation(self._relation_name, relation_id=relation_id) + if not relation or not relation.app: + return + + try: + for data in list(relation.data[self.model.app]): + relation.data[self.model.app].pop(data, "") + except Exception as e: + logger.info(f"Failed to pop the relation data: {e}") + + +def _validate_data(data: Dict, schema: Dict) -> None: + """Checks whether `data` matches `schema`. + + Will raise DataValidationError if the data is not valid, else return None. + """ + try: + jsonschema.validate(instance=data, schema=schema) + except jsonschema.ValidationError as e: + raise DataValidationError(data, schema) from e + + +@dataclass +class ClientConfig: + """Helper class containing a client's configuration.""" + + redirect_uri: str + scope: str + grant_types: List[str] + audience: List[str] = field(default_factory=lambda: []) + token_endpoint_auth_method: str = "client_secret_basic" + client_id: Optional[str] = None + + def validate(self) -> None: + """Validate the client configuration.""" + # Validate redirect_uri + if not re.match(url_regex, self.redirect_uri): + raise ClientConfigError(f"Invalid URL {self.redirect_uri}") + + if self.redirect_uri.startswith("http://"): + logger.warning("Provided Redirect URL uses http scheme. 
Don't do this in production") + + # Validate grant_types + for grant_type in self.grant_types: + if grant_type not in ALLOWED_GRANT_TYPES: + raise ClientConfigError( + f"Invalid grant_type {grant_type}, must be one " f"of {ALLOWED_GRANT_TYPES}" + ) + + # Validate client authentication methods + if self.token_endpoint_auth_method not in ALLOWED_CLIENT_AUTHN_METHODS: + raise ClientConfigError( + f"Invalid client auth method {self.token_endpoint_auth_method}, " + f"must be one of {ALLOWED_CLIENT_AUTHN_METHODS}" + ) + + def to_dict(self) -> Dict: + """Convert object to dict.""" + return {k: v for k, v in asdict(self).items() if v is not None} + + +@dataclass +class OauthProviderConfig: + """Helper class containing provider's configuration.""" + + issuer_url: str + authorization_endpoint: str + token_endpoint: str + introspection_endpoint: str + userinfo_endpoint: str + jwks_endpoint: str + scope: str + client_id: Optional[str] = None + client_secret: Optional[str] = None + groups: Optional[str] = None + ca_chain: Optional[str] = None + jwt_access_token: Optional[bool] = False + + @classmethod + def from_dict(cls, dic: Dict) -> "OauthProviderConfig": + """Generate OauthProviderConfig instance from dict.""" + jwt_access_token = False + if "jwt_access_token" in dic: + jwt_access_token = strtobool(dic["jwt_access_token"]) + return cls( + jwt_access_token=jwt_access_token, + **{ + k: v + for k, v in dic.items() + if k in [f.name for f in fields(cls)] and k != "jwt_access_token" + }, + ) + + +class OAuthInfoChangedEvent(EventBase): + """Event to notify the charm that the information in the databag changed.""" + + def __init__(self, handle: Handle, client_id: str, client_secret_id: str): + super().__init__(handle) + self.client_id = client_id + self.client_secret_id = client_secret_id + + def snapshot(self) -> Dict: + """Save event.""" + return { + "client_id": self.client_id, + "client_secret_id": self.client_secret_id, + } + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + super().restore(snapshot) + self.client_id = snapshot["client_id"] + self.client_secret_id = snapshot["client_secret_id"] + + +class InvalidClientConfigEvent(EventBase): + """Event to notify the charm that the client configuration is invalid.""" + + def __init__(self, handle: Handle, error: str): + super().__init__(handle) + self.error = error + + def snapshot(self) -> Dict: + """Save event.""" + return { + "error": self.error, + } + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.error = snapshot["error"] + + +class OAuthInfoRemovedEvent(EventBase): + """Event to notify the charm that the provider data was removed.""" + + def snapshot(self) -> Dict: + """Save event.""" + return {} + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + pass + + +class OAuthRequirerEvents(ObjectEvents): + """Event descriptor for events raised by `OAuthRequirerEvents`.""" + + oauth_info_changed = EventSource(OAuthInfoChangedEvent) + oauth_info_removed = EventSource(OAuthInfoRemovedEvent) + invalid_client_config = EventSource(InvalidClientConfigEvent) + + +class OAuthRequirer(OAuthRelation): + """Register an oauth client.""" + + on = OAuthRequirerEvents() + + def __init__( + self, + charm: CharmBase, + client_config: Optional[ClientConfig] = None, + relation_name: str = DEFAULT_RELATION_NAME, + ) -> None: + super().__init__(charm, relation_name) + self._charm = charm + self._relation_name = relation_name + self._client_config = client_config + events = self._charm.on[relation_name] + 
self.framework.observe(events.relation_created, self._on_relation_created_event) + self.framework.observe(events.relation_changed, self._on_relation_changed_event) + self.framework.observe(events.relation_broken, self._on_relation_broken_event) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + try: + self._update_relation_data(self._client_config, event.relation.id) + except ClientConfigError as e: + self.on.invalid_client_config.emit(e.args[0]) + + def _on_relation_broken_event(self, event: RelationBrokenEvent) -> None: + # This may be caused by a provider unit being removed. + # Also the oauth data may still be there, perhaps we should remove this event altogether for now. + + # Notify the requirer that the relation data was removed + self.on.oauth_info_removed.emit() + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + data = event.relation.data[event.app] + if not data: + logger.info("No relation data available.") + return + + data = _load_data(data, OAUTH_PROVIDER_JSON_SCHEMA) + + client_id = data.get("client_id") + client_secret_id = data.get("client_secret_id") + if not client_id or not client_secret_id: + logger.info("OAuth Provider info is available, waiting for client to be registered.") + # The client credentials are not ready yet, so we do nothing + # This could mean that the client credentials were removed from the databag, + # but we don't allow that (for now), so we don't have to check for it. + return + + self.on.oauth_info_changed.emit(client_id, client_secret_id) + + def _update_relation_data( + self, client_config: Optional[ClientConfig], relation_id: Optional[int] = None + ) -> None: + if not self.model.unit.is_leader() or not client_config: + return + + if not isinstance(client_config, ClientConfig): + raise ValueError(f"Unexpected client_config type: {type(client_config)}") + + client_config.validate() + + try: + relation = self.model.get_relation( + relation_name=self._relation_name, relation_id=relation_id + ) + except TooManyRelatedAppsError: + raise RuntimeError("More than one relations are defined. Please provide a relation_id") + + if not relation or not relation.app: + return + + data = _dump_data(client_config.to_dict(), OAUTH_REQUIRER_JSON_SCHEMA) + relation.data[self.model.app].update(data) + + def is_client_created(self, relation_id: Optional[int] = None) -> bool: + """Check if the client has been created.""" + if len(self.model.relations) == 0: + return None + try: + relation = self.model.get_relation(self._relation_name, relation_id=relation_id) + except TooManyRelatedAppsError: + raise RuntimeError("More than one relations are defined. Please provide a relation_id") + + if not relation or not relation.app: + return None + + return ( + "client_id" in relation.data[relation.app] + and "client_secret_id" in relation.data[relation.app] + ) + + def get_provider_info( + self, relation_id: Optional[int] = None + ) -> Optional[OauthProviderConfig]: + """Get the provider information from the databag.""" + if len(self.model.relations) == 0: + return None + try: + relation = self.model.get_relation(self._relation_name, relation_id=relation_id) + except TooManyRelatedAppsError: + raise RuntimeError("More than one relations are defined. 
Please provide a relation_id") + if not relation or not relation.app: + return None + + data = relation.data[relation.app] + if not data: + logger.info("No relation data available.") + return + + data = _load_data(data, OAUTH_PROVIDER_JSON_SCHEMA) + + client_secret_id = data.get("client_secret_id") + if client_secret_id: + _client_secret = self.get_client_secret(client_secret_id) + client_secret = _client_secret.get_content()[CLIENT_SECRET_FIELD] + data["client_secret"] = client_secret + + oauth_provider = OauthProviderConfig.from_dict(data) + return oauth_provider + + def get_client_secret(self, client_secret_id: str) -> Secret: + """Get the client_secret.""" + client_secret = self.model.get_secret(id=client_secret_id) + return client_secret + + def update_client_config( + self, client_config: ClientConfig, relation_id: Optional[int] = None + ) -> None: + """Update the client config stored in the object.""" + self._client_config = client_config + self._update_relation_data(client_config, relation_id=relation_id) + + +class ClientCreatedEvent(EventBase): + """Event to notify the Provider charm to create a new client.""" + + def __init__( + self, + handle: Handle, + redirect_uri: str, + scope: str, + grant_types: List[str], + audience: List, + token_endpoint_auth_method: str, + relation_id: int, + ) -> None: + super().__init__(handle) + self.redirect_uri = redirect_uri + self.scope = scope + self.grant_types = grant_types + self.audience = audience + self.token_endpoint_auth_method = token_endpoint_auth_method + self.relation_id = relation_id + + def snapshot(self) -> Dict: + """Save event.""" + return { + "redirect_uri": self.redirect_uri, + "scope": self.scope, + "grant_types": self.grant_types, + "audience": self.audience, + "token_endpoint_auth_method": self.token_endpoint_auth_method, + "relation_id": self.relation_id, + } + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.redirect_uri = snapshot["redirect_uri"] + self.scope = snapshot["scope"] + self.grant_types = snapshot["grant_types"] + self.audience = snapshot["audience"] + self.token_endpoint_auth_method = snapshot["token_endpoint_auth_method"] + self.relation_id = snapshot["relation_id"] + + def to_client_config(self) -> ClientConfig: + """Convert the event information to a ClientConfig object.""" + return ClientConfig( + self.redirect_uri, + self.scope, + self.grant_types, + self.audience, + self.token_endpoint_auth_method, + ) + + +class ClientChangedEvent(EventBase): + """Event to notify the Provider charm that the client config changed.""" + + def __init__( + self, + handle: Handle, + redirect_uri: str, + scope: str, + grant_types: List, + audience: List, + token_endpoint_auth_method: str, + relation_id: int, + client_id: str, + ) -> None: + super().__init__(handle) + self.redirect_uri = redirect_uri + self.scope = scope + self.grant_types = grant_types + self.audience = audience + self.token_endpoint_auth_method = token_endpoint_auth_method + self.relation_id = relation_id + self.client_id = client_id + + def snapshot(self) -> Dict: + """Save event.""" + return { + "redirect_uri": self.redirect_uri, + "scope": self.scope, + "grant_types": self.grant_types, + "audience": self.audience, + "token_endpoint_auth_method": self.token_endpoint_auth_method, + "relation_id": self.relation_id, + "client_id": self.client_id, + } + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.redirect_uri = snapshot["redirect_uri"] + self.scope = snapshot["scope"] + self.grant_types = 
snapshot["grant_types"] + self.audience = snapshot["audience"] + self.token_endpoint_auth_method = snapshot["token_endpoint_auth_method"] + self.relation_id = snapshot["relation_id"] + self.client_id = snapshot["client_id"] + + def to_client_config(self) -> ClientConfig: + """Convert the event information to a ClientConfig object.""" + return ClientConfig( + self.redirect_uri, + self.scope, + self.grant_types, + self.audience, + self.token_endpoint_auth_method, + self.client_id, + ) + + +class ClientDeletedEvent(EventBase): + """Event to notify the Provider charm that the client was deleted.""" + + def __init__( + self, + handle: Handle, + relation_id: int, + ) -> None: + super().__init__(handle) + self.relation_id = relation_id + + def snapshot(self) -> Dict: + """Save event.""" + return {"relation_id": self.relation_id} + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.relation_id = snapshot["relation_id"] + + +class OAuthProviderEvents(ObjectEvents): + """Event descriptor for events raised by `OAuthProviderEvents`.""" + + client_created = EventSource(ClientCreatedEvent) + client_changed = EventSource(ClientChangedEvent) + client_deleted = EventSource(ClientDeletedEvent) + + +class OAuthProvider(OAuthRelation): + """A provider object for OIDC Providers.""" + + on = OAuthProviderEvents() + + def __init__(self, charm: CharmBase, relation_name: str = DEFAULT_RELATION_NAME) -> None: + super().__init__(charm, relation_name) + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[relation_name] + self.framework.observe( + events.relation_changed, + self._get_client_config_from_relation_data, + ) + self.framework.observe( + events.relation_broken, + self._on_relation_broken, + ) + + def _get_client_config_from_relation_data(self, event: RelationChangedEvent) -> None: + if not self.model.unit.is_leader(): + return + + data = event.relation.data[event.app] + if not data: + logger.info("No requirer relation data available.") + return + + client_data = _load_data(data, OAUTH_REQUIRER_JSON_SCHEMA) + redirect_uri = client_data.get("redirect_uri") + scope = client_data.get("scope") + grant_types = client_data.get("grant_types") + audience = client_data.get("audience") + token_endpoint_auth_method = client_data.get("token_endpoint_auth_method") + + data = event.relation.data[self._charm.app] + if not data: + logger.info("No provider relation data available.") + return + provider_data = _load_data(data, OAUTH_PROVIDER_JSON_SCHEMA) + client_id = provider_data.get("client_id") + + relation_id = event.relation.id + + if client_id: + # Modify an existing client + self.on.client_changed.emit( + redirect_uri, + scope, + grant_types, + audience, + token_endpoint_auth_method, + relation_id, + client_id, + ) + else: + # Create a new client + self.on.client_created.emit( + redirect_uri, scope, grant_types, audience, token_endpoint_auth_method, relation_id + ) + + def _get_secret_label(self, relation: Relation) -> str: + return f"client_secret_{relation.id}" + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + # There is no way to tell if this event was emitted because the relation was removed or if one of + # the applications was scaled down. Until this is fixed, we don't delete the client. 
+ # Workaround for https://github.com/canonical/operator/issues/888 + # self._pop_relation_data(event.relation.id) + + # self._delete_juju_secret(event.relation) + self.on.client_deleted.emit(event.relation.id) + + def _create_juju_secret(self, client_secret: str, relation: Relation) -> Secret: + """Create a juju secret and grant it to a relation.""" + secret = {CLIENT_SECRET_FIELD: client_secret} + juju_secret = self.model.app.add_secret(secret, label=self._get_secret_label(relation)) + juju_secret.grant(relation) + return juju_secret + + def _delete_juju_secret(self, relation: Relation) -> None: + try: + secret = self.model.get_secret(label=self._get_secret_label(relation)) + except SecretNotFoundError: + return + else: + secret.remove_all_revisions() + + def remove_secret(self, relation: Relation) -> None: + return self._delete_juju_secret(relation) + + def set_provider_info_in_relation_data( + self, + issuer_url: str, + authorization_endpoint: str, + token_endpoint: str, + introspection_endpoint: str, + userinfo_endpoint: str, + jwks_endpoint: str, + scope: str, + groups: Optional[str] = None, + ca_chain: Optional[str] = None, + jwt_access_token: Optional[bool] = False, + ) -> None: + """Put the provider information in the databag.""" + if not self.model.unit.is_leader(): + return + + data = { + "issuer_url": issuer_url, + "authorization_endpoint": authorization_endpoint, + "token_endpoint": token_endpoint, + "introspection_endpoint": introspection_endpoint, + "userinfo_endpoint": userinfo_endpoint, + "jwks_endpoint": jwks_endpoint, + "scope": scope, + "jwt_access_token": jwt_access_token, + } + if groups: + data["groups"] = groups + if ca_chain: + data["ca_chain"] = ca_chain + + for relation in self.model.relations[self._relation_name]: + relation.data[self.model.app].update(_dump_data(data)) + + def set_client_credentials_in_relation_data( + self, relation_id: int, client_id: str, client_secret: str + ) -> None: + """Put the client credentials in the databag.""" + if not self.model.unit.is_leader(): + return + + relation = self.model.get_relation(self._relation_name, relation_id) + if not relation or not relation.app: + return + # TODO: What if we are refreshing the client_secret? 
We need to add a + # new revision for that + secret = self._create_juju_secret(client_secret, relation) + data = {"client_id": client_id, "client_secret_id": secret.id} + relation.data[self.model.app].update(_dump_data(data))
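The docstring at the top of this file shows only the requirer side of the `oauth` interface. For orientation, here is a minimal provider-side sketch using the `OAuthProvider` API defined above; `_register_client` is a hypothetical helper standing in for the charm's own client-registration logic:

```python
from ops.charm import CharmBase

from charms.hydra.v0.oauth import ClientCreatedEvent, OAuthProvider


class ProviderCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        self.oauth = OAuthProvider(self)
        self.framework.observe(self.oauth.on.client_created, self._on_client_created)

    def _on_client_created(self, event: ClientCreatedEvent) -> None:
        # Register the client with the OIDC server (hypothetical helper), then
        # publish the credentials; the juju secret is created and granted internally.
        client_id, client_secret = self._register_client(event.to_client_config())
        self.oauth.set_client_credentials_in_relation_data(
            event.relation_id, client_id, client_secret
        )
```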
diff --git a/charm/lib/charms/loki_k8s/v1/loki_push_api.py b/k8s-charm/lib/charms/loki_k8s/v1/loki_push_api.py similarity index 100% rename from charm/lib/charms/loki_k8s/v1/loki_push_api.py rename to k8s-charm/lib/charms/loki_k8s/v1/loki_push_api.py diff --git a/charm/lib/charms/observability_libs/v0/juju_topology.py b/k8s-charm/lib/charms/observability_libs/v0/juju_topology.py similarity index 100% rename from charm/lib/charms/observability_libs/v0/juju_topology.py rename to k8s-charm/lib/charms/observability_libs/v0/juju_topology.py diff --git a/charm/lib/charms/openfga_k8s/v1/openfga.py b/k8s-charm/lib/charms/openfga_k8s/v1/openfga.py similarity index 100% rename from charm/lib/charms/openfga_k8s/v1/openfga.py rename to k8s-charm/lib/charms/openfga_k8s/v1/openfga.py diff --git a/charm/lib/charms/prometheus_k8s/v0/prometheus_scrape.py b/k8s-charm/lib/charms/prometheus_k8s/v0/prometheus_scrape.py similarity index 100% rename from charm/lib/charms/prometheus_k8s/v0/prometheus_scrape.py rename to k8s-charm/lib/charms/prometheus_k8s/v0/prometheus_scrape.py diff --git a/charm/lib/charms/redis_k8s/v0/redis.py b/k8s-charm/lib/charms/redis_k8s/v0/redis.py similarity index 100% rename from charm/lib/charms/redis_k8s/v0/redis.py rename to k8s-charm/lib/charms/redis_k8s/v0/redis.py diff --git a/charm/lib/charms/saml_integrator/v0/saml.py b/k8s-charm/lib/charms/saml_integrator/v0/saml.py similarity index 100% rename from charm/lib/charms/saml_integrator/v0/saml.py rename to k8s-charm/lib/charms/saml_integrator/v0/saml.py diff --git a/charm/lib/charms/smtp_integrator/v0/smtp.py b/k8s-charm/lib/charms/smtp_integrator/v0/smtp.py similarity index 100% rename from charm/lib/charms/smtp_integrator/v0/smtp.py rename to k8s-charm/lib/charms/smtp_integrator/v0/smtp.py diff --git a/charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py b/k8s-charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py similarity index 100% rename from charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py rename to k8s-charm/lib/charms/tempo_coordinator_k8s/v0/tracing.py diff --git a/charm/lib/charms/traefik_k8s/v2/ingress.py b/k8s-charm/lib/charms/traefik_k8s/v2/ingress.py similarity index 100% rename from charm/lib/charms/traefik_k8s/v2/ingress.py rename to k8s-charm/lib/charms/traefik_k8s/v2/ingress.py diff --git a/charm/pyproject.toml b/k8s-charm/pyproject.toml similarity index 100% rename from charm/pyproject.toml rename to k8s-charm/pyproject.toml diff --git a/charm/requirements.txt b/k8s-charm/requirements.txt similarity index 100% rename from charm/requirements.txt rename to k8s-charm/requirements.txt diff --git a/charm/src/charm.py b/k8s-charm/src/charm.py similarity index 100% rename from charm/src/charm.py rename to k8s-charm/src/charm.py diff --git a/charm/tox.ini b/k8s-charm/tox.ini similarity index 100% rename from charm/tox.ini rename to k8s-charm/tox.ini diff --git a/machine-charm/.charmignore b/machine-charm/.charmignore new file mode 100644 index 00000000..661abdf0 --- /dev/null +++ b/machine-charm/.charmignore @@ -0,0 +1,31 @@ +.git +.venv +.env +.tox +.mypy_cache +.ruff_cache +__pycache__ +*.charm +*.egg-info +build/ +parts/ +prime/ +stage/ +node_modules +*.charm +machine-charm +k8s-charm +konf +node_modules +yarn.lock +__pycache__ +docker-entrypoint-initdb.d +.github +.gitignore +docker-compose.yaml +Dockerfile +entrypoint +migrate.sh +package.json +rockcraft.yaml +run diff --git a/machine-charm/.gitignore b/machine-charm/.gitignore new file mode 100644 index 00000000..0d183e72 --- /dev/null +++ b/machine-charm/.gitignore @@ -0,0 +1,14 @@ +venv/ +build/ +*.charm +.tox/ +.coverage +__pycache__/ +*.py[cod] +.idea +.vscode/ +.craft +parts/ +prime/ +overlay/ +stage/ \ No newline at end of file diff --git a/machine-charm/CONTRIBUTING.md b/machine-charm/CONTRIBUTING.md new file mode 100644 index 00000000..71f36dca --- /dev/null +++ b/machine-charm/CONTRIBUTING.md @@ -0,0 +1,35 @@ +# Contributing + +To make contributions to this charm, you'll need a working +[development setup](https://documentation.ubuntu.com/juju/3.6/howto/manage-your-deployment/#set-up-your-deployment-local-testing-and-development). + +You can create an environment for development with `tox`: + +```shell +tox devenv -e integration +source venv/bin/activate +``` + +## Testing + +This project uses `tox` for managing test environments. There are some pre-configured environments +that can be used for linting and formatting code when you're preparing contributions to the charm: + +```shell +tox run -e format # update your code according to linting rules +tox run -e lint # code style +tox run -e static # static type checking +tox run -e unit # unit tests +tox run -e integration # integration tests +tox # runs 'format', 'lint', 'static', and 'unit' environments +``` + +## Build the charm + +Build the charm in this git repository using: + +```shell +charmcraft pack --destructive-mode +``` + + + +# machine-charm + +Charmhub package name: ubuntu-security-api-vm +More information: https://charmhub.io/ubuntu-security-api-vm + +An air-gap ready machine charm for the Ubuntu Security API. + +## Other resources + + + +- [Read more](https://example.com) + +- [Contributing](CONTRIBUTING.md) + +- See the [Juju documentation](https://documentation.ubuntu.com/juju/3.6/howto/manage-charms/) for more information about developing and improving charms. diff --git a/machine-charm/charmcraft.yaml b/machine-charm/charmcraft.yaml new file mode 100644 index 00000000..9db9e716 --- /dev/null +++ b/machine-charm/charmcraft.yaml @@ -0,0 +1,64 @@ +# This file configures Charmcraft. +# See https://documentation.ubuntu.com/charmcraft/stable/reference/files/charmcraft-yaml-file/ +type: charm +name: ubuntu-security-api-vm +title: Ubuntu Security API Machine Charm +summary: An air-gap ready machine charm for the Ubuntu Security API. +description: | + This charm provides an API for accessing Ubuntu security data. + + It is designed to be deployed in an air-gapped environment, and can be used to provide access to Ubuntu security data without requiring an internet connection. + +base: ubuntu@24.04 +platforms: + amd64: + +parts: + local-app: + plugin: dump + source: ../ + source-type: local + override-pull: | + craftctl default + rm -rf .git .venv .env *.charm .mypy_cache .ruff_cache machine-charm k8s-charm konf node_modules yarn.lock __pycache__ docker-entrypoint-initdb.d .github .gitignore docker-compose.yaml Dockerfile entrypoint migrate.sh package.json rockcraft.yaml run + organize: + "*": ./src/flask/app + prime: + - src/flask/app/app.py + - src/flask/app/migrations/* + - src/flask/app/requirements.txt + - src/flask/app/scripts/* + - src/flask/app/templates/* + - src/flask/app/tests/* + - src/flask/app/webapp/* + charm: + plugin: uv + after: [local-app] + source: .
+ build-snaps: + - astral-uv + +charm-libs: + - lib: postgresql.postgres_client + version: "1" + +requires: + postgresql: + interface: postgresql_client + optional: false + limit: 1 + +config: + options: + database_url: + type: secret + description: "Primary connection string" + replica-one: + type: secret + description: "First read-replica connection string" + replica-two: + type: secret + description: "Second read-replica connection string" + oauth-token-salt: + type: secret + description: "Salt used to encode and decode OAuth tokens"
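The `requires` and `config` sections above come together in the charm code: the `postgresql` relation is consumed through the `data_interfaces` library bundled below, and each `type: secret` option resolves to a Juju secret URI. A minimal sketch, not part of this patch — the handler names, the `database_name`, the `_configure_workload` helper, and the secret's `url` content key are all assumptions for illustration:

```python
from ops.charm import CharmBase

from charms.data_platform_libs.v0.data_interfaces import DatabaseCreatedEvent, DatabaseRequires


class UbuntuSecurityApiCharm(CharmBase):
    def __init__(self, *args):
        super().__init__(*args)
        # Consume the postgresql_client interface declared under `requires:`.
        self.database = DatabaseRequires(self, relation_name="postgresql", database_name="security")
        self.framework.observe(self.database.on.database_created, self._on_database_created)
        self.framework.observe(self.on.config_changed, self._on_config_changed)

    def _on_database_created(self, event: DatabaseCreatedEvent) -> None:
        # Credentials and endpoints arrive through the relation.
        self._configure_workload(event.username, event.password, event.endpoints)

    def _on_config_changed(self, event) -> None:
        # A `type: secret` option holds a Juju secret URI once the admin sets it.
        secret_uri = self.config.get("database_url")
        if secret_uri:
            content = self.model.get_secret(id=secret_uri).get_content()
            primary_url = content.get("url")  # assumed content key
```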
diff --git a/machine-charm/lib/charms/data_platform_libs/v0/data_interfaces.py b/machine-charm/lib/charms/data_platform_libs/v0/data_interfaces.py new file mode 100644 index 00000000..5be1d931 --- /dev/null +++ b/machine-charm/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -0,0 +1,5782 @@ +# Copyright 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r"""Library to manage the relation for the data-platform products. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed applications supported by the data team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +### Database (MySQL, Postgresql, MongoDB, and Redis) + +#### Requires Charm +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +The following is an example of using the DatabaseCreatedEvent in the context of the +application charm code: + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + DatabaseCreatedEvent, + DatabaseRequires, + DatabaseEntityCreatedEvent, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. + + def __init__(self, *args): + super().__init__(*args) + + # Charm events defined in the database requires charm library. + self.database = DatabaseRequires(self, relation_name="database", database_name="database") + self.framework.observe(self.database.on.database_created, self._on_database_created) + self.framework.observe(self.database.on.database_entity_created, self._on_database_entity_created) + + def _on_database_created(self, event: DatabaseCreatedEvent) -> None: + # Handle the created database + + # Create configuration file for app + config_file = self._render_app_config_file( + event.username, + event.password, + event.endpoints, + ) + + # Start application with rendered configuration + self._start_application(config_file) + + # Set active status + self.unit.status = ActiveStatus("received database credentials") + + def _on_database_entity_created(self, event: DatabaseEntityCreatedEvent) -> None: + # Handle the created entity + ... +``` + +As shown above, the library provides some custom events to handle specific situations, +which are listed below: + +- database_created: event emitted when the requested database is created. +- database_entity_created: event emitted when the requested entity is created. +- endpoints_changed: event emitted when the read/write endpoints of the database have changed. +- read_only_endpoints_changed: event emitted when the read-only endpoints of the database + have changed. Event is not triggered if read/write endpoints changed too. + +If you need to connect multiple database clusters to the same relation endpoint, +the application charm can implement the same code as if it were connecting to only +one database cluster (like the above code example). + +To differentiate multiple clusters connected to the same relation endpoint +the application charm can use the name of the remote application: + +```python + +def _on_database_created(self, event: DatabaseCreatedEvent) -> None: + # Get the remote app name of the cluster that triggered this event + cluster = event.relation.app.name +``` + +It is also possible to provide an alias for each different database cluster/relation. + +So, it is possible to differentiate the clusters in two ways. +The first is to use the remote application name, i.e., `event.relation.app.name`, as above. + +The second way is to use different event handlers to handle the events of each cluster. +The implementation would be something like the following code: + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + DatabaseCreatedEvent, + DatabaseRequires, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. + + def __init__(self, *args): + super().__init__(*args) + + # Define the cluster aliases and one handler for each cluster database created event. + self.database = DatabaseRequires( + self, + relation_name="database", + database_name="database", + relations_aliases = ["cluster1", "cluster2"], + ) + self.framework.observe( + self.database.on.cluster1_database_created, self._on_cluster1_database_created + ) + self.framework.observe( + self.database.on.cluster2_database_created, self._on_cluster2_database_created + ) + + def _on_cluster1_database_created(self, event: DatabaseCreatedEvent) -> None: + # Handle the created database on the cluster named cluster1 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.username, + event.password, + event.endpoints, + ) + ... + + def _on_cluster2_database_created(self, event: DatabaseCreatedEvent) -> None: + # Handle the created database on the cluster named cluster2 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.username, + event.password, + event.endpoints, + ) + ... +``` + +When you need to check whether a plugin (extension) is enabled on the PostgreSQL +charm, you can use the is_postgresql_plugin_enabled method. To use that, you need to +add the following dependency to your charmcraft.yaml file: + +```yaml + +parts: + charm: + charm-binary-python-packages: + - psycopg[binary] +```
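+
+For example, an illustrative guard (the plugin name here is only an example):
+
+```python
+if self.database.is_postgresql_plugin_enabled("pg_trgm"):
+    # The extension is enabled on the related PostgreSQL cluster.
+    ...
+```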
### Provider Charm + +The following is an example of using the DatabaseRequestedEvent in the context of the +database charm code: + +```python +from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + # Charm events defined in the database provides charm library. + self.provided_database = DatabaseProvides(self, relation_name="database") + self.framework.observe(self.provided_database.on.database_requested, + self._on_database_requested) + # Database generic helper + self.database = DatabaseHelper() + + def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: + # Handle the event triggered by a new database requested in the relation + # Retrieve the database name using the charm library. + db_name = event.database + # generate a new user credential + username = self.database.generate_user() + password = self.database.generate_password() + # set the credentials for the relation + self.provided_database.set_credentials(event.relation.id, username, password) + # set other variables for the relation event.set_tls("False") +``` + +As shown above, the library provides a custom event (database_requested) to handle +the situation when an application charm requests a new database to be created. +It's preferred to subscribe to this event instead of the relation changed event, to avoid +creating a new database when information other than a database name is +exchanged in the relation databag. + +### Kafka + +This library is the interface to use and interact with the Kafka charm. This library contains +custom events that add convenience to manage Kafka, and provides methods to consume the +application related data. + +#### Requirer Charm + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + BootstrapServerChangedEvent, + KafkaRequires, + TopicCreatedEvent, + TopicEntityCreatedEvent, +) + +class ApplicationCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.kafka = KafkaRequires(self, "kafka_client", "test-topic") + self.framework.observe( + self.kafka.on.bootstrap_server_changed, self._on_kafka_bootstrap_server_changed + ) + self.framework.observe( + self.kafka.on.topic_created, self._on_kafka_topic_created + ) + self.framework.observe( + self.kafka.on.topic_entity_created, self._on_kafka_topic_entity_created + ) + + def _on_kafka_bootstrap_server_changed(self, event: BootstrapServerChangedEvent): + # Event triggered when a bootstrap server was changed for this application + + new_bootstrap_server = event.bootstrap_server + ... + + def _on_kafka_topic_created(self, event: TopicCreatedEvent): + # Event triggered when a topic was created for this application + username = event.username + password = event.password + tls = event.tls + tls_ca = event.tls_ca + bootstrap_server = event.bootstrap_server + consumer_group_prefix = event.consumer_group_prefix + zookeeper_uris = event.zookeeper_uris + ... + + def _on_kafka_topic_entity_created(self, event: TopicEntityCreatedEvent): + # Event triggered when an entity was created for this application + ... +``` + +As shown above, the library provides some custom events to handle specific situations, +which are listed below: + +- topic_created: event emitted when the requested topic is created. +- bootstrap_server_changed: event emitted when the bootstrap server has changed. +- credential_changed: event emitted when the credentials of Kafka changed. + +### Provider Charm + +Following the previous example, this is an example of the provider charm. + +```python +from charms.data_platform_libs.v0.data_interfaces import ( + KafkaProvides, + TopicRequestedEvent, +) + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events.
+ self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the Kafka Provides charm library. + self.kafka_provider = KafkaProvides(self, relation_name="kafka_client") + self.framework.observe(self.kafka_provider.on.topic_requested, self._on_topic_requested) + self.framework.observe(self.kafka_provider.on.topic_entity_requested, self._on_entity_requested) + # Kafka generic helper + self.kafka = KafkaHelper() + + def _on_topic_requested(self, event: TopicRequestedEvent): + # Handle the on_topic_requested event. + + topic = event.topic + relation_id = event.relation.id + # set connection info in the databag relation + self.kafka_provider.set_bootstrap_server(relation_id, self.kafka.get_bootstrap_server()) + self.kafka_provider.set_credentials(relation_id, username=username, password=password) + self.kafka_provider.set_consumer_group_prefix(relation_id, ...) + self.kafka_provider.set_tls(relation_id, "False") + self.kafka_provider.set_zookeeper_uris(relation_id, ...) + + def _on_entity_requested(self, event: EntityRequestedEvent): + # Handle the on_topic_entity_requested event. + ... +``` +As shown above, the library provides a custom event (topic_requested) to handle +the situation when an application charm requests a new topic to be created. +It is preferred to subscribe to this event instead of the relation changed event, to avoid +creating a new topic when information other than a topic name is +exchanged in the relation databag. + +### Karapace + +This library is the interface to use and interact with the Karapace charm. This library contains +custom events that add convenience to manage Karapace, and provides methods to consume the +application related data. + +#### Requirer Charm + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + EndpointsChangedEvent, + KarapaceRequires, + SubjectAllowedEvent, +) + +class ApplicationCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.karapace = KarapaceRequires(self, relation_name="karapace_client", subject="test-subject") + self.framework.observe( + self.karapace.on.server_changed, self._on_karapace_server_changed + ) + self.framework.observe( + self.karapace.on.subject_allowed, self._on_karapace_subject_allowed + ) + self.framework.observe( + self.karapace.on.subject_entity_created, self._on_subject_entity_created + ) + + + def _on_karapace_server_changed(self, event: EndpointsChangedEvent): + # Event triggered when a server endpoint was changed for this application + new_server = event.endpoints + ... + + def _on_karapace_subject_allowed(self, event: SubjectAllowedEvent): + # Event triggered when a subject was allowed for this application + username = event.username + password = event.password + tls = event.tls + endpoints = event.endpoints + ... + + def _on_subject_entity_created(self, event: SubjectEntityCreatedEvent): + # Event triggered when a subject entity was created for this application + entity_name = event.entity_name + entity_password = event.entity_password + ... +``` + +As shown above, the library provides some custom events to handle specific situations, +which are listed below: + +- subject_allowed: event emitted when the requested subject is allowed. +- server_changed: event emitted when the server endpoints have changed. + +#### Provider Charm + +Following the previous example, this is an example of the provider charm.
+ +```python +from charms.data_platform_libs.v0.data_interfaces import ( + KarapaceProvides, + SubjectRequestedEvent, +) + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the Karapace Provides charm library. + self.karapace_provider = KarapaceProvides(self, relation_name="karapace_client") + self.framework.observe(self.karapace_provider.on.subject_requested, self._on_subject_requested) + # Karapace generic helper + self.karapace = KarapaceHelper() + + def _on_subject_requested(self, event: SubjectRequestedEvent): + # Handle the on_subject_requested event. + + subject = event.subject + relation_id = event.relation.id + # set connection info in the databag relation + self.karapace_provider.set_endpoint(relation_id, self.karapace.get_endpoint()) + self.karapace_provider.set_credentials(relation_id, username=username, password=password) + self.karapace_provider.set_tls(relation_id, "False") +``` + +As shown above, the library provides a custom event (subject_requested) to handle +the situation when an application charm requests a new subject to be created. +It is preferred to subscribe to this event instead of the relation changed event, to avoid +creating a new subject when information other than a subject name is +exchanged in the relation databag. +""" + +import copy +import json +import logging +from abc import ABC, abstractmethod +from collections import UserDict, namedtuple +from dataclasses import asdict, dataclass +from datetime import datetime +from enum import Enum +from os import PathLike +from pathlib import Path +from typing import ( + Callable, + Dict, + Final, + ItemsView, + KeysView, + List, + Optional, + Set, + Tuple, + TypedDict, + Union, + ValuesView, + overload, +) + +from ops import JujuVersion, Model, Secret, SecretInfo, SecretNotFoundError +from ops.charm import ( + CharmBase, + CharmEvents, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + SecretChangedEvent, +) +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 58 + +PYDEPS = ["ops>=2.0.0"] + +# Starting from what LIBPATCH number to apply legacy solutions +# v0.17 was the last version without secrets +LEGACY_SUPPORT_FROM = 17 + +logger = logging.getLogger(__name__) + +Diff = namedtuple("Diff", "added changed deleted") +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings.
+ +added - keys that were added +changed - keys that still exist but have new values +deleted - keys that were deleted""" + +OptionalPathLike = Optional[Union[PathLike, str]] + +ENTITY_USER = "USER" +ENTITY_GROUP = "GROUP" + +PROV_SECRET_PREFIX = "secret-" +PROV_SECRET_FIELDS = "provided-secrets" +REQ_SECRET_FIELDS = "requested-secrets" +STATUS_FIELD = "status" +GROUP_MAPPING_FIELD = "secret_group_mapping" +GROUP_SEPARATOR = "@" + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + + +############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class SecretsIllegalUpdateError(SecretError): + """Raised when an illegal secret update is attempted.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +class PrematureDataAccessError(DataInterfacesError): + """To be raised when the Relation Data may be accessed (written) before the protocol initialization is complete.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + +############################################################################## +# Databag handling and comparison methods +############################################################################## + + +def get_encoded_dict( + relation: Relation, member: Union[Unit, Application], field: str +) -> Optional[Dict[str, str]]: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + logger.error("Unexpected datatype for %s instead of dict.", str(data)) + + +def get_encoded_list( + relation: Relation, member: Union[Unit, Application], field: str +) -> Optional[List[str]]: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "[]")) + if isinstance(data, list): + return data + logger.error("Unexpected datatype for %s instead of list.", str(data)) + + +def set_encoded_field( + relation: Relation, + member: Union[Unit, Application], + field: str, + value: Union[str, list, Dict[str, str]], +) -> None: + """Set an encoded field from relation data.""" + relation.data[member].update({field: json.dumps(value)}) + + +def diff(event: RelationChangedEvent, bucket: Optional[Union[Unit, Application]]) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. + bucket: bucket of the databag (app or unit) + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + # Retrieve the old data from the data key in the application relation databag.
+ if not bucket: + return Diff([], [], []) + + old_data = get_encoded_dict(event.relation, bucket, "data") + + if not old_data: + old_data = {} + + # Retrieve the new data from the event relation databag. + new_data = ( + {key: value for key, value in event.relation.data[event.app].items() if key != "data"} + if event.app + else {} + ) + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() # pyright: ignore [reportAssignmentType] + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() # pyright: ignore [reportAssignmentType] + # These are the keys that already existed in the databag, + # but had their values changed. + changed = { + key + for key in old_data.keys() & new_data.keys() # pyright: ignore [reportAssignmentType] + if old_data[key] != new_data[key] # pyright: ignore [reportAssignmentType] + } + # Convert the new_data to a serializable format and save it for a next diff check. + set_encoded_field(event.relation, bucket, "data", new_data) + + # Return the diff with all possible changes. + return Diff(added, changed, deleted) + + +############################################################################## +# Module decorators +############################################################################## + + +def leader_only(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self.local_app and not self.local_unit.is_leader(): + logger.error( + "This operation (%s()) can only be performed by the leader unit", f.__name__ + ) + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def juju_secrets_only(f): + """Decorator to ensure that certain operations would be only executed on Juju3.""" + + def wrapper(self, *args, **kwargs): + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + return f(self, *args, **kwargs) + + return wrapper + + +def dynamic_secrets_only(f): + """Decorator to ensure that certain operations would be only executed when NO static secrets are defined.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields: + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + +def either_static_or_dynamic_secrets(f): + """Decorator to ensure that static and dynamic secrets won't be used in parallel.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields and set(self.current_secret_fields) - set( + self.static_secret_fields + ): + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + +def legacy_apply_from_version(version: int) -> Callable: + """Decorator to decide whether to apply a legacy function or not. + + Based on LEGACY_SUPPORT_FROM module variable value, the importer charm may only want + to apply legacy solutions starting from a specific LIBPATCH. + + NOTE: All 'legacy' functions have to be defined and called in a way that they return `None`. + This results in cleaner and more secure execution flows in case the function may be disabled. + This requirement implicitly means that legacy functions change the internal state strictly, + don't return information. 
+ """ + + def decorator(f: Callable[..., None]): + """Signature is ensuring None return value.""" + f.legacy_version = version + + def wrapper(self, *args, **kwargs) -> None: + if version >= LEGACY_SUPPORT_FROM: + return f(self, *args, **kwargs) + + return wrapper + + return decorator + + +############################################################################## +# Helper classes +############################################################################## + + +class Scope(Enum): + """Peer relations scope.""" + + APP = "app" + UNIT = "unit" + + +class SecretGroup(str): + """Secret groups specific type.""" + + +@dataclass +class RelationStatus: + """Base data class for status propagation on charm relations.""" + + code: int + message: str + resolution: str + + @property + def is_informational(self) -> bool: + """Is this an informational status?""" + return self.code // 1000 == 1 + + @property + def is_transitory(self) -> bool: + """Is this a transitory status?""" + return self.code // 1000 == 4 + + @property + def is_fatal(self) -> bool: + """Is this a fatal status, requiring removing the relation?""" + return self.code // 1000 == 5 + + +class RelationStatusDict(TypedDict): + """Base type for dict representation of `RelationStatus` dataclass.""" + + code: int + message: str + resolution: str + + +class SecretGroupsAggregate(str): + """Secret groups with option to extend with additional constants.""" + + def __init__(self): + self.USER = SecretGroup("user") + self.TLS = SecretGroup("tls") + self.MTLS = SecretGroup("mtls") + self.ENTITY = SecretGroup("entity") + self.EXTRA = SecretGroup("extra") + + def __setattr__(self, name, value): + """Setting internal constants.""" + if name in self.__dict__: + raise RuntimeError("Can't set constant!") + else: + super().__setattr__(name, SecretGroup(value)) + + def groups(self) -> list: + """Return the list of stored SecretGroups.""" + return list(self.__dict__.values()) + + def get_group(self, group: str) -> Optional[SecretGroup]: + """If the input str translates to a group name, return that.""" + return SecretGroup(group) if group in self.groups() else None + + +SECRET_GROUPS = SecretGroupsAggregate() + + +class CachedSecret: + """Locally cache a secret. 
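+
+    Illustrative usage sketch (the label below is hypothetical):
+        secret = CachedSecret(model, app, label="db.0.user.secret")
+        content = secret.get_content()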
+
+    The data structure closely mirrors the layout of the actual Juju Secret storage.
+    """
+
+    KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]]
+
+    def __init__(
+        self,
+        model: Model,
+        component: Union[Application, Unit],
+        label: str,
+        secret_uri: Optional[str] = None,
+        legacy_labels: List[str] = [],
+    ):
+        self._secret_meta = None
+        self._secret_content = {}
+        self._secret_uri = secret_uri
+        self.label = label
+        self._model = model
+        self.component = component
+        self.legacy_labels = legacy_labels
+        self.current_label = None
+
+    @property
+    def meta(self) -> Optional[Secret]:
+        """Getting cached secret meta-information."""
+        if not self._secret_meta:
+            if not (self._secret_uri or self.label):
+                return
+
+            try:
+                self._secret_meta = self._model.get_secret(label=self.label)
+            except SecretNotFoundError:
+                # Falling back to seeking for potential legacy labels
+                self._legacy_compat_find_secret_by_old_label()
+
+            # If still not found, to be checked by URI, to be labelled with the proposed label
+            if not self._secret_meta and self._secret_uri:
+                self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label)
+        return self._secret_meta
+
+    ##########################################################################
+    # Backwards compatibility / Upgrades
+    ##########################################################################
+    # These functions are used to keep backwards compatibility on rolling upgrades
+    # Policy:
+    # All data is kept intact until the first write operation. (This allows a minimal
+    # grace period during which rollbacks are fully safe. For more info see the spec.)
+    # All data involves:
+    #   - databag contents
+    #   - secrets content
+    #   - secret labels (!!!)
+    # Legacy functions must return None, and leave an equally consistent state whether
+    # they are executed or skipped (as a high enough versioned execution environment may
+    # not require so)
+
+    # Compatibility
+
+    @legacy_apply_from_version(34)
+    def _legacy_compat_find_secret_by_old_label(self) -> None:
+        """Compatibility function, allowing to find a secret by a legacy label.
+
+        This functionality is typically needed when secret labels changed over an upgrade.
+        Until the first write operation, we need to maintain data as it was, including keeping
+        the old secret label. In order to keep track of the old label currently used to access
+        the secret, an additional 'current_label' field is defined.
+        """
+        for label in self.legacy_labels:
+            try:
+                self._secret_meta = self._model.get_secret(label=label)
+            except SecretNotFoundError:
+                pass
+            except ModelError as e:
+                # Permission denied can be raised if the secret exists but is not yet granted to us.
+                if "permission denied" in str(e):
+                    return
+                raise
+            else:
+                if label != self.label:
+                    self.current_label = label
+                return
+
+    # Migrations
+
+    @legacy_apply_from_version(34)
+    def _legacy_migration_to_new_label_if_needed(self) -> None:
+        """Helper function to re-create the secret with a different label.
+
+        Juju does not provide a way to change secret labels.
+        Thus whenever moving from a secrets version that involves secret label changes,
+        we "re-create" the existing secret, and attach the new label to the new
+        secret, to be used from then on.
+
+        Note: we replace the old secret with a new one "in place", as we can't
+        easily switch the containing SecretCache structure to point to a new secret.
+        Instead we are changing the 'self' (CachedSecret) object to point to the
+        new instance.
+        """
+        if not self.current_label or not (self.meta and self._secret_meta):
+            return
+
+        # Create a new secret with the new label
+        content = self._secret_meta.get_content()
+        self._secret_uri = None
+
+        # It would be nice to have a way to check whether we are the owners of the secret...
+        try:
+            self._secret_meta = self.add_secret(content, label=self.label)
+        except ModelError as err:
+            if MODEL_ERRORS["not_leader"] not in str(err):
+                raise
+            if "permission denied" not in str(err):
+                raise
+        self.current_label = None
+
+    ##########################################################################
+    # Public functions
+    ##########################################################################
+
+    def add_secret(
+        self,
+        content: Dict[str, str],
+        relation: Optional[Relation] = None,
+        label: Optional[str] = None,
+    ) -> Secret:
+        """Create a new secret."""
+        if self._secret_uri:
+            raise SecretAlreadyExistsError(
+                "Secret is already defined with uri %s", self._secret_uri
+            )
+
+        label = self.label if not label else label
+
+        secret = self.component.add_secret(content, label=label)
+        if relation and relation.app != self._model.app:
+            # If it's not a peer relation, grant is to be applied
+            secret.grant(relation)
+        self._secret_uri = secret.id
+        self._secret_meta = secret
+        return self._secret_meta
+
+    def get_content(self) -> Dict[str, str]:
+        """Getting cached secret content."""
+        if not self._secret_content:
+            if self.meta:
+                try:
+                    self._secret_content = self.meta.get_content(refresh=True)
+                except (ValueError, ModelError) as err:
+                    # https://bugs.launchpad.net/juju/+bug/2042596
+                    # Only triggered when 'refresh' is set
+                    if isinstance(err, ModelError) and not any(
+                        msg in str(err) for msg in self.KNOWN_MODEL_ERRORS
+                    ):
+                        raise
+                    # Due to: ValueError: Secret owner cannot use refresh=True
+                    self._secret_content = self.meta.get_content()
+        return self._secret_content
+
+    def set_content(self, content: Dict[str, str]) -> None:
+        """Setting cached secret content."""
+        if not self.meta:
+            return
+
+        # DPE-4182: do not create a new revision if the content stays the same
+        if content == self.get_content():
+            return
+
+        if content:
+            self._legacy_migration_to_new_label_if_needed()
+            self.meta.set_content(content)
+            self._secret_content = content
+        else:
+            self.meta.remove_all_revisions()
+
+    def get_info(self) -> Optional[SecretInfo]:
+        """Wrapper to call get_info() on the underlying Secret object within CachedSecret, if any."""
+        if self.meta:
+            return self.meta.get_info()
+
+    def remove(self) -> None:
+        """Remove secret."""
+        if not self.meta:
+            raise SecretsUnavailableError("Non-existent secret was attempted to be removed.")
+        try:
+            self.meta.remove_all_revisions()
+        except SecretNotFoundError:
+            pass
+        self._secret_content = {}
+        self._secret_meta = None
+        self._secret_uri = None
+
+
+class SecretCache:
+    """A data structure storing CachedSecret objects."""
+
+    def __init__(self, model: Model, component: Union[Application, Unit]):
+        self._model = model
+        self.component = component
+        self._secrets: Dict[str, CachedSecret] = {}
+
+    def get(
+        self, label: str, uri: Optional[str] = None, legacy_labels: List[str] = []
+    ) -> Optional[CachedSecret]:
+        """Getting a secret from the Juju Secret store or the cache."""
+        if not self._secrets.get(label):
+            secret = CachedSecret(
+                self._model, self.component, label, uri, legacy_labels=legacy_labels
+            )
+            if secret.meta:
+                self._secrets[label] = secret
+        return self._secrets.get(label)
+
+    def add(self, label: str, content: Dict[str, str], relation: Relation) -> CachedSecret:
+        """Adding a secret to Juju Secrets."""
+        if self._secrets.get(label):
+            raise SecretAlreadyExistsError(f"Secret {label} already exists")
+
+        secret = CachedSecret(self._model, self.component, label)
+        secret.add_secret(content, relation)
+        self._secrets[label] = secret
+        return self._secrets[label]
+
+    def remove(self, label: str) -> None:
+        """Remove a secret from the cache."""
+        if secret := self.get(label):
+            try:
+                secret.remove()
+                self._secrets.pop(label)
+            except (SecretsUnavailableError, KeyError):
+                pass
+            else:
+                return
+        logging.debug("Non-existing Juju Secret was attempted to be removed %s", label)
+
+
+################################################################################
+# Relation Data base/abstract ancestors (i.e. parent classes)
+################################################################################
+
+
+# Base Data
+
+
+class DataDict(UserDict):
+    """Python Standard Library 'dict' - like representation of Relation Data."""
+
+    def __init__(self, relation_data: "Data", relation_id: int):
+        self.relation_data = relation_data
+        self.relation_id = relation_id
+
+    @property
+    def data(self) -> Dict[str, str]:
+        """Return the full content of the Abstract Relation Data dictionary."""
+        result = self.relation_data.fetch_my_relation_data([self.relation_id])
+        try:
+            result_remote = self.relation_data.fetch_relation_data([self.relation_id])
+        except NotImplementedError:
+            result_remote = {self.relation_id: {}}
+        if result:
+            result_remote[self.relation_id].update(result[self.relation_id])
+        return result_remote.get(self.relation_id, {})
+
+    def __setitem__(self, key: str, item: str) -> None:
+        """Set an item of the Abstract Relation Data dictionary."""
+        self.relation_data.update_relation_data(self.relation_id, {key: item})
+
+    def __getitem__(self, key: str) -> str:
+        """Get an item of the Abstract Relation Data dictionary."""
+        result = None
+
+        # Avoid the "leader_only" error for cross-charm non-leader units, so we don't report a useless error
+        if (
+            not hasattr(self.relation_data.fetch_my_relation_field, "leader_only")
+            or self.relation_data.component != self.relation_data.local_app
+            or self.relation_data.local_unit.is_leader()
+        ):
+            result = self.relation_data.fetch_my_relation_field(self.relation_id, key)
+
+        if not result:
+            try:
+                result = self.relation_data.fetch_relation_field(self.relation_id, key)
+            except NotImplementedError:
+                pass
+
+        if not result:
+            raise KeyError
+        return result
+
+    def __eq__(self, d: dict) -> bool:
+        """Equality."""
+        return self.data == d
+
+    def __repr__(self) -> str:
+        """String representation of the Abstract Relation Data dictionary."""
+        return repr(self.data)
+
+    def __len__(self) -> int:
+        """Length of the Abstract Relation Data dictionary."""
+        return len(self.data)
+
+    def __delitem__(self, key: str) -> None:
+        """Delete an item of the Abstract Relation Data dictionary."""
+        self.relation_data.delete_relation_data(self.relation_id, [key])
+
+    def has_key(self, key: str) -> bool:
+        """Does the key exist in the Abstract Relation Data dictionary?"""
+        return key in self.data
+
+    def update(self, items: Dict[str, str]):
+        """Update the Abstract Relation Data dictionary."""
+        self.relation_data.update_relation_data(self.relation_id, items)
+
+    def keys(self) -> KeysView[str]:
+        """Keys of the Abstract Relation Data dictionary."""
+        return self.data.keys()
+
+    def values(self) -> ValuesView[str]:
+        """Values of the Abstract Relation Data dictionary."""
+        return self.data.values()
+
+    def items(self) -> ItemsView[str, str]:
+        """Items of the Abstract Relation Data dictionary."""
+        return self.data.items()
+
+    def pop(self, item: str) -> str:
+        """Pop an item of the Abstract Relation Data dictionary."""
+        result = self.relation_data.fetch_my_relation_field(self.relation_id, item)
+        if not result:
+            raise KeyError(f"Item {item} doesn't exist.")
+        self.relation_data.delete_relation_data(self.relation_id, [item])
+        return result
+
+    def __contains__(self, item: str) -> bool:
+        """Does the Abstract Relation Data dictionary contain item?"""
+        return item in self.data.values()
+
+    def __iter__(self):
+        """Iterate through the Abstract Relation Data dictionary."""
+        return iter(self.data)
+
+    def get(self, key: str, default: Optional[str] = None) -> Optional[str]:
+        """Safely get an item of the Abstract Relation Data dictionary."""
+        try:
+            if result := self[key]:
+                return result
+        except KeyError:
+            return default
+
+
+class Data(ABC):
+    """Base relation data manipulation (abstract) class."""
+
+    SCOPE = Scope.APP
+
+    # Local map to associate mappings with secrets potentially as a group
+    SECRET_LABEL_MAP = {
+        "username": SECRET_GROUPS.USER,
+        "password": SECRET_GROUPS.USER,
+        "uris": SECRET_GROUPS.USER,
+        "read-only-uris": SECRET_GROUPS.USER,
+        "tls": SECRET_GROUPS.TLS,
+        "tls-ca": SECRET_GROUPS.TLS,
+        "mtls-cert": SECRET_GROUPS.MTLS,
+        "entity-name": SECRET_GROUPS.ENTITY,
+        "entity-password": SECRET_GROUPS.ENTITY,
+    }
+
+    SECRET_FIELDS = []
+
+    def __init__(
+        self,
+        model: Model,
+        relation_name: str,
+    ) -> None:
+        self._model = model
+        self.local_app = self._model.app
+        self.local_unit = self._model.unit
+        self.relation_name = relation_name
+        self._jujuversion = None
+        self.component = self.local_app if self.SCOPE == Scope.APP else self.local_unit
+        self.secrets = SecretCache(self._model, self.component)
+        self.data_component = None
+        self._local_secret_fields = []
+        self._remote_secret_fields = list(self.SECRET_FIELDS)
+
+    @property
+    def relations(self) -> List[Relation]:
+        """The list of Relation instances associated with this relation_name."""
+        return self._model.relations[self.relation_name]
+
+    @property
+    def secrets_enabled(self):
+        """Is this Juju version allowing for Secrets usage?"""
+        if not self._jujuversion:
+            self._jujuversion = JujuVersion.from_environ()
+        return self._jujuversion.has_secrets
+
+    @property
+    def secret_label_map(self):
+        """Exposing the secret-label map via a property -- could be overridden in descendants!"""
+        return self.SECRET_LABEL_MAP
+
+    @property
+    def local_secret_fields(self) -> Optional[List[str]]:
+        """Local secret fields, in case secrets are being used."""
+        if self.secrets_enabled:
+            return self._local_secret_fields
+
+    @property
+    def remote_secret_fields(self) -> Optional[List[str]]:
+        """Remote secret fields, in case secrets are being used."""
+        if self.secrets_enabled:
+            return self._remote_secret_fields
+
+    @property
+    def my_secret_groups(self) -> Optional[List[SecretGroup]]:
+        """Secret groups owned by the local side, in case secrets are being used."""
+        if self.secrets_enabled:
+            return [
+                self.SECRET_LABEL_MAP[field]
+                for field in self._local_secret_fields
+                if field in self.SECRET_LABEL_MAP
+            ]
+
+    # Mandatory overrides for internal/helper methods
+
+    @juju_secrets_only
+    def _get_relation_secret(
+        self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None
+    ) -> Optional[CachedSecret]:
+        """Retrieve a Juju Secret that's been stored in the relation databag."""
+        if not relation_name:
+            relation_name = self.relation_name
+
+        label = self._generate_secret_label(relation_name, relation_id, group_mapping)
+        if secret := self.secrets.get(label):
+            return secret
+
+        relation = self._model.get_relation(relation_name, relation_id)
+        if not relation:
+            return
+
+        if secret_uri := self.get_secret_uri(relation, group_mapping):
+            return self.secrets.get(label, secret_uri)
+
+    # Mandatory overrides for requirer and peer, implemented for Provider
+    # Requirer uses local component and switched keys
+    # _local_secret_fields -> PROV_SECRET_FIELDS
+    # _remote_secret_fields -> REQ_SECRET_FIELDS
+    # Provider uses remote component and
+    # _local_secret_fields -> REQ_SECRET_FIELDS
+    # _remote_secret_fields -> PROV_SECRET_FIELDS
+    @abstractmethod
+    def _load_secrets_from_databag(self, relation: Relation) -> None:
+        """Load secrets from the databag."""
+        raise NotImplementedError
+
+    def _fetch_specific_relation_data(
+        self, relation: Relation, fields: Optional[List[str]]
+    ) -> Dict[str, str]:
+        """Fetch data available (directly or indirectly -- i.e. secrets) from the relation (remote app data)."""
+        if not relation.app:
+            return {}
+        self._load_secrets_from_databag(relation)
+        return self._fetch_relation_data_with_secrets(
+            relation.app, self.remote_secret_fields, relation, fields
+        )
+
+    def _fetch_my_specific_relation_data(
+        self, relation: Relation, fields: Optional[List[str]]
+    ) -> dict:
+        """Fetch our own relation data."""
+        # load secrets
+        self._load_secrets_from_databag(relation)
+        return self._fetch_relation_data_with_secrets(
+            self.local_app,
+            self.local_secret_fields,
+            relation,
+            fields,
+        )
+
+    def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None:
+        """Set values for fields, regardless of whether they are secrets or not."""
+        self._load_secrets_from_databag(relation)
+
+        _, normal_fields = self._process_secret_fields(
+            relation,
+            self.local_secret_fields,
+            list(data),
+            self._add_or_update_relation_secrets,
+            data=data,
+        )
+
+        normal_content = {k: v for k, v in data.items() if k in normal_fields}
+        self._update_relation_data_without_secrets(self.local_app, relation, normal_content)
+
+    def _add_or_update_relation_secrets(
+        self,
+        relation: Relation,
+        group: SecretGroup,
+        secret_fields: Set[str],
+        data: Dict[str, str],
+        uri_to_databag=True,
+    ) -> bool:
+        """Update contents for a Secret group. If the Secret doesn't exist, create it."""
+        if self._get_relation_secret(relation.id, group):
+            return self._update_relation_secret(relation, group, secret_fields, data)
+
+        return self._add_relation_secret(relation, group, secret_fields, data, uri_to_databag)
+
+    @juju_secrets_only
+    def _add_relation_secret(
+        self,
+        relation: Relation,
+        group_mapping: SecretGroup,
+        secret_fields: Set[str],
+        data: Dict[str, str],
+        uri_to_databag=True,
+    ) -> bool:
+        """Add a new Juju Secret that will be registered in the relation databag."""
+        if uri_to_databag and self.get_secret_uri(relation, group_mapping):
+            logging.error("Secret for relation %s already exists, not adding again", relation.id)
+            return False
+
+        content = self._content_for_secret_group(data, secret_fields, group_mapping)
+
+        label = self._generate_secret_label(self.relation_name, relation.id, group_mapping)
+        secret = self.secrets.add(label, content, relation)
+
+        if uri_to_databag:
+            # According to lint we may not have a Secret ID
+            if not secret.meta or not secret.meta.id:
+                logging.error("Secret is missing Secret ID")
+                raise SecretError("Secret added but is missing Secret ID")
+
+            self.set_secret_uri(relation, group_mapping, secret.meta.id)
+
+        # Return success
+        return True
+
+    @juju_secrets_only
+    def _update_relation_secret(
+        self,
+        relation: Relation,
+        group_mapping: SecretGroup,
+        secret_fields: Set[str],
+        data: Dict[str, str],
+    ) -> bool:
+        """Update the contents of an existing Juju Secret, referred to in the relation databag."""
+        secret = self._get_relation_secret(relation.id, group_mapping)
+
+        if not secret:
+            logging.error("Can't update secret for relation %s", relation.id)
+            return False
+
+        content = self._content_for_secret_group(data, secret_fields, group_mapping)
+
+        old_content = secret.get_content()
+        full_content = copy.deepcopy(old_content)
+        full_content.update(content)
+        secret.set_content(full_content)
+
+        # Return True on success
+        return True
+
+    @juju_secrets_only
+    def _delete_relation_secret(
+        self, relation: Relation, group: SecretGroup, secret_fields: List[str], fields: List[str]
+    ) -> bool:
+        """Delete fields from an existing Juju Secret, referred to in the relation databag."""
+        secret = self._get_relation_secret(relation.id, group)
+
+        if not secret:
+            logging.error("Can't delete secret for relation %s", str(relation.id))
+            return False
+
+        old_content = secret.get_content()
+        new_content = copy.deepcopy(old_content)
+        for field in fields:
+            try:
+                new_content.pop(field)
+            except KeyError:
+                logging.debug(
+                    "Non-existing secret was attempted to be removed %s, %s",
+                    str(relation.id),
+                    str(field),
+                )
+                return False
+
+        # Remove the secret from the relation if it's fully gone
+        if not new_content:
+            field = self._generate_secret_field_name(group)
+            try:
+                relation.data[self.component].pop(field)
+            except KeyError:
+                pass
+            label = self._generate_secret_label(self.relation_name, relation.id, group)
+            self.secrets.remove(label)
+        else:
+            secret.set_content(new_content)
+
+        # Return success
+        return True
+
+    def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None:
+        """Delete data available (directly or indirectly -- i.e. secrets) from the relation for owner/this_app."""
+        if relation.app:
+            self._load_secrets_from_databag(relation)
+
+        _, normal_fields = self._process_secret_fields(
+            relation, self.local_secret_fields, fields, self._delete_relation_secret, fields=fields
+        )
+        self._delete_relation_data_without_secrets(self.local_app, relation, list(normal_fields))
+
+    def _register_secret_to_relation(
+        self, relation_name: str, relation_id: int, secret_id: str, group: SecretGroup
+    ):
+        """Fetch secrets and apply the local label on them.
+
+        [MAGIC HERE]
+        If we fetch a secret using get_secret(id=<id>, label=<label>),
+        then <label> will be "stuck" on the Secret object, whenever it may
+        appear (i.e. as an event attribute, or fetched manually) on future occasions.
+
+        This will allow us to uniquely identify the secret on the Provider side (typically on
+        'secret-changed' events), and map it to the corresponding relation.
+        """
+        label = self._generate_secret_label(relation_name, relation_id, group)
+
+        # Fetching the Secret's meta information ensures that it gets registered locally
+        CachedSecret(self._model, self.component, label, secret_id).meta
+
+    def _register_secrets_to_relation(self, relation: Relation, params_name_list: List[str]):
+        """Make sure that secrets of the provided list are locally 'registered' from the databag.
+
+        More on the 'locally registered' magic is described in the _register_secret_to_relation() method.
+        """
+        if not relation.app:
+            return
+
+        for group in SECRET_GROUPS.groups():
+            secret_field = self._generate_secret_field_name(group)
+            if secret_field in params_name_list and (
+                secret_uri := self.get_secret_uri(relation, group)
+            ):
+                self._register_secret_to_relation(relation.name, relation.id, secret_uri, group)
+
+    # Optional overrides
+
+    def _legacy_apply_on_fetch(self) -> None:
+        """This function should provide a list of compatibility functions to be applied when fetching (legacy) data."""
+        pass
+
+    def _legacy_apply_on_update(self, fields: List[str]) -> None:
+        """This function should provide a list of compatibility functions to be applied when writing data.
+
+        Since data may be at a legacy version, migration may be mandatory.
+        """
+        pass
+
+    def _legacy_apply_on_delete(self, fields: List[str]) -> None:
+        """This function should provide a list of compatibility functions to be applied when deleting (legacy) data."""
+        pass
+
+    # Internal helper methods
+
+    @staticmethod
+    def _is_secret_field(field: str) -> bool:
+        """Is the field in question a secret reference (URI) field or not?"""
+        return field.startswith(PROV_SECRET_PREFIX)
+
+    @staticmethod
+    def _generate_secret_label(
+        relation_name: str, relation_id: int, group_mapping: SecretGroup
+    ) -> str:
+        """Generate a unique secret label for a group within a relation context."""
+        return f"{relation_name}.{relation_id}.{group_mapping}.secret"
+
+    def _generate_secret_field_name(self, group_mapping: SecretGroup) -> str:
+        """Generate a unique secret (URI) field name for a group within a relation context."""
+        return f"{PROV_SECRET_PREFIX}{group_mapping}"
+
+    def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]:
+        """Retrieve the relation that belongs to a secret label."""
+        contents = secret_label.split(".")
+
+        if not (contents and len(contents) >= 3):
+            return
+
+        contents.pop()  # ".secret" at the end
+        contents.pop()  # Group mapping
+        relation_id = contents.pop()
+        try:
+            relation_id = int(relation_id)
+        except ValueError:
+            return
+
+        # In case the '.' character appeared in the relation name
+        relation_name = ".".join(contents)
+
+        try:
+            return self.get_relation(relation_name, relation_id)
+        except ModelError:
+            return
+
+    def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]:
+        """Helper function to arrange secret mappings under their group.
+
+        NOTE: All unrecognized items end up in the 'extra' secret bucket.
+        Make sure only secret fields are passed!
+        """
+        secret_fieldnames_grouped = {}
+        for key in secret_fields:
+            if group := self.secret_label_map.get(key):
+                secret_fieldnames_grouped.setdefault(group, []).append(key)
+            else:
+                secret_fieldnames_grouped.setdefault(SECRET_GROUPS.EXTRA, []).append(key)
+        return secret_fieldnames_grouped
+
+    def _get_group_secret_contents(
+        self,
+        relation: Relation,
+        group: SecretGroup,
+        secret_fields: Union[Set[str], List[str]] = [],
+    ) -> Dict[str, str]:
+        """Helper function to retrieve collective, requested contents of a secret."""
+        if (secret := self._get_relation_secret(relation.id, group)) and (
+            secret_data := secret.get_content()
+        ):
+            return {
+                k: v for k, v in secret_data.items() if not secret_fields or k in secret_fields
+            }
+        return {}
+
+    def _content_for_secret_group(
+        self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup
+    ) -> Dict[str, str]:
+        """Select the <field>: <value> pairs from the input that belong to this particular Secret group."""
+        if group_mapping == SECRET_GROUPS.EXTRA:
+            return {
+                k: v
+                for k, v in content.items()
+                if k in secret_fields and k not in self.secret_label_map.keys()
+            }
+
+        return {
+            k: v
+            for k, v in content.items()
+            if k in secret_fields and self.secret_label_map.get(k) == group_mapping
+        }
+
+    @juju_secrets_only
+    def _get_relation_secret_data(
+        self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None
+    ) -> Optional[Dict[str, str]]:
+        """Retrieve the contents of a Juju Secret that's been stored in the relation databag."""
+        secret = self._get_relation_secret(relation_id, group_mapping, relation_name)
+        if secret:
+            return secret.get_content()
+
+    # Core operations on Relation Fields manipulations (regardless whether the field is in the databag or in a secret)
+    # Internal functions to be called directly from transparent public interface functions (+closely related helpers)
+
+    def _process_secret_fields(
+        self,
+        relation: Relation,
+        req_secret_fields: Optional[List[str]],
+        impacted_rel_fields: List[str],
+        operation: Callable,
+        *args,
+        **kwargs,
+    ) -> Tuple[Dict[str, str], Set[str]]:
+        """Isolate the target secret fields of the manipulation, and execute the requested operation by Secret Group."""
+        result = {}
+
+        # If the relation started on a databag, we just stay on the databag
+        # (Rolling upgrades may result in a relation starting on databag, getting secrets enabled on-the-fly)
+        # self.local_app is sufficient to check (ignored if Requirer, as it never has secrets -- works if Provider)
+        fallback_to_databag = (
+            req_secret_fields
+            and (self.local_unit == self._model.unit and self.local_unit.is_leader())
+            and set(req_secret_fields) & set(relation.data[self.component])
+        )
+        normal_fields = set(impacted_rel_fields)
+        if req_secret_fields and self.secrets_enabled and not fallback_to_databag:
+            normal_fields = normal_fields - set(req_secret_fields)
+            secret_fields = set(impacted_rel_fields) - set(normal_fields)
+
+            secret_fieldnames_grouped = self._group_secret_fields(list(secret_fields))
+
+            for group in secret_fieldnames_grouped:
+                # operation() should return nothing when all goes well
+                if group_result := operation(relation, group, secret_fields, *args, **kwargs):
+                    # If "meaningful" data was returned, we take it. (Some 'operation'-s only return success/failure.)
+                    if isinstance(group_result, dict):
+                        result.update(group_result)
+                else:
+                    # If it wasn't found as a secret, let's give it a 2nd chance as a "normal" field
+                    # Needed when a Juju3 Requirer meets a Juju2 Provider
+                    normal_fields |= set(secret_fieldnames_grouped[group])
+        return (result, normal_fields)
+
+    def _fetch_relation_data_without_secrets(
+        self, component: Union[Application, Unit], relation: Relation, fields: Optional[List[str]]
+    ) -> Dict[str, str]:
+        """Fetching databag contents when no secrets are involved.
+
+        Since the Provider's databag is the only one holding secrets, we can apply
+        a simplified workflow to read the Requirer side's databag.
+        This is typically used when the Provider side wants to read the Requirer side's data,
+        or when the Requirer side may want to read its own data.
+        """
+        if component not in relation.data or not relation.data[component]:
+            return {}
+
+        if fields:
+            return {
+                k: relation.data[component][k] for k in fields if k in relation.data[component]
+            }
+        else:
+            return dict(relation.data[component])
+
+    def _fetch_relation_data_with_secrets(
+        self,
+        component: Union[Application, Unit],
+        req_secret_fields: Optional[List[str]],
+        relation: Relation,
+        fields: Optional[List[str]] = None,
+    ) -> Dict[str, str]:
+        """Fetching databag contents when secrets may be involved.
+
+        This function has internal logic to resolve if a requested field may be "hidden"
+        within a Relation Secret, or directly available as a databag field. Typically
+        used to read the Provider side's databag (either by the Requirer side, or by the
+        Provider side itself).
+        """
+        result = {}
+        normal_fields = []
+
+        if not fields:
+            if component not in relation.data:
+                return {}
+
+            all_fields = list(relation.data[component].keys())
+            normal_fields = [field for field in all_fields if not self._is_secret_field(field)]
+            fields = normal_fields + req_secret_fields if req_secret_fields else normal_fields
+
+        if fields:
+            result, normal_fields = self._process_secret_fields(
+                relation, req_secret_fields, fields, self._get_group_secret_contents
+            )
+
+        # Processing "normal" fields. May include leftover from what we couldn't retrieve as a secret.
+ # (Typically when Juju3 Requires meets Juju2 Provider) + if normal_fields: + result.update( + self._fetch_relation_data_without_secrets(component, relation, list(normal_fields)) + ) + return result + + def _update_relation_data_without_secrets( + self, component: Union[Application, Unit], relation: Relation, data: Dict[str, str] + ) -> None: + """Updating databag contents when no secrets are involved.""" + if component not in relation.data or relation.data[component] is None: + return + + if relation: + relation.data[component].update(data) + + def _delete_relation_data_without_secrets( + self, component: Union[Application, Unit], relation: Relation, fields: List[str] + ) -> None: + """Remove databag fields 'fields' from Relation.""" + if component not in relation.data or relation.data[component] is None: + return + + for field in fields: + try: + relation.data[component].pop(field) + except KeyError: + logger.debug( + "Non-existing field '%s' was attempted to be removed from the databag (relation ID: %s)", + str(field), + str(relation.id), + ) + pass + + # Public interface methods + # Handling Relation Fields seamlessly, regardless if in databag or a Juju Secret + + def as_dict(self, relation_id: int) -> UserDict: + """Dict behavior representation of the Abstract Data.""" + return DataDict(self, relation_id) + + def get_relation(self, relation_name, relation_id) -> Relation: + """Safe way of retrieving a relation.""" + relation = self._model.get_relation(relation_name, relation_id) + + if not relation: + raise DataInterfacesError( + "Relation %s %s couldn't be retrieved", relation_name, relation_id + ) + + return relation + + def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: + """Get the secret URI for the corresponding group.""" + secret_field = self._generate_secret_field_name(group) + # if the secret is not managed by this component, + # we need to fetch it from the other side + + # Fix for the linter + if self.my_secret_groups is None: + raise DataInterfacesError("Secrets are not enabled for this component") + component = self.component if group in self.my_secret_groups else relation.app + return relation.data[component].get(secret_field) + + def set_secret_uri(self, relation: Relation, group: SecretGroup, secret_uri: str) -> None: + """Set the secret URI for the corresponding group.""" + secret_field = self._generate_secret_field_name(group) + relation.data[self.component][secret_field] = secret_uri + + def fetch_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Dict[int, Dict[str, str]]: + """Retrieves data from relation. + + This function can be used to retrieve data from a relation + in the charm code when outside an event callback. + Function cannot be used in `*-relation-broken` events and will raise an exception. + + Returns: + a dict of the values stored in the relation data bag + for all relation instances (indexed by the relation ID). 
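+
+        Example (illustrative; the relation ID and field are hypothetical):
+            self.fetch_relation_data(relation_ids=[7], fields=["endpoints"])
+            # -> {7: {"endpoints": "10.1.2.3:5432"}}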
+ """ + self._legacy_apply_on_fetch() + + if not relation_name: + relation_name = self.relation_name + + relations = [] + if relation_ids: + relations = [ + self.get_relation(relation_name, relation_id) for relation_id in relation_ids + ] + else: + relations = self.relations + + data = {} + for relation in relations: + if not relation_ids or (relation_ids and relation.id in relation_ids): + data[relation.id] = self._fetch_specific_relation_data(relation, fields) + return data + + def fetch_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """Get a single field from the relation data.""" + return ( + self.fetch_relation_data([relation_id], [field], relation_name) + .get(relation_id, {}) + .get(field) + ) + + def fetch_my_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Optional[Dict[int, Dict[str, str]]]: + """Fetch data of the 'owner' (or 'this app') side of the relation. + + NOTE: Since only the leader can read the relation's 'this_app'-side + Application databag, the functionality is limited to leaders + """ + self._legacy_apply_on_fetch() + + if not relation_name: + relation_name = self.relation_name + + relations = [] + if relation_ids: + relations = [ + self.get_relation(relation_name, relation_id) for relation_id in relation_ids + ] + else: + relations = self.relations + + data = {} + for relation in relations: + if not relation_ids or relation.id in relation_ids: + data[relation.id] = self._fetch_my_specific_relation_data(relation, fields) + return data + + def fetch_my_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """Get a single field from the relation data -- owner side. 
+
+        NOTE: Since only the leader can read the relation's 'this_app'-side
+        Application databag, the functionality is limited to leaders.
+        """
+        if relation_data := self.fetch_my_relation_data([relation_id], [field], relation_name):
+            return relation_data.get(relation_id, {}).get(field)
+
+    @leader_only
+    def update_relation_data(self, relation_id: int, data: dict) -> None:
+        """Update the data within the relation."""
+        self._legacy_apply_on_update(list(data.keys()))
+
+        relation_name = self.relation_name
+        relation = self.get_relation(relation_name, relation_id)
+        return self._update_relation_data(relation, data)
+
+    @leader_only
+    def delete_relation_data(self, relation_id: int, fields: List[str]) -> None:
+        """Remove fields from the relation."""
+        self._legacy_apply_on_delete(fields)
+
+        relation_name = self.relation_name
+        relation = self.get_relation(relation_name, relation_id)
+        return self._delete_relation_data(relation, fields)
+
+
+class EventHandlers(Object):
+    """Base event handlers for the relation."""
+
+    def __init__(self, charm: CharmBase, relation_data: Data, unique_key: str = ""):
+        """Manager of base client relations."""
+        if not unique_key:
+            unique_key = relation_data.relation_name
+        super().__init__(charm, unique_key)
+
+        self.charm = charm
+        self.relation_data = relation_data
+
+        self.framework.observe(
+            charm.on[self.relation_data.relation_name].relation_changed,
+            self._on_relation_changed_event,
+        )
+
+        self.framework.observe(
+            self.charm.on[relation_data.relation_name].relation_created,
+            self._on_relation_created_event,
+        )
+
+        self.framework.observe(
+            charm.on.secret_changed,
+            self._on_secret_changed_event,
+        )
+
+    # Event handlers
+
+    def _on_relation_created_event(self, event: RelationCreatedEvent) -> None:
+        """Event emitted when the relation is created."""
+        pass
+
+    @abstractmethod
+    def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
+        """Event emitted when the relation data has changed."""
+        raise NotImplementedError
+
+    @abstractmethod
+    def _on_secret_changed_event(self, event: SecretChangedEvent) -> None:
+        """Event emitted when a secret has changed."""
+        raise NotImplementedError
+
+    def _diff(self, event: RelationChangedEvent) -> Diff:
+        """Retrieves the diff of the data in the relation changed databag.
+
+        Args:
+            event: relation changed event.
+
+        Returns:
+            a Diff instance containing the added, deleted and changed
+            keys from the event relation databag.
+        """
+        return diff(event, self.relation_data.data_component)
+
+
+# Base ProviderData and RequirerData
+
+
+class ProviderData(Data):
+    """Base provides-side of the data products relation."""
+
+    RESOURCE_FIELD = "database"
+
+    def __init__(
+        self,
+        model: Model,
+        relation_name: str,
+        status_schema_path: OptionalPathLike = None,
+    ) -> None:
+        super().__init__(model, relation_name)
+        self.data_component = self.local_app
+        self._local_secret_fields = []
+        self._remote_secret_fields = list(self.SECRET_FIELDS)
+        self._status_schema = (
+            {} if not status_schema_path else self._load_status_schema(Path(status_schema_path))
+        )
+
+    def _load_status_schema(self, schema_path: Path) -> Dict[int, RelationStatus]:
+        """Load a JSON schema defining status codes and their details.
+
+        Args:
+            schema_path: JSON schema file path.
+
+        Raises:
+            FileNotFoundError: If the provided path is invalid/inaccessible.
+
+        Returns:
+            Dict[int, RelationStatus]: Mapping of status code to RelationStatus data objects.
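+
+        Illustrative schema file (the code and wording are hypothetical):
+            {"statuses": [{"code": 4001,
+                           "message": "database is being initialised",
+                           "resolution": "wait for the provider to settle"}]}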
+        """
+        if not schema_path.exists():
+            raise FileNotFoundError(f"Can't locate status schema file: {schema_path}")
+
+        with open(schema_path, "r") as schema_file:
+            content = json.load(schema_file)
+
+        return {s["code"]: RelationStatus(**s) for s in content.get("statuses", [])}
+
+    def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None:
+        """Set values for fields, regardless of whether they are secrets or not."""
+        keys = set(data.keys())
+        if self.fetch_relation_field(relation.id, self.RESOURCE_FIELD) is None and (
+            keys - {"endpoints", "read-only-endpoints", "replset"}
+        ):
+            raise PrematureDataAccessError(
+                "Premature access to relation data, update is forbidden before the connection is initialized."
+            )
+        super()._update_relation_data(relation, data)
+
+    # Public methods - "native"
+
+    def set_credentials(self, relation_id: int, username: str, password: str) -> None:
+        """Set credentials.
+
+        This function writes in the application data bag, therefore,
+        only the leader unit can call it.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            username: user that was created.
+            password: password of the created user.
+        """
+        self.update_relation_data(relation_id, {"username": username, "password": password})
+
+    def set_entity_credentials(
+        self, relation_id: int, entity_name: str, entity_password: Optional[str] = None
+    ) -> None:
+        """Set entity credentials.
+
+        This function writes in the application data bag, therefore,
+        only the leader unit can call it.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            entity_name: name of the created entity.
+            entity_password: password of the created entity.
+        """
+        self.update_relation_data(
+            relation_id,
+            {"entity-name": entity_name, "entity-password": entity_password},
+        )
+
+    def set_tls(self, relation_id: int, tls: str) -> None:
+        """Set whether TLS is enabled.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            tls: whether TLS is enabled (True or False).
+        """
+        self.update_relation_data(relation_id, {"tls": tls})
+
+    def set_tls_ca(self, relation_id: int, tls_ca: str) -> None:
+        """Set the TLS CA in the application relation databag.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            tls_ca: TLS certification authority.
+        """
+        self.update_relation_data(relation_id, {"tls-ca": tls_ca})
+
+    @leader_only
+    def get_statuses(self, relation_id: int) -> Dict[int, RelationStatus]:
+        """Return all currently active statuses on this relation. Can only be called on leader units.
+
+        Args:
+            relation_id (int): the identifier for a particular relation.
+
+        Returns:
+            Dict[int, RelationStatus]: A mapping of status code to RelationStatus instances.
+        """
+        raw = self.fetch_my_relation_field(relation_id, STATUS_FIELD) or "[]"
+
+        return {item["code"]: RelationStatus(**item) for item in json.loads(raw)}
+
+    @overload
+    def raise_status(self, relation_id: int, status: int) -> None: ...
+
+    @overload
+    def raise_status(self, relation_id: int, status: RelationStatusDict) -> None: ...
+
+    @overload
+    def raise_status(self, relation_id: int, status: RelationStatus) -> None: ...
+
+    def raise_status(
+        self, relation_id: int, status: Union[RelationStatus, RelationStatusDict, int]
+    ) -> None:
+        """Raise a status on the relation. Can only be called on leader units.
+
+        Args:
+            relation_id (int): the identifier for a particular relation.
+ status (RelationStatus | RelationStatusDict | int): A representation of the status being raised, + which could be either a RelationStatus, an appropriate dict, or the numeric status code. + + Raises: + ValueError: If the status provided is not correctly formatted. + """ + if isinstance(status, int): + # we expect the status schema to be defined in this case. + if status not in self._status_schema: + raise KeyError(f"Status code [{status}] not defined.") + _status = self._status_schema[status] + elif isinstance(status, dict): + _status = RelationStatus(**status) + elif isinstance(status, RelationStatus): + _status = status + else: + raise ValueError( + "The status should be either a RelationStatus, an appropriate dict, or the numeric status code." + ) + + statuses = self.get_statuses(relation_id) + statuses.update({_status.code: _status}) + serialized = json.dumps([asdict(statuses[k]) for k in sorted(statuses)]) + self.update_relation_data(relation_id, {STATUS_FIELD: serialized}) + + def resolve_status(self, relation_id: int, status_code: int) -> None: + """Set a previously raised status as resolved. + + Args: + relation_id (int): the identifier for a particular relation. + status_code (int): the numeric code of the resolved status. + """ + statuses = self.get_statuses(relation_id) + if status_code not in statuses: + logger.error(f"Status [{status_code}] has never been raised before.") + return + + statuses.pop(status_code) + serialized = json.dumps([asdict(statuses[k]) for k in sorted(statuses)]) + self.update_relation_data(relation_id, {STATUS_FIELD: serialized}) + + def clear_statuses(self, relation_id: int) -> None: + """Clear all previously raised statuses. + + Args: + relation_id (int): the identifier for a particular relation. + """ + self.delete_relation_data(relation_id, [STATUS_FIELD]) + + # Public functions -- inherited + + fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) + fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) + + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(relation, relation.app, PROV_SECRET_FIELDS) + if requested_secrets is not None: + self._local_secret_fields = requested_secrets + + if provided_secrets is not None: + self._remote_secret_fields = provided_secrets + + +class RequirerData(Data): + """Requirer-side of the relation.""" + + SECRET_FIELDS = [ + "username", + "password", + "tls", + "tls-ca", + "uris", + "read-only-uris", + "entity-name", + "entity-password", + ] + + def __init__( + self, + model, + relation_name: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + requested_entity_secret: Optional[str] = None, + requested_entity_name: Optional[str] = None, + requested_entity_password: Optional[str] = None, + prefix_matching: Optional[str] = None, + ): + """Manager of base client relations.""" + super().__init__(model, relation_name) + self.extra_user_roles = extra_user_roles + self.extra_group_roles = extra_group_roles + self.entity_type = entity_type + self.entity_permissions = entity_permissions + self.requested_entity_secret = requested_entity_secret + self.requested_entity_name = requested_entity_name + self.requested_entity_password = requested_entity_password + 
self.prefix_matching = prefix_matching + + if ( + self.requested_entity_secret or self.requested_entity_name + ) and not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + if self.requested_entity_secret and ( + self.requested_entity_name or self.requested_entity_password + ): + raise IllegalOperationError("Unable to use provided and automated entity name secret") + + if self.requested_entity_password and not self.requested_entity_name: + raise IllegalOperationError("Unable to set entity password without an entity name") + + self._validate_entity_type() + self._validate_entity_permissions() + + self._remote_secret_fields = list(self.SECRET_FIELDS) + self._local_secret_fields = [ + field + for field in self.SECRET_LABEL_MAP.keys() + if field not in self._remote_secret_fields + ] + if additional_secret_fields: + self._remote_secret_fields += additional_secret_fields + self.data_component = self.local_unit + + # Internal functions + + def _is_resource_created_for_relation(self, relation: Relation) -> bool: + if not relation.app: + return False + + data = self.fetch_relation_data( + [relation.id], + ["username", "password", "entity-name", "entity-password"], + ).get(relation.id, {}) + + return any( + [ + all(bool(data.get(field)) for field in ("username", "password")), + all(bool(data.get(field)) for field in ("entity-name",)), + ] + ) + + def _validate_entity_type(self) -> None: + """Validates the consistency of the provided entity-type and its extra roles.""" + if self.entity_type and self.entity_type not in {ENTITY_USER, ENTITY_GROUP}: + raise ValueError("Invalid entity-type. Possible values are USER and GROUP") + + if self.entity_type == ENTITY_USER and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == ENTITY_GROUP and self.extra_user_roles: + raise ValueError("Inconsistent entity information. Use extra_group_roles instead") + + def _validate_entity_permissions(self) -> None: + """Validates whether the provided entity permissions follow the right JSON format.""" + if not self.entity_permissions: + return + + accepted_keys = {"resource_name", "resource_type", "privileges"} + + try: + permissions = json.loads(self.entity_permissions) + for permission in permissions: + if permission.keys() != accepted_keys: + raise ValueError("Invalid entity permissions format. See accepted keys") + except json.decoder.JSONDecodeError: + raise ValueError("Invalid entity permissions format. It must be JSON format") + + # Public functions + + def is_resource_created(self, relation_id: Optional[int] = None) -> bool: + """Check if the resource has been created. + + This function can be used to check if the Provider answered with data in the charm code + when outside an event callback. 
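+
+        For example (illustrative): once every related provider has shared credentials,
+        is_resource_created() returns True.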
+ + Args: + relation_id (int, optional): When provided the check is done only for the relation id + provided, otherwise the check is done for all relations + + Returns: + True or False + + Raises: + IndexError: If relation_id is provided but that relation does not exist + """ + if relation_id is not None: + try: + relation = [relation for relation in self.relations if relation.id == relation_id][ + 0 + ] + return self._is_resource_created_for_relation(relation) + except IndexError: + raise IndexError(f"relation id {relation_id} cannot be accessed") + else: + return ( + all( + self._is_resource_created_for_relation(relation) for relation in self.relations + ) + if self.relations + else False + ) + + # Public functions -- inherited + + fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) + fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) + + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, self.local_unit, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(relation, self.local_unit, PROV_SECRET_FIELDS) + if requested_secrets: + self._remote_secret_fields = requested_secrets + + if provided_secrets: + self._local_secret_fields = provided_secrets + + +class StatusEventBase(RelationEvent): + """Base class for relation status change events.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + status: RelationStatus, + app: Optional[Application] = None, + unit: Optional[Unit] = None, + ): + super().__init__(handle, relation, app=app, unit=unit) + self.status = status + + def snapshot(self) -> dict: + """Return a snapshot of the event.""" + return super().snapshot() | {"status": json.dumps(asdict(self.status))} + + def restore(self, snapshot: dict): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.status = RelationStatus(**json.loads(snapshot["status"])) + + @property + def active_statuses(self) -> List[RelationStatus]: + """Returns a list of all currently active statuses on this relation.""" + if not self.relation.app: + return [] + + raw = json.loads(self.relation.data[self.relation.app].get(STATUS_FIELD, "[]")) + + return [RelationStatus(**item) for item in raw] + + +class StatusRaisedEvent(StatusEventBase): + """Event emitted on the requirer when a new status is being raised by the provider on relation.""" + + +class StatusResolvedEvent(StatusEventBase): + """Event emitted on the requirer when a status is marked as resolved by the provider on relation.""" + + +class RequirerCharmEvents(CharmEvents): + """Base events for data requirer charms.""" + + status_raised = EventSource(StatusRaisedEvent) + status_resolved = EventSource(StatusResolvedEvent) + + +class RequirerEventHandlers(EventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + def _main_credentials_shared(self, diff: Diff) -> bool: + """Whether the relation data-bag contains username / password keys.""" + user_secret = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + return any( + [ + user_secret in diff.added, + "username" in diff.added and "password" in diff.added, + ] + ) + + def _entity_credentials_shared(self, diff: Diff) -> bool: + """Whether the relation data-bag contains rolename / password keys.""" + entity_secret = 
self.relation_data._generate_secret_field_name(SECRET_GROUPS.ENTITY) + return any( + [ + entity_secret in diff.added, + "entity-name" in diff.added, + ] + ) + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + if not self.relation_data.local_unit.is_leader(): + return + + if self.relation_data.remote_secret_fields: + if self.relation_data.SCOPE == Scope.APP: + set_encoded_field( + event.relation, + self.relation_data.local_app, + REQ_SECRET_FIELDS, + self.relation_data.remote_secret_fields, + ) + + set_encoded_field( + event.relation, + self.relation_data.local_unit, + REQ_SECRET_FIELDS, + self.relation_data.remote_secret_fields, + ) + + if self.relation_data.local_secret_fields: + if self.relation_data.SCOPE == Scope.APP: + set_encoded_field( + event.relation, + self.relation_data.local_app, + PROV_SECRET_FIELDS, + self.relation_data.local_secret_fields, + ) + set_encoded_field( + event.relation, + self.relation_data.local_unit, + PROV_SECRET_FIELDS, + self.relation_data.local_secret_fields, + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Retrieve old statuses from "data" + old_data = get_encoded_dict(event.relation, self.relation_data.local_unit, "data") or {} + old_statuses = json.loads(old_data.get(STATUS_FIELD, "[]")) + previous_codes = {status.get("code") for status in old_statuses} + + # Compute current statuses + current_statuses = json.loads( + self.relation_data.fetch_relation_field(event.relation.id, STATUS_FIELD) or "[]" + ) + current_codes = {status.get("code") for status in current_statuses} + + # Detect changes + raised = current_codes - previous_codes + resolved = previous_codes - current_codes + + for status_code in raised: + logger.debug(f"Status [{status_code}] raised") + _status = next(s for s in current_statuses if s["code"] == status_code) + _status_instance = RelationStatus(**_status) + getattr(self.on, "status_raised").emit( + event.relation, + status=_status_instance, + app=event.app, + unit=event.unit, + ) + + for status_code in resolved: + logger.debug(f"Status [{status_code}] resolved") + _status = next(s for s in old_statuses if s["code"] == status_code) + _status_instance = RelationStatus(**_status) + getattr(self.on, "status_resolved").emit( + event.relation, + status=_status_instance, + app=event.app, + unit=event.unit, + ) + + +class ProviderEventHandlers(EventHandlers): + """Provider-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: ProviderData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + @staticmethod + def _validate_entity_consistency(event: RelationEvent, diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
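+
+        For example (illustrative): a requirer switching entity-type from USER to
+        GROUP on an established relation makes the check below raise a ValueError.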
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + # Event handlers + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + requested_secrets = get_encoded_list(event.relation, event.relation.app, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(event.relation, event.relation.app, PROV_SECRET_FIELDS) + if requested_secrets is not None: + self.relation_data._local_secret_fields = requested_secrets + + if provided_secrets is not None: + self.relation_data._remote_secret_fields = provided_secrets + + +################################################################################ +# Peer Relation Data +################################################################################ + + +class DataPeerData(RequirerData, ProviderData): + """Represents peer relations data.""" + + SECRET_FIELDS = [] + SECRET_FIELD_NAME = "internal_secret" + SECRET_LABEL_MAP = {} + + def __init__( + self, + model, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + ): + RequirerData.__init__( + self, + model=model, + relation_name=relation_name, + additional_secret_fields=additional_secret_fields, + ) + self.secret_field_name = secret_field_name if secret_field_name else self.SECRET_FIELD_NAME + self.deleted_label = deleted_label + self._secret_label_map = {} + + # Legacy information holders + self._legacy_labels = [] + self._legacy_secret_uri = None + + # Secrets that are being dynamically added within the scope of this event handler run + self._new_secrets = [] + self._additional_secret_group_mapping = additional_secret_group_mapping + + for group, fields in additional_secret_group_mapping.items(): + if group not in SECRET_GROUPS.groups(): + setattr(SECRET_GROUPS, group, group) + for field in fields: + secret_group = SECRET_GROUPS.get_group(group) + internal_field = self._field_to_internal_name(field, secret_group) + self._secret_label_map.setdefault(group, []).append(internal_field) + self._remote_secret_fields.append(internal_field) + + @property + def scope(self) -> Optional[Scope]: + """Turn component information into Scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + + @property + def secret_label_map(self) -> Dict[str, str]: + """Property storing secret mappings.""" + return self._secret_label_map + + @property + def static_secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return self._remote_secret_fields + + @property + def local_secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return ( + self.static_secret_fields if self.static_secret_fields else self.current_secret_fields + ) + + @property + def current_secret_fields(self) -> List[str]: + """Helper method to get all currently existing secret fields (added statically or dynamically).""" + if not self.secrets_enabled: + return [] + + if len(self._model.relations[self.relation_name]) > 1: + raise ValueError(f"More than one peer relation on {self.relation_name}") + + 
relation = self._model.relations[self.relation_name][0]
+        fields = []
+
+        ignores = [
+            SECRET_GROUPS.get_group("user"),
+            SECRET_GROUPS.get_group("tls"),
+            SECRET_GROUPS.get_group("mtls"),
+            SECRET_GROUPS.get_group("entity"),
+        ]
+        for group in SECRET_GROUPS.groups():
+            if group in ignores:
+                continue
+            if content := self._get_group_secret_contents(relation, group):
+                fields += list(content.keys())
+        return list(set(fields) | set(self._new_secrets))
+
+    @dynamic_secrets_only
+    def set_secret(
+        self,
+        relation_id: int,
+        field: str,
+        value: str,
+        group_mapping: Optional[SecretGroup] = None,
+    ) -> None:
+        """Public interface method to add a Relation Data field specifically as a Juju Secret.
+
+        Args:
+            relation_id: ID of the relation
+            field: The secret field that is to be added
+            value: The string value of the secret
+            group_mapping: The name of the "secret group", in case the field is to be added to an existing secret
+        """
+        self._legacy_apply_on_update([field])
+
+        full_field = self._field_to_internal_name(field, group_mapping)
+        if self.secrets_enabled and full_field not in self.current_secret_fields:
+            self._new_secrets.append(full_field)
+        if self.valid_field_pattern(field, full_field):
+            self.update_relation_data(relation_id, {full_field: value})
+
+    # Unlike for set_secret(), there's no harm using this operation with static secrets
+    # The restriction is only added to keep the concept clear
+    @dynamic_secrets_only
+    def get_secret(
+        self,
+        relation_id: int,
+        field: str,
+        group_mapping: Optional[SecretGroup] = None,
+    ) -> Optional[str]:
+        """Public interface method to fetch secrets only."""
+        self._legacy_apply_on_fetch()
+
+        full_field = self._field_to_internal_name(field, group_mapping)
+        if (
+            self.secrets_enabled
+            and full_field not in self.current_secret_fields
+            and field not in self.current_secret_fields
+        ):
+            return
+        if self.valid_field_pattern(field, full_field):
+            return self.fetch_my_relation_field(relation_id, full_field)
+
+    @dynamic_secrets_only
+    def delete_secret(
+        self,
+        relation_id: int,
+        field: str,
+        group_mapping: Optional[SecretGroup] = None,
+    ) -> Optional[str]:
+        """Public interface method to delete secrets only."""
+        self._legacy_apply_on_delete([field])
+
+        full_field = self._field_to_internal_name(field, group_mapping)
+        if self.secrets_enabled and full_field not in self.current_secret_fields:
+            logger.warning(f"Secret {field} from group {group_mapping} was not found")
+            return
+
+        if self.valid_field_pattern(field, full_field):
+            self.delete_relation_data(relation_id, [full_field])
+
+    ##########################################################################
+    # Helpers
+    ##########################################################################
+
+    @staticmethod
+    def _field_to_internal_name(field: str, group: Optional[SecretGroup]) -> str:
+        if not group or group == SECRET_GROUPS.EXTRA:
+            return field
+        return f"{field}{GROUP_SEPARATOR}{group}"
+
+    @staticmethod
+    def _internal_name_to_field(name: str) -> Tuple[str, SecretGroup]:
+        parts = name.split(GROUP_SEPARATOR)
+        if not len(parts) > 1:
+            return (parts[0], SECRET_GROUPS.EXTRA)
+        secret_group = SECRET_GROUPS.get_group(parts[1])
+        if not secret_group:
+            raise ValueError(f"Invalid secret field {name}")
+        return (parts[0], secret_group)
+
+    def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]:
+        """Helper function to arrange secret mappings under their group.
+
+        NOTE: All unrecognized items end up in the 'extra' secret bucket.
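+        For example, an internal name built by _field_to_internal_name() as
+        f"password{GROUP_SEPARATOR}user" is grouped under the 'user' SecretGroup,
+        while a plain "password" with no group falls into the 'extra' bucket.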
+        Make sure only secret fields are passed!
+        """
+        secret_fieldnames_grouped = {}
+        for key in secret_fields:
+            field, group = self._internal_name_to_field(key)
+            secret_fieldnames_grouped.setdefault(group, []).append(field)
+        return secret_fieldnames_grouped
+
+    def _content_for_secret_group(
+        self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup
+    ) -> Dict[str, str]:
+        """Select <field>: <value> pairs from input that belong to this particular Secret group."""
+        if group_mapping == SECRET_GROUPS.EXTRA:
+            return {k: v for k, v in content.items() if k in self.local_secret_fields}
+        return {
+            self._internal_name_to_field(k)[0]: v
+            for k, v in content.items()
+            if k in self.local_secret_fields
+        }
+
+    def valid_field_pattern(self, field: str, full_field: str) -> bool:
+        """Check that no secret group is used while secrets are not enabled.
+
+        Secret groups cannot be used with Juju versions that do not yet support secrets.
+        """
+        if not self.secrets_enabled and full_field != field:
+            logger.error(
+                f"Can't access {full_field}: no secrets available (i.e. no secret groups either)."
+            )
+            return False
+        return True
+
+    def _load_secrets_from_databag(self, relation: Relation) -> None:
+        """Load secrets from the databag."""
+        requested_secrets = get_encoded_list(relation, self.component, REQ_SECRET_FIELDS)
+        provided_secrets = get_encoded_list(relation, self.component, PROV_SECRET_FIELDS)
+        if requested_secrets:
+            self._remote_secret_fields = requested_secrets
+
+        if provided_secrets:
+            self._local_secret_fields = provided_secrets
+
+    ##########################################################################
+    # Backwards compatibility / Upgrades
+    ##########################################################################
+    # These functions are used to keep backwards compatibility on upgrades
+    # Policy:
+    # All data is kept intact until the first write operation. (This allows a minimal
+    # grace period during which rollbacks are fully safe. For more info see spec.)
+    # All data involves:
+    #   - databag
+    #   - secrets content
+    #   - secret labels (!!!)
+    # Legacy functions must return None, and leave an equally consistent state whether
+    # they are executed or skipped (as a sufficiently new execution environment may
+    # not require them).
+
+    # Full legacy stack for each operation
+
+    def _legacy_apply_on_fetch(self) -> None:
+        """All legacy functions to be applied on fetch."""
+        relation = self._model.relations[self.relation_name][0]
+        self._legacy_compat_generate_prev_labels()
+        self._legacy_compat_secret_uri_from_databag(relation)
+
+    def _legacy_apply_on_update(self, fields) -> None:
+        """All legacy functions to be applied on update."""
+        relation = self._model.relations[self.relation_name][0]
+        self._legacy_compat_generate_prev_labels()
+        self._legacy_compat_secret_uri_from_databag(relation)
+        self._legacy_migration_remove_secret_from_databag(relation, fields)
+        self._legacy_migration_remove_secret_field_name_from_databag(relation)
+
+    def _legacy_apply_on_delete(self, fields) -> None:
+        """All legacy functions to be applied on delete."""
+        relation = self._model.relations[self.relation_name][0]
+        self._legacy_compat_generate_prev_labels()
+        self._legacy_compat_secret_uri_from_databag(relation)
+        self._legacy_compat_check_deleted_label(relation, fields)
+
+    # Compatibility
+
+    @legacy_apply_from_version(18)
+    def _legacy_compat_check_deleted_label(self, relation, fields) -> None:
+        """Helper function for legacy behavior.
+
+        As long as https://bugs.launchpad.net/juju/+bug/2028094 wasn't fixed,
+        we did not delete fields but rather kept them in the secret with a string value
+        expressing invalidity. This function is maintaining that behavior when needed.
+        """
+        if not self.deleted_label:
+            return
+
+        current_data = self.fetch_my_relation_data([relation.id], fields)
+        if current_data is not None:
+            # Check if the secret we want to delete actually exists
+            # Given the "deleted label", here we can't rely on the default mechanism (i.e. 'key not found')
+            if non_existent := (set(fields) & set(self.local_secret_fields)) - set(
+                current_data.get(relation.id, [])
+            ):
+                logger.debug(
+                    "Non-existing secret %s was attempted to be removed.",
+                    ", ".join(non_existent),
+                )
+
+    @legacy_apply_from_version(18)
+    def _legacy_compat_secret_uri_from_databag(self, relation) -> None:
+        """Fetching the secret URI from the databag, in case stored there."""
+        self._legacy_secret_uri = relation.data[self.component].get(
+            self._generate_secret_field_name(), None
+        )
+
+    @legacy_apply_from_version(34)
+    def _legacy_compat_generate_prev_labels(self) -> None:
+        """Generator for legacy secret label names, for backwards compatibility.
+
+        Secret label is part of the data that MUST be maintained across rolling upgrades.
+        In case there may be a change on a secret label, the old label must be recognized
+        after upgrades, and left intact until the first write operation -- when we roll over
+        to the new label.
+
+        This function keeps "memory" of previously used secret labels.
+        NOTE: Return value takes decorator into account -- all 'legacy' functions may return `None`
+
+        v0.34 (rev69): Fixing issue https://github.com/canonical/data-platform-libs/issues/155
+        meant moving from '<app_name>.<scope>' (i.e. 'mysql.app', 'mysql.unit')
+        to labels '<relation_name>.<app_name>.<scope>' (like 'peer.mysql.app')
+        """
+        if self._legacy_labels:
+            return
+
+        result = []
+        members = [self._model.app.name]
+        if self.scope:
+            members.append(self.scope.value)
+        result.append(f"{'.'.join(members)}")
+        self._legacy_labels = result
+
+    # Migration
+
+    @legacy_apply_from_version(18)
+    def _legacy_migration_remove_secret_from_databag(self, relation, fields: List[str]) -> None:
+        """For Rolling Upgrades -- when moving from databag to secrets usage.
+
+        Practically what happens here is to remove stuff from the databag that is
+        to be stored in secrets.
+        """
+        if not self.local_secret_fields:
+            return
+
+        secret_fields_passed = set(self.local_secret_fields) & set(fields)
+        for field in secret_fields_passed:
+            if self._fetch_relation_data_without_secrets(self.component, relation, [field]):
+                self._delete_relation_data_without_secrets(self.component, relation, [field])
+
+    @legacy_apply_from_version(18)
+    def _legacy_migration_remove_secret_field_name_from_databag(self, relation) -> None:
+        """Making sure that the old databag URI is gone.
+
+        This action should not be executed more than once.
+
+        There was a phase (before moving secrets usage to libs) when charms saved the peer
+        secret URI to the databag, and used this URI from then on to retrieve their secret.
+        When upgrading to charm versions using this library, we need to add a label to the
+        secret and access it via label from then on, and remove the old traces from the databag.
+        """
+        # Nothing to do if 'internal-secret' is not in the databag
+        if not (relation.data[self.component].get(self._generate_secret_field_name())):
+            return
+
+        # Making sure that the secret receives its label
+        # (This should have happened by the time we get here; this is rather an extra safety measure.)
+        secret = self._get_relation_secret(relation.id)
+
+        # Either app scope secret with leader executing, or unit scope secret
+        leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader()
+        if secret and leader_or_unit_scope:
+            # Databag reference to the secret URI can be removed, now that it's labelled
+            relation.data[self.component].pop(self._generate_secret_field_name(), None)
+
+    ##########################################################################
+    # Event handlers
+    ##########################################################################
+
+    def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
+        """Event emitted when the relation has changed."""
+        pass
+
+    def _on_secret_changed_event(self, event: SecretChangedEvent) -> None:
+        """Event emitted when the secret has changed."""
+        pass
+
+    ##########################################################################
+    # Overrides of Relation Data handling functions
+    ##########################################################################
+
+    def _generate_secret_label(
+        self, relation_name: str, relation_id: int, group_mapping: SecretGroup
+    ) -> str:
+        members = [relation_name, self._model.app.name]
+        if self.scope:
+            members.append(self.scope.value)
+        if group_mapping != SECRET_GROUPS.EXTRA:
+            members.append(group_mapping)
+        return f"{'.'.join(members)}"
+
+    def _generate_secret_field_name(self, group_mapping: SecretGroup = SECRET_GROUPS.EXTRA) -> str:
+        """Generate the secret field name to use within a relation context."""
+        return f"{self.secret_field_name}"
+
+    @juju_secrets_only
+    def _get_relation_secret(
+        self,
+        relation_id: int,
+        group_mapping: SecretGroup = SECRET_GROUPS.EXTRA,
+        relation_name: Optional[str] = None,
+    ) -> Optional[CachedSecret]:
+        """Retrieve a Juju Secret specifically for peer relations.
+
+        As this code may be executed within a rolling upgrade, when secrets may still
+        need to be migrated from the databag to labels, we make sure to stick the
+        correct label on the secret and clean up the local databag.
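+
+        Labels follow the pattern produced by _generate_secret_label() above,
+        e.g. "peers.myapp.app" for an app-scoped peer secret (names illustrative).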
+        """
+        if not relation_name:
+            relation_name = self.relation_name
+
+        relation = self._model.get_relation(relation_name, relation_id)
+        if not relation:
+            return
+
+        label = self._generate_secret_label(relation_name, relation_id, group_mapping)
+
+        # The URI or legacy label is only applied when moving a single legacy secret to a (new) label
+        if group_mapping == SECRET_GROUPS.EXTRA:
+            # Fetching the secret with fallback to URI (in case label is not yet known)
+            # The label would be "stuck" on the secret in case it is found
+            return self.secrets.get(
+                label, self._legacy_secret_uri, legacy_labels=self._legacy_labels
+            )
+        return self.secrets.get(label)
+
+    def _get_group_secret_contents(
+        self,
+        relation: Relation,
+        group: SecretGroup,
+        secret_fields: Union[Set[str], List[str]] = [],
+    ) -> Dict[str, str]:
+        """Helper function to retrieve collective, requested contents of a secret."""
+        secret_fields = [self._internal_name_to_field(k)[0] for k in secret_fields]
+        result = super()._get_group_secret_contents(relation, group, secret_fields)
+        if self.deleted_label:
+            result = {key: result[key] for key in result if result[key] != self.deleted_label}
+        if self._additional_secret_group_mapping:
+            return {self._field_to_internal_name(key, group): result[key] for key in result}
+        return result
+
+    @either_static_or_dynamic_secrets
+    def _fetch_my_specific_relation_data(
+        self, relation: Relation, fields: Optional[List[str]]
+    ) -> Dict[str, str]:
+        """Fetch data available (directly or indirectly -- i.e. secrets) from the relation for owner/this_app."""
+        return self._fetch_relation_data_with_secrets(
+            self.component, self.local_secret_fields, relation, fields
+        )
+
+    @either_static_or_dynamic_secrets
+    def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None:
+        """Update data available (directly or indirectly -- i.e. secrets) from the relation for owner/this_app."""
+        self._load_secrets_from_databag(relation)
+
+        _, normal_fields = self._process_secret_fields(
+            relation,
+            self.local_secret_fields,
+            list(data),
+            self._add_or_update_relation_secrets,
+            data=data,
+            uri_to_databag=False,
+        )
+
+        normal_content = {k: v for k, v in data.items() if k in normal_fields}
+        self._update_relation_data_without_secrets(self.component, relation, normal_content)
+
+    @either_static_or_dynamic_secrets
+    def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None:
+        """Delete data available (directly or indirectly -- i.e. 
secrets) from the relation for owner/this_app.""" + self._load_secrets_from_databag(relation) + if self.local_secret_fields and self.deleted_label: + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + fields, + self._update_relation_secret, + data=dict.fromkeys(fields, self.deleted_label), + ) + else: + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + fields, + self._delete_relation_secret, + fields=fields, + ) + self._delete_relation_data_without_secrets(self.component, relation, list(normal_fields)) + + def fetch_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Dict[int, Dict[str, str]]: + """This method makes no sense for a Peer Relation.""" + raise NotImplementedError( + "Peer Relation only supports 'self-side' fetch methods: " + "fetch_my_relation_data() and fetch_my_relation_field()" + ) + + def fetch_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """This method makes no sense for a Peer Relation.""" + raise NotImplementedError( + "Peer Relation only supports 'self-side' fetch methods: " + "fetch_my_relation_data() and fetch_my_relation_field()" + ) + + ########################################################################## + # Public functions -- inherited + ########################################################################## + + fetch_my_relation_data = Data.fetch_my_relation_data + fetch_my_relation_field = Data.fetch_my_relation_field + + +class DataPeerEventHandlers(RequirerEventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + pass + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + +class DataPeer(DataPeerData, DataPeerEventHandlers): + """Represents peer relations.""" + + def __init__( + self, + charm, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + additional_secret_fields, + additional_secret_group_mapping, + secret_field_name, + deleted_label, + ) + DataPeerEventHandlers.__init__(self, charm, self, unique_key) + + +class DataPeerUnitData(DataPeerData): + """Unit data abstraction representation.""" + + SCOPE = Scope.UNIT + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class DataPeerUnit(DataPeerUnitData, DataPeerEventHandlers): + """Unit databag representation.""" + + def __init__( + self, + charm, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + additional_secret_fields, + additional_secret_group_mapping, + secret_field_name, + deleted_label, + ) + 
DataPeerEventHandlers.__init__(self, charm, self, unique_key)
+
+
+class DataPeerOtherUnitData(DataPeerUnitData):
+    """Unit data abstraction representation."""
+
+    def __init__(self, unit: Unit, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.local_unit = unit
+        self.component = unit
+
+    def update_relation_data(self, relation_id: int, data: dict) -> None:
+        """This method makes no sense for an Other Peer Relation."""
+        raise NotImplementedError("It's not possible to update data of another unit.")
+
+    def delete_relation_data(self, relation_id: int, fields: List[str]) -> None:
+        """This method makes no sense for an Other Peer Relation."""
+        raise NotImplementedError("It's not possible to delete data of another unit.")
+
+
+class DataPeerOtherUnitEventHandlers(DataPeerEventHandlers):
+    """Requires-side of the relation."""
+
+    def __init__(self, charm: CharmBase, relation_data: DataPeerUnitData):
+        """Manager of base client relations."""
+        unique_key = f"{relation_data.relation_name}-{relation_data.local_unit.name}"
+        super().__init__(charm, relation_data, unique_key=unique_key)
+
+
+class DataPeerOtherUnit(DataPeerOtherUnitData, DataPeerOtherUnitEventHandlers):
+    """Unit databag representation for a unit other than the executing one."""
+
+    def __init__(
+        self,
+        unit: Unit,
+        charm: CharmBase,
+        relation_name: str,
+        additional_secret_fields: Optional[List[str]] = [],
+        additional_secret_group_mapping: Dict[str, str] = {},
+        secret_field_name: Optional[str] = None,
+        deleted_label: Optional[str] = None,
+    ):
+        DataPeerOtherUnitData.__init__(
+            self,
+            unit,
+            charm.model,
+            relation_name,
+            additional_secret_fields,
+            additional_secret_group_mapping,
+            secret_field_name,
+            deleted_label,
+        )
+        DataPeerOtherUnitEventHandlers.__init__(self, charm, self)
+
+
+################################################################################
+# Cross-charm Relations Data Handling and Events
+################################################################################
+
+# Generic events
+
+
+class RelationEventWithSecret(RelationEvent):
+    """Base class for Relation Events that need to handle secrets."""
+
+    @property
+    def _secrets(self) -> dict:
+        """Caching secrets to avoid fetching them each time a field is referred.
+
+        DON'T USE the encapsulated helper variable outside of this function
+        """
+        if not hasattr(self, "_cached_secrets"):
+            self._cached_secrets = {}
+        return self._cached_secrets
+
+    def _get_secret(self, group) -> Optional[Dict[str, str]]:
+        """Retrieving secrets."""
+        if not self.app:
+            return
+        if not self._secrets.get(group):
+            self._secrets[group] = None
+            secret_field = f"{PROV_SECRET_PREFIX}{group}"
+            if secret_uri := self.relation.data[self.app].get(secret_field):
+                secret = self.framework.model.get_secret(id=secret_uri)
+                self._secrets[group] = secret.get_content()
+        return self._secrets[group]
+
+    @property
+    def secrets_enabled(self):
+        """Is this Juju version allowing for Secrets usage?"""
+        return JujuVersion.from_environ().has_secrets
+
+
+class EntityProvidesEvent(RelationEvent):
+    """Base class for data events."""
+
+    @property
+    def extra_user_roles(self) -> Optional[str]:
+        """Returns the extra user roles that were requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("extra-user-roles")
+
+    @property
+    def extra_group_roles(self) -> Optional[str]:
+        """Returns the extra group roles that were requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("extra-group-roles")
+
+    @property
+    def entity_type(self) -> Optional[str]:
+        """Returns the entity_type that was requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("entity-type")
+
+    @property
+    def entity_permissions(self) -> Optional[str]:
+        """Returns the entity_permissions that were requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("entity-permissions")
+
+
+class EntityRequiresEvent(RelationEventWithSecret):
+    """Base class for authentication fields for events.
+
+    The amount of logic added here is not ideal -- but this was the only way to preserve
+    the interface when moving to Juju Secrets
+    """
+
+    @property
+    def entity_name(self) -> Optional[str]:
+        """Returns the name for the created entity."""
+        if not self.relation.app:
+            return None
+
+        if self.secrets_enabled:
+            secret = self._get_secret("entity")
+            if secret:
+                return secret.get("entity-name")
+
+        return self.relation.data[self.relation.app].get("entity-name")
+
+    @property
+    def entity_password(self) -> Optional[str]:
+        """Returns the password for the created entity."""
+        if not self.relation.app:
+            return None
+
+        if self.secrets_enabled:
+            secret = self._get_secret("entity")
+            if secret:
+                return secret.get("entity-password")
+
+        return self.relation.data[self.relation.app].get("entity-password")
+
+
+class AuthenticationEvent(RelationEventWithSecret):
+    """Base class for authentication fields for events.
+ + The amount of logic added here is not ideal -- but this was the only way to preserve + the interface when moving to Juju Secrets + """ + + @property + def username(self) -> Optional[str]: + """Returns the created username.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("username") + + return self.relation.data[self.relation.app].get("username") + + @property + def password(self) -> Optional[str]: + """Returns the password for the created user.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("password") + + return self.relation.data[self.relation.app].get("password") + + @property + def tls(self) -> Optional[str]: + """Returns whether TLS is configured.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("tls") + if secret: + return secret.get("tls") + + return self.relation.data[self.relation.app].get("tls") + + @property + def tls_ca(self) -> Optional[str]: + """Returns TLS CA.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("tls") + if secret: + return secret.get("tls-ca") + + return self.relation.data[self.relation.app].get("tls-ca") + + +# Database related events and fields + + +class DatabaseProvidesEvent(RelationEvent): + """Base class for database events.""" + + @property + def database(self) -> Optional[str]: + """Returns the database that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("database") + + +class DatabaseRequestedEvent(DatabaseProvidesEvent): + """Event emitted when a new database is requested for use on this relation.""" + + @property + def extra_user_roles(self) -> Optional[str]: + """Returns the extra user roles that were requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("extra-user-roles") + + @property + def external_node_connectivity(self) -> bool: + """Returns the requested external_node_connectivity field.""" + if not self.relation.app: + return False + + return ( + self.relation.data[self.relation.app].get("external-node-connectivity", "false") + == "true" + ) + + @property + def requested_entity_secret_content(self) -> Optional[Dict[str, Optional[str]]]: + """Returns the content of the requested entity secret.""" + names = None + if secret_uri := self.relation.data.get(self.relation.app, {}).get( + "requested-entity-secret" + ): + secret = self.framework.model.get_secret(id=secret_uri) + if content := secret.get_content(refresh=True): + if "entity-name" in content: + names = {content["entity-name"]: content.get("password")} + else: + logger.warning("Invalid requested-entity-secret: no entity name") + return names + + @property + def prefix_matching(self) -> Optional[str]: + """Returns the prefix matching strategy that were requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("prefix-matching") + + +class DatabaseEntityRequestedEvent(DatabaseProvidesEvent, EntityProvidesEvent): + """Event emitted when a new entity is requested for use on this relation.""" + + +class DatabaseEntityPermissionsChangedEvent(DatabaseProvidesEvent, EntityProvidesEvent): + """Event emitted when existing entity permissions are changed on this relation.""" + + +class DatabaseProvidesEvents(CharmEvents): + 
"""Database events. + + This class defines the events that the database can emit. + """ + + database_requested = EventSource(DatabaseRequestedEvent) + database_entity_requested = EventSource(DatabaseEntityRequestedEvent) + database_entity_permissions_changed = EventSource(DatabaseEntityPermissionsChangedEvent) + + +class DatabaseRequiresEvent(RelationEventWithSecret): + """Base class for database events.""" + + @property + def database(self) -> Optional[str]: + """Returns the database name.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("database") + + @property + def endpoints(self) -> Optional[str]: + """Returns a comma separated list of read/write endpoints. + + In VM charms, this is the primary's address. + In kubernetes charms, this is the service to the primary pod. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoints") + + @property + def read_only_endpoints(self) -> Optional[str]: + """Returns a comma separated list of read only endpoints. + + In VM charms, this is the address of all the secondary instances. + In kubernetes charms, this is the service to all replica pod instances. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("read-only-endpoints") + + @property + def replset(self) -> Optional[str]: + """Returns the replicaset name. + + MongoDB only. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("replset") + + @property + def uris(self) -> Optional[str]: + """Returns the connection URIs. + + MongoDB, Redis, OpenSearch. + """ + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("uris") + + return self.relation.data[self.relation.app].get("uris") + + @property + def read_only_uris(self) -> Optional[str]: + """Returns the readonly connection URIs.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("read-only-uris") + + return self.relation.data[self.relation.app].get("read-only-uris") + + @property + def version(self) -> Optional[str]: + """Returns the version of the database. + + Version as informed by the database daemon. 
+        """
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("version")
+
+    @property
+    def prefix_databases(self) -> Optional[List[str]]:
+        """Returns a list of databases matching a prefix."""
+        if not self.relation.app:
+            return None
+
+        if prefixed_databases := self.relation.data[self.relation.app].get("prefix-databases"):
+            return prefixed_databases.split(",")
+        return []
+
+
+class DatabaseCreatedEvent(AuthenticationEvent, DatabaseRequiresEvent):
+    """Event emitted when a new database is created for use on this relation."""
+
+
+class DatabaseEntityCreatedEvent(EntityRequiresEvent, DatabaseRequiresEvent):
+    """Event emitted when a new entity is created for use on this relation."""
+
+
+class DatabaseEndpointsChangedEvent(AuthenticationEvent, DatabaseRequiresEvent):
+    """Event emitted when the read/write endpoints are changed."""
+
+
+class DatabaseReadOnlyEndpointsChangedEvent(AuthenticationEvent, DatabaseRequiresEvent):
+    """Event emitted when the read only endpoints are changed."""
+
+
+class DatabasePrefixDatabasesChangedEvent(AuthenticationEvent, DatabaseRequiresEvent):
+    """Event emitted when the prefix databases are changed."""
+
+
+class DatabaseRequiresEvents(RequirerCharmEvents):
+    """Database events.
+
+    This class defines the events that the database can emit.
+    """
+
+    database_created = EventSource(DatabaseCreatedEvent)
+    database_entity_created = EventSource(DatabaseEntityCreatedEvent)
+    endpoints_changed = EventSource(DatabaseEndpointsChangedEvent)
+    read_only_endpoints_changed = EventSource(DatabaseReadOnlyEndpointsChangedEvent)
+    prefix_databases_changed = EventSource(DatabasePrefixDatabasesChangedEvent)
+
+
+# Database Provider and Requires
+
+
+class DatabaseProviderData(ProviderData):
+    """Provider-side data of the database relations."""
+
+    def __init__(
+        self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None
+    ) -> None:
+        super().__init__(model, relation_name, status_schema_path=status_schema_path)
+
+    def set_database(self, relation_id: int, database_name: str) -> None:
+        """Set database name.
+
+        This function writes in the application data bag, therefore,
+        only the leader unit can call it.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            database_name: database name.
+        """
+        self.update_relation_data(relation_id, {"database": database_name})
+
+    def set_prefix_databases(self, relation_id: int, databases: List[str]) -> None:
+        """Set a comma separated list of databases matching a prefix.
+
+        This function writes in the application data bag, therefore,
+        only the leader unit can call it.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            databases: list of database names matching the requested prefix.
+        """
+        self.update_relation_data(relation_id, {"prefix-databases": ",".join(sorted(databases))})
+
+    def set_endpoints(self, relation_id: int, connection_strings: str) -> None:
+        """Set database primary connections.
+
+        This function writes in the application data bag, therefore,
+        only the leader unit can call it.
+
+        In VM charms, only the primary's address should be passed as an endpoint.
+        In kubernetes charms, the service endpoint to the primary pod should be
+        passed as an endpoint.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+            connection_strings: database hosts and ports comma separated list.
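+
+        Example (addresses illustrative):
+            set_endpoints(relation.id, "10.1.0.5:5432,10.1.0.6:5432")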
+ """ + self.update_relation_data(relation_id, {"endpoints": connection_strings}) + + def set_read_only_endpoints(self, relation_id: int, connection_strings: str) -> None: + """Set database replicas connection strings. + + This function writes in the application data bag, therefore, + only the leader unit can call it. + + Args: + relation_id: the identifier for a particular relation. + connection_strings: database hosts and ports comma separated list. + """ + self.update_relation_data(relation_id, {"read-only-endpoints": connection_strings}) + + def set_replset(self, relation_id: int, replset: str) -> None: + """Set replica set name in the application relation databag. + + MongoDB only. + + Args: + relation_id: the identifier for a particular relation. + replset: replica set name. + """ + self.update_relation_data(relation_id, {"replset": replset}) + + def set_uris(self, relation_id: int, uris: str) -> None: + """Set the database connection URIs in the application relation databag. + + MongoDB, Redis, and OpenSearch only. + + Args: + relation_id: the identifier for a particular relation. + uris: connection URIs. + """ + self.update_relation_data(relation_id, {"uris": uris}) + + def set_read_only_uris(self, relation_id: int, uris: str) -> None: + """Set the database readonly connection URIs in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + uris: connection URIs. + """ + self.update_relation_data(relation_id, {"read-only-uris": uris}) + + def set_version(self, relation_id: int, version: str) -> None: + """Set the database version in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + version: database version. + """ + self.update_relation_data(relation_id, {"version": version}) + + def set_subordinated(self, relation_id: int) -> None: + """Raises the subordinated flag in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + """ + self.update_relation_data(relation_id, {"subordinated": "true"}) + + +class DatabaseProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the database relation handlers.""" + + on = DatabaseProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, charm: CharmBase, relation_data: DatabaseProviderData, unique_key: str = "" + ): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + # Just to calm down pyright, it can't parse that the same type is being used in the super() call above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit a database requested event if the setup key (database name) + # was added to the relation databag, but the entity-type key was not. + if "database" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "database_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + # Emit an entity requested event if the setup key (database name) + # was added to the relation databag, in addition to the entity-type key. + if "database" in diff.added and "entity-type" in diff.added: + getattr(self.on, "database_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (database name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "database" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "database_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + +class DatabaseProvides(DatabaseProviderData, DatabaseProviderEventHandlers): + """Provider-side of the database relations.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + DatabaseProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + DatabaseProviderEventHandlers.__init__(self, charm, self) + + +class DatabaseRequirerData(RequirerData): + """Requirer-side of the database relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + database_name: str, + extra_user_roles: Optional[str] = None, + relations_aliases: Optional[List[str]] = None, + additional_secret_fields: Optional[List[str]] = [], + external_node_connectivity: bool = False, + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + requested_entity_secret: Optional[str] = None, + requested_entity_name: Optional[str] = None, + requested_entity_password: Optional[str] = None, + prefix_matching: Optional[str] = None, + ): + """Manager of database client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + requested_entity_secret, + requested_entity_name, + requested_entity_password, + prefix_matching, + ) + self.database = database_name + self.relations_aliases = relations_aliases + self.external_node_connectivity = external_node_connectivity + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: optional relation index to check the database + (default: 0 - first relation). + + PostgreSQL only. + """ + # Psycopg 3 is imported locally to avoid the need of its package installation + # when relating to a database charm other than PostgreSQL. + import psycopg + + # Return False if no relation is established. + if len(self.relations) == 0: + return False + + relation_id = self.relations[relation_index].id + host = self.fetch_relation_field(relation_id, "endpoints") + + # Return False if there is no endpoint available. 
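+        # (A typical "endpoints" value looks like "10.1.0.5:5432,10.1.0.6:5432";
+        # addresses are illustrative. Only the first host is used for the
+        # connection check below.)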
+ if host is None: + return False + + host = host.split(":")[0] + + content = self.fetch_relation_data([relation_id], ["username", "password"]).get( + relation_id, {} + ) + user = content.get("username") + password = content.get("password") + + connection_string = ( + f"host='{host}' dbname='{self.database}' user='{user}' password='{password}'" + ) + try: + with psycopg.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", str(e) + ) + return False + + +class DatabaseRequirerEventHandlers(RequirerEventHandlers): + """Requires-side of the relation.""" + + on = DatabaseRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, charm: CharmBase, relation_data: DatabaseRequirerData, unique_key: str = "" + ): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + # Define custom event names for each alias. + if self.relation_data.relations_aliases: + # Ensure the number of aliases does not exceed the maximum + # of connections allowed in the specific relation. + relation_connection_limit = self.charm.meta.requires[ + self.relation_data.relation_name + ].limit + if len(self.relation_data.relations_aliases) != relation_connection_limit: + raise ValueError( + f"The number of aliases must match the maximum number of connections allowed in the relation. " + f"Expected {relation_connection_limit}, got {len(self.relation_data.relations_aliases)}" + ) + + if self.relation_data.relations_aliases: + for relation_alias in self.relation_data.relations_aliases: + self.on.define_event( + f"{relation_alias}_database_created", + DatabaseCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_database_entity_created", + DatabaseEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + DatabaseEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + DatabaseReadOnlyEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_prefix_databases_changed", + DatabasePrefixDatabasesChangedEvent, + ) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_data.relations_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_data.relation_name, relation_id) + if relation and relation.data[self.relation_data.local_unit].get("alias"): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). 
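+        # (Aliases are consumed in declaration order: the first alias not yet
+        # assigned to any relation is used below.)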
+        available_aliases = self.relation_data.relations_aliases[:]
+        for relation in self.charm.model.relations[self.relation_data.relation_name]:
+            alias = relation.data[self.relation_data.local_unit].get("alias")
+            if alias:
+                logger.debug("Alias %s was already assigned to relation %d", alias, relation.id)
+                available_aliases.remove(alias)
+
+        # Set the alias in the unit relation databag of the specific relation.
+        relation = self.charm.model.get_relation(self.relation_data.relation_name, relation_id)
+        if relation:
+            relation.data[self.relation_data.local_unit].update({"alias": available_aliases[0]})
+
+        # We also need to set the relation alias on the application level so that
+        # it is accessible via the juju show-unit command executed for a consumer application unit
+        if self.relation_data.local_unit.is_leader():
+            self.relation_data.update_relation_data(relation_id, {"alias": available_aliases[0]})
+
+    def _emit_aliased_event(self, event: RelationChangedEvent, event_name: str) -> None:
+        """Emit an aliased event to a particular relation if it has an alias.
+
+        Args:
+            event: the relation changed event that was received.
+            event_name: the name of the event to emit.
+        """
+        alias = self._get_relation_alias(event.relation.id)
+        if alias:
+            getattr(self.on, f"{alias}_{event_name}").emit(
+                event.relation, app=event.app, unit=event.unit
+            )
+
+    def _get_relation_alias(self, relation_id: int) -> Optional[str]:
+        """Returns the relation alias.
+
+        Args:
+            relation_id: the identifier for a particular relation.
+
+        Returns:
+            the relation alias or None if the relation was not found.
+        """
+        for relation in self.charm.model.relations[self.relation_data.relation_name]:
+            if relation.id == relation_id:
+                return relation.data[self.relation_data.local_unit].get("alias")
+        return None
+
+    def _on_relation_created_event(self, event: RelationCreatedEvent) -> None:
+        """Event emitted when the database relation is created."""
+        super()._on_relation_created_event(event)
+
+        # If relations aliases were provided, assign one to the relation.
+        self._assign_relation_alias(event.relation.id)
+
+        # Sets both database and extra user roles in the relation
+        # if the roles are provided. Otherwise, sets only the database.
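+        # Only the leader unit may write the request into the application databag.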
+ if not self.relation_data.local_unit.is_leader(): + return + + event_data = {"database": self.relation_data.database} + + if self.relation_data.extra_user_roles: + event_data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + event_data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + event_data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + event_data["entity-permissions"] = self.relation_data.entity_permissions + if self.relation_data.requested_entity_secret: + event_data["requested-entity-secret"] = self.relation_data.requested_entity_secret + if self.relation_data.prefix_matching: + event_data["prefix-matching"] = self.relation_data.prefix_matching + + # Create helper secret if needed + if ( + self.relation_data.requested_entity_name + and not self.relation_data.requested_entity_secret + ): + content = {"entity-name": self.relation_data.requested_entity_name} + if self.relation_data.requested_entity_password: + content["password"] = self.relation_data.requested_entity_password + secret = self.charm.app.add_secret( + content, label=f"{self.model.uuid}-{event.relation.id}-requested-entity" + ) + secret.grant(event.relation) + if not secret.id: + raise SecretError("Secret helper missing Id") + event_data["requested-entity-secret"] = secret.id + + # set external-node-connectivity field + if self.relation_data.external_node_connectivity: + event_data["external-node-connectivity"] = "true" + + self.relation_data.update_relation_data(event.relation.id, event_data) + + def _clear_helper_secret(self, event: RelationChangedEvent, app_databag: Dict) -> None: + """Remove helper secret if set.""" + if ( + self.relation_data.local_unit.is_leader() + and self.relation_data.requested_entity_name + and (secret_uri := app_databag.get("requested-entity-secret")) + ): + try: + secret = self.framework.model.get_secret(id=secret_uri) + secret.remove_all_revisions() + except ModelError: + logger.debug("Unable to remove helper secret") + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + super()._on_relation_changed_event(event) + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + # Check if the database is created + # (the database charm shared the credentials). + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). 
+            logger.info("database created at %s", datetime.now())
+            getattr(self.on, "database_created").emit(
+                event.relation, app=event.app, unit=event.unit
+            )
+
+            # Emit the aliased event (if any).
+            self._emit_aliased_event(event, "database_created")
+            self._clear_helper_secret(event, app_databag)
+
+            # To avoid unnecessary application restarts do not trigger other events.
+            return
+
+        if self._entity_credentials_shared(diff) and "entity-type" in app_databag:
+            # Emit the default event (the one without an alias).
+            logger.info("entity created at %s", datetime.now())
+            getattr(self.on, "database_entity_created").emit(
+                event.relation, app=event.app, unit=event.unit
+            )
+
+            # Emit the aliased event (if any).
+            self._emit_aliased_event(event, "database_entity_created")
+            self._clear_helper_secret(event, app_databag)
+
+            # To avoid unnecessary application restarts do not trigger other events.
+            return
+
+        for key, event_name in [
+            ("endpoints", "endpoints_changed"),
+            ("read-only-endpoints", "read_only_endpoints_changed"),
+            ("prefix-databases", "prefix_databases_changed"),
+        ]:
+            # Emit a change event if the key changed.
+            if key in diff.added or key in diff.changed:
+                # Emit the default event (the one without an alias).
+                logger.info("%s changed on %s", key, datetime.now())
+                getattr(self.on, event_name).emit(event.relation, app=event.app, unit=event.unit)
+
+                # Emit the aliased event (if any).
+                self._emit_aliased_event(event, event_name)
+
+                # To avoid unnecessary application restarts do not trigger other events.
+                return
+
+
+class DatabaseRequires(DatabaseRequirerData, DatabaseRequirerEventHandlers):
+    """Requirer-side of the database relations."""
+
+    def __init__(
+        self,
+        charm: CharmBase,
+        relation_name: str,
+        database_name: str,
+        extra_user_roles: Optional[str] = None,
+        relations_aliases: Optional[List[str]] = None,
+        additional_secret_fields: Optional[List[str]] = [],
+        external_node_connectivity: bool = False,
+        extra_group_roles: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        entity_permissions: Optional[str] = None,
+        requested_entity_secret: Optional[str] = None,
+        requested_entity_name: Optional[str] = None,
+        requested_entity_password: Optional[str] = None,
+        prefix_matching: Optional[str] = None,
+    ):
+        DatabaseRequirerData.__init__(
+            self,
+            charm.model,
+            relation_name,
+            database_name,
+            extra_user_roles,
+            relations_aliases,
+            additional_secret_fields,
+            external_node_connectivity,
+            extra_group_roles,
+            entity_type,
+            entity_permissions,
+            requested_entity_secret,
+            requested_entity_name,
+            requested_entity_password,
+            prefix_matching,
+        )
+        DatabaseRequirerEventHandlers.__init__(self, charm, self)
+
+
+################################################################################
+# Charm-specific Relations Data and Events
+################################################################################
+
+# Kafka Events
+
+
+class KafkaProvidesEvent(RelationEventWithSecret):
+    """Base class for Kafka events."""
+
+    @property
+    def topic(self) -> Optional[str]:
+        """Returns the topic that was requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("topic")
+
+    @property
+    def consumer_group_prefix(self) -> Optional[str]:
+        """Returns the consumer-group-prefix that was requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("consumer-group-prefix")
+
+    @property
+    def mtls_cert(self) -> Optional[str]:
+        """Returns TLS cert of the client."""
+        if not self.relation.app:
+            return None
+
+        if not self.secrets_enabled:
+            raise SecretsUnavailableError("Secrets unavailable on current Juju version")
+
+        secret_field = f"{PROV_SECRET_PREFIX}{SECRET_GROUPS.MTLS}"
+        if secret_uri := self.relation.data[self.app].get(secret_field):
+            secret = self.framework.model.get_secret(id=secret_uri)
+            content = secret.get_content(refresh=True)
+            if content:
+                return content.get("mtls-cert")
+
+
+class KafkaClientMtlsCertUpdatedEvent(KafkaProvidesEvent):
+    """Event emitted when the client mTLS certificate is updated."""
+
+    def __init__(self, handle, relation, old_mtls_cert: Optional[str] = None, app=None, unit=None):
+        super().__init__(handle, relation, app, unit)
+
+        self.old_mtls_cert = old_mtls_cert
+
+    def snapshot(self):
+        """Return a snapshot of the event."""
+        return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert}
+
+    def restore(self, snapshot):
+        """Restore the event from a snapshot."""
+        super().restore(snapshot)
+        self.old_mtls_cert = snapshot["old_mtls_cert"]
+
+
+class TopicRequestedEvent(KafkaProvidesEvent):
+    """Event emitted when a new topic is requested for use on this relation."""
+
+    @property
+    def extra_user_roles(self) -> Optional[str]:
+        """Returns the extra user roles that were requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("extra-user-roles")
+
+
+class TopicEntityRequestedEvent(KafkaProvidesEvent, EntityProvidesEvent):
+    """Event emitted when a new entity is requested for use on this relation."""
+
+
+class TopicEntityPermissionsChangedEvent(KafkaProvidesEvent, EntityProvidesEvent):
+    """Event emitted when existing entity permissions are changed on this relation."""
+
+
+class KafkaProvidesEvents(CharmEvents):
+    """Kafka events.
+
+    This class defines the events that the Kafka can emit.
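+
+    A provider charm would typically observe these events as in the following
+    sketch (the `kafka_provides` attribute name is hypothetical):
+
+        self.framework.observe(
+            self.kafka_provides.on.topic_requested, self._on_topic_requested
+        )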
+ """ + + topic_requested = EventSource(TopicRequestedEvent) + topic_entity_requested = EventSource(TopicEntityRequestedEvent) + topic_entity_permissions_changed = EventSource(TopicEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(KafkaClientMtlsCertUpdatedEvent) + + +class KafkaRequiresEvent(RelationEvent): + """Base class for Kafka events.""" + + @property + def topic(self) -> Optional[str]: + """Returns the topic.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("topic") + + @property + def bootstrap_server(self) -> Optional[str]: + """Returns a comma-separated list of broker uris.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoints") + + @property + def consumer_group_prefix(self) -> Optional[str]: + """Returns the consumer-group-prefix.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("consumer-group-prefix") + + @property + def zookeeper_uris(self) -> Optional[str]: + """Returns a comma separated list of Zookeeper uris.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("zookeeper-uris") + + +class TopicCreatedEvent(AuthenticationEvent, KafkaRequiresEvent): + """Event emitted when a new topic is created for use on this relation.""" + + +class TopicEntityCreatedEvent(EntityRequiresEvent, KafkaRequiresEvent): + """Event emitted when a new entity is created for use on this relation.""" + + +class BootstrapServerChangedEvent(AuthenticationEvent, KafkaRequiresEvent): + """Event emitted when the bootstrap server is changed.""" + + +class KafkaRequiresEvents(RequirerCharmEvents): + """Kafka events. + + This class defines the events that the Kafka can emit. + """ + + topic_created = EventSource(TopicCreatedEvent) + topic_entity_created = EventSource(TopicEntityCreatedEvent) + bootstrap_server_changed = EventSource(BootstrapServerChangedEvent) + + +# Kafka Provides and Requires + + +class KafkaProviderData(ProviderData): + """Provider-side of the Kafka relation.""" + + RESOURCE_FIELD = "topic" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_topic(self, relation_id: int, topic: str) -> None: + """Set topic name in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + topic: the topic name. + """ + self.update_relation_data(relation_id, {"topic": topic}) + + def set_bootstrap_server(self, relation_id: int, bootstrap_server: str) -> None: + """Set the bootstrap server in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + bootstrap_server: the bootstrap server address. + """ + self.update_relation_data(relation_id, {"endpoints": bootstrap_server}) + + def set_consumer_group_prefix(self, relation_id: int, consumer_group_prefix: str) -> None: + """Set the consumer group prefix in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + consumer_group_prefix: the consumer group prefix string. + """ + self.update_relation_data(relation_id, {"consumer-group-prefix": consumer_group_prefix}) + + def set_zookeeper_uris(self, relation_id: int, zookeeper_uris: str) -> None: + """Set the zookeeper uris in the application relation databag. 
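+
+        For example "zk1:2181,zk2:2181,zk3:2181" (hosts illustrative).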
+ + Args: + relation_id: the identifier for a particular relation. + zookeeper_uris: comma-separated list of ZooKeeper server uris. + """ + self.update_relation_data(relation_id, {"zookeeper-uris": zookeeper_uris}) + + +class KafkaProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Kafka relation.""" + + on = KafkaProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + + new_data_keys = list(event.relation.data[event.app].keys()) + if any(newval for newval in new_data_keys if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, new_data_keys) + + getattr(self.on, "mtls_cert_updated").emit(event.relation, app=event.app, unit=event.unit) + + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit a topic requested event if the setup key (topic name) + # was added to the relation databag, but the entity-type key was not. + if "topic" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "topic_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (topic name) + # was added to the relation databag, in addition to the entity-type key. + if "topic" in diff.added and "entity-type" in diff.added: + getattr(self.on, "topic_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (topic name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "topic" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "topic_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + # mtls-cert is the only secret that can be updated + logger.info("mtls-cert updated") + getattr(self.on, "mtls_cert_updated").emit( + relation, app=relation.app, unit=remote_unit, old_mtls_cert=old_mtls_cert + ) + + +class KafkaProvides(KafkaProviderData, KafkaProviderEventHandlers): + """Provider-side of the Kafka relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + KafkaProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + KafkaProviderEventHandlers.__init__(self, charm, self) + + +class KafkaRequirerData(RequirerData): + """Requirer-side of the Kafka relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + topic: str, + extra_user_roles: Optional[str] = None, + consumer_group_prefix: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + mtls_cert: Optional[str] = None, + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of Kafka client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.topic = topic + self.consumer_group_prefix = consumer_group_prefix or "" + self.mtls_cert = mtls_cert + + @staticmethod + def is_topic_value_acceptable(topic_value: str) -> bool: + """Check whether the given Kafka topic value is acceptable.""" + return "*" not in topic_value[:3] + + @property + def topic(self): + """Topic to use in Kafka.""" + return self._topic + + @topic.setter + def topic(self, value): + if not self.is_topic_value_acceptable(value): + raise ValueError(f"Error on topic '{value}', unacceptable value.") + self._topic = value + + def set_mtls_cert(self, relation_id: int, mtls_cert: str) -> None: + """Set the mtls cert in the application relation databag / secret. + + Args: + relation_id: the identifier for a particular relation. + mtls_cert: mtls cert. + """ + self.update_relation_data(relation_id, {"mtls-cert": mtls_cert}) + + +class KafkaRequirerEventHandlers(RequirerEventHandlers): + """Requires-side of the Kafka relation.""" + + on = KafkaRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaRequirerData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. 
The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Kafka relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + # Sets topic, extra user roles, and "consumer-group-prefix" in the relation + relation_data = {"topic": self.relation_data.topic} + + if self.relation_data.mtls_cert: + relation_data["mtls-cert"] = self.relation_data.mtls_cert + + if self.relation_data.consumer_group_prefix: + relation_data["consumer-group-prefix"] = self.relation_data.consumer_group_prefix + + if self.relation_data.extra_user_roles: + relation_data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + relation_data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + relation_data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + relation_data["entity-permissions"] = self.relation_data.entity_permissions + + self.relation_data.update_relation_data(event.relation.id, relation_data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Kafka relation has changed.""" + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Check if the topic is created + # (the Kafka charm shared the credentials). + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). + logger.info("topic created at %s", datetime.now()) + getattr(self.on, "topic_created").emit(event.relation, app=event.app, unit=event.unit) + + # To avoid unnecessary application restarts do not trigger other events. + return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at %s", datetime.now()) + getattr(self.on, "topic_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an endpoints (bootstrap-server) changed event if the Kafka endpoints + # added or changed this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "bootstrap_server_changed").emit( + event.relation, app=event.app, unit=event.unit + ) # here check if this is the right design + + # To avoid unnecessary application restarts do not trigger other events. 
+            return
+
+
+class KafkaRequires(KafkaRequirerData, KafkaRequirerEventHandlers):
+    """Requirer-side of the Kafka relation."""
+
+    def __init__(
+        self,
+        charm: CharmBase,
+        relation_name: str,
+        topic: str,
+        extra_user_roles: Optional[str] = None,
+        consumer_group_prefix: Optional[str] = None,
+        additional_secret_fields: Optional[List[str]] = [],
+        mtls_cert: Optional[str] = None,
+        extra_group_roles: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        entity_permissions: Optional[str] = None,
+    ) -> None:
+        KafkaRequirerData.__init__(
+            self,
+            charm.model,
+            relation_name,
+            topic,
+            extra_user_roles=extra_user_roles,
+            consumer_group_prefix=consumer_group_prefix,
+            additional_secret_fields=additional_secret_fields,
+            mtls_cert=mtls_cert,
+            extra_group_roles=extra_group_roles,
+            entity_type=entity_type,
+            entity_permissions=entity_permissions,
+        )
+        KafkaRequirerEventHandlers.__init__(self, charm, self)
+
+
+# Karapace related events
+
+
+class KarapaceProvidesEvent(RelationEvent):
+    """Base class for Karapace events."""
+
+    @property
+    def subject(self) -> Optional[str]:
+        """Returns the subject that was requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("subject")
+
+
+class SubjectRequestedEvent(KarapaceProvidesEvent):
+    """Event emitted when a new subject is requested for use on this relation."""
+
+    @property
+    def extra_user_roles(self) -> Optional[str]:
+        """Returns the extra user roles that were requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("extra-user-roles")
+
+
+class SubjectEntityRequestedEvent(KarapaceProvidesEvent, EntityProvidesEvent):
+    """Event emitted when a new entity is requested for use on this relation."""
+
+
+class SubjectEntityPermissionsChangedEvent(KarapaceProvidesEvent, EntityProvidesEvent):
+    """Event emitted when existing entity permissions are changed on this relation."""
+
+
+class KarapaceProvidesEvents(CharmEvents):
+    """Karapace events.
+
+    This class defines the events that Karapace can emit.
+    """
+
+    subject_requested = EventSource(SubjectRequestedEvent)
+    subject_entity_requested = EventSource(SubjectEntityRequestedEvent)
+    subject_entity_permissions_changed = EventSource(SubjectEntityPermissionsChangedEvent)
+
+
+class KarapaceRequiresEvent(RelationEvent):
+    """Base class for Karapace events."""
+
+    @property
+    def subject(self) -> Optional[str]:
+        """Returns the subject."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("subject")
+
+    @property
+    def endpoints(self) -> Optional[str]:
+        """Returns a comma-separated list of Karapace server uris."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("endpoints")
+
+
+class SubjectAllowedEvent(AuthenticationEvent, KarapaceRequiresEvent):
+    """Event emitted when a new subject ACL is created for use on this relation."""
+
+
+class SubjectEntityCreatedEvent(EntityRequiresEvent, KarapaceRequiresEvent):
+    """Event emitted when a new entity is created for use on this relation."""
+
+
+class EndpointsChangedEvent(AuthenticationEvent, KarapaceRequiresEvent):
+    """Event emitted when the endpoints are changed."""
+
+
+class KarapaceRequiresEvents(RequirerCharmEvents):
+    """Karapace events.
+
+    This class defines the events that Karapace can emit.
+ """ + + subject_allowed = EventSource(SubjectAllowedEvent) + subject_entity_created = EventSource(SubjectEntityCreatedEvent) + server_changed = EventSource(EndpointsChangedEvent) + + +# Karapace Provides and Requires + + +class KarapaceProviderData(ProviderData): + """Provider-side of the Karapace relation.""" + + RESOURCE_FIELD = "subject" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_subject(self, relation_id: int, subject: str) -> None: + """Set subject name in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + subject: the subject name. + """ + self.update_relation_data(relation_id, {"subject": subject}) + + def set_endpoint(self, relation_id: int, endpoint: str) -> None: + """Set the endpoint in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + endpoint: the server address. + """ + self.update_relation_data(relation_id, {"endpoints": endpoint}) + + +class KarapaceProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Karapace relation.""" + + on = KarapaceProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KarapaceProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit a subject requested event if the setup key (subject name) + # was added to the relation databag, but the entity-type key was not. + if "subject" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "subject_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (subject name) + # was added to the relation databag, in addition to the entity-type key. + if "subject" in diff.added and "entity-type" in diff.added: + getattr(self.on, "subject_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (subject name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "subject" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "subject_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + +class KarapaceProvides(KarapaceProviderData, KarapaceProviderEventHandlers): + """Provider-side of the Karapace relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + KarapaceProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + KarapaceProviderEventHandlers.__init__(self, charm, self) + + +class KarapaceRequirerData(RequirerData): + """Requirer-side of the Karapace relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + subject: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of Karapace client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.subject = subject + + @property + def subject(self): + """Topic to use in Karapace.""" + return self._subject + + @subject.setter + def subject(self, value): + # Avoid wildcards + if value == "*": + raise ValueError(f"Error on subject '{value}', cannot be a wildcard.") + self._subject = value + + +class KarapaceRequirerEventHandlers(RequirerEventHandlers): + """Requires-side of the Karapace relation.""" + + on = KarapaceRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KarapaceRequirerData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Karapace relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + # Sets subject and extra user roles + relation_data = {"subject": self.relation_data.subject} + + if self.relation_data.extra_user_roles: + relation_data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + relation_data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + relation_data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + relation_data["entity-permissions"] = self.relation_data.entity_permissions + + self.relation_data.update_relation_data(event.relation.id, relation_data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Karapace relation has changed.""" + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Check if the subject ACLs are created + # (the Karapace charm shared the credentials). 
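+        # (That check happens below, once any newly added secrets have been registered.)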
+ + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). + logger.info("subject ACL created at %s", datetime.now()) + getattr(self.on, "subject_allowed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at %s", datetime.now()) + getattr(self.on, "subject_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an endpoints changed event if the Karapace endpoints added or changed + # this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "server_changed").emit( + event.relation, app=event.app, unit=event.unit + ) # here check if this is the right design + + # To avoid unnecessary application restarts do not trigger other events. + return + + +class KarapaceRequires(KarapaceRequirerData, KarapaceRequirerEventHandlers): + """Provider-side of the Karapace relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + subject: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ) -> None: + KarapaceRequirerData.__init__( + self, + charm.model, + relation_name, + subject, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + KarapaceRequirerEventHandlers.__init__(self, charm, self) + + +# Kafka Connect Events + + +class KafkaConnectProvidesEvent(RelationEvent): + """Base class for Kafka Connect Provider events.""" + + @property + def plugin_url(self) -> Optional[str]: + """Returns the REST endpoint URL which serves the connector plugin.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("plugin-url") + + +class IntegrationRequestedEvent(KafkaConnectProvidesEvent): + """Event emitted when a new integrator boots up and is ready to serve the connector plugin.""" + + +class KafkaConnectProvidesEvents(CharmEvents): + """Kafka Connect Provider Events.""" + + integration_requested = EventSource(IntegrationRequestedEvent) + + +class KafkaConnectRequiresEvent(AuthenticationEvent): + """Base class for Kafka Connect Requirer events.""" + + @property + def plugin_url(self) -> Optional[str]: + """Returns the REST endpoint URL which serves the connector plugin.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("plugin-url") + + +class IntegrationCreatedEvent(KafkaConnectRequiresEvent): + """Event emitted when the credentials are created for this integrator.""" + + +class 
IntegrationEndpointsChangedEvent(KafkaConnectRequiresEvent): + """Event emitted when Kafka Connect REST endpoints change.""" + + +class KafkaConnectRequiresEvents(RequirerCharmEvents): + """Kafka Connect Requirer Events.""" + + integration_created = EventSource(IntegrationCreatedEvent) + integration_endpoints_changed = EventSource(IntegrationEndpointsChangedEvent) + + +class KafkaConnectProviderData(ProviderData): + """Provider-side of the Kafka Connect relation.""" + + RESOURCE_FIELD = "plugin-url" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Sets REST endpoints of the Kafka Connect service.""" + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + +class KafkaConnectProviderEventHandlers(EventHandlers): + """Provider-side implementation of the Kafka Connect event handlers.""" + + on = KafkaConnectProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaConnectProviderData) -> None: + super().__init__(charm, relation_data) + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + if "plugin-url" in diff.added: + getattr(self.on, "integration_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + +class KafkaConnectProvides(KafkaConnectProviderData, KafkaConnectProviderEventHandlers): + """Provider-side implementation of the Kafka Connect relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + KafkaConnectProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + KafkaConnectProviderEventHandlers.__init__(self, charm, self) + + +# Sentinel value passed from Kafka Connect requirer side when it does not need to serve any plugins. 
+PLUGIN_URL_NOT_REQUIRED: Final[str] = "NOT-REQUIRED" + + +class KafkaConnectRequirerData(RequirerData): + """Requirer-side of the Kafka Connect relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + plugin_url: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ): + """Manager of Kafka client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles=extra_user_roles, + additional_secret_fields=additional_secret_fields, + ) + self.plugin_url = plugin_url + + @property + def plugin_url(self): + """The REST endpoint URL which serves the connector plugin.""" + return self._plugin_url + + @plugin_url.setter + def plugin_url(self, value): + self._plugin_url = value + + +class KafkaConnectRequirerEventHandlers(RequirerEventHandlers): + """Requirer-side of the Kafka Connect relation.""" + + on = KafkaConnectRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaConnectRequirerData) -> None: + super().__init__(charm, relation_data) + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Kafka Connect relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + relation_data = {"plugin-url": self.relation_data.plugin_url} + self.relation_data.update_relation_data(event.relation.id, relation_data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Kafka Connect relation has changed.""" + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + if self._main_credentials_shared(diff): + logger.info("integration created at %s", datetime.now()) + getattr(self.on, "integration_created").emit( + event.relation, app=event.app, unit=event.unit + ) + return + + # Emit an endpoints changed event if the provider added or + # changed this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). 
+ logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "integration_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + return + + +class KafkaConnectRequires(KafkaConnectRequirerData, KafkaConnectRequirerEventHandlers): + """Requirer-side implementation of the Kafka Connect relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + plugin_url: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ) -> None: + KafkaConnectRequirerData.__init__( + self, + charm.model, + relation_name, + plugin_url, + extra_user_roles=extra_user_roles, + additional_secret_fields=additional_secret_fields, + ) + KafkaConnectRequirerEventHandlers.__init__(self, charm, self) + + +# Opensearch related events + + +class OpenSearchProvidesEvent(RelationEvent): + """Base class for OpenSearch events.""" + + @property + def index(self) -> Optional[str]: + """Returns the index that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("index") + + +class IndexRequestedEvent(OpenSearchProvidesEvent): + """Event emitted when a new index is requested for use on this relation.""" + + @property + def extra_user_roles(self) -> Optional[str]: + """Returns the extra user roles that were requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("extra-user-roles") + + +class IndexEntityRequestedEvent(OpenSearchProvidesEvent, EntityProvidesEvent): + """Event emitted when a new entity is requested for use on this relation.""" + + +class IndexEntityPermissionsChangedEvent(OpenSearchProvidesEvent, EntityProvidesEvent): + """Event emitted when existing entity permissions are changed on this relation.""" + + +class OpenSearchProvidesEvents(CharmEvents): + """OpenSearch events. + + This class defines the events that OpenSearch can emit. + """ + + index_requested = EventSource(IndexRequestedEvent) + index_entity_requested = EventSource(IndexEntityRequestedEvent) + index_entity_permissions_changed = EventSource(IndexEntityPermissionsChangedEvent) + + +class OpenSearchRequiresEvent(DatabaseRequiresEvent): + """Base class for OpenSearch requirer events.""" + + +class IndexCreatedEvent(AuthenticationEvent, OpenSearchRequiresEvent): + """Event emitted when a new index is created for use on this relation.""" + + +class IndexEntityCreatedEvent(EntityRequiresEvent, OpenSearchRequiresEvent): + """Event emitted when a new index is created for use on this relation.""" + + +class OpenSearchRequiresEvents(RequirerCharmEvents): + """OpenSearch events. + + This class defines the events that the opensearch requirer can emit. + """ + + index_created = EventSource(IndexCreatedEvent) + index_entity_created = EventSource(IndexEntityCreatedEvent) + endpoints_changed = EventSource(DatabaseEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationEvent) + + +# OpenSearch Provides and Requires Objects + + +class OpenSearchProvidesData(ProviderData): + """Provider-side of the OpenSearch relation.""" + + RESOURCE_FIELD = "index" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_index(self, relation_id: int, index: str) -> None: + """Set the index in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. 
+ index: the index as it is _created_ on the provider charm. This needn't match the + requested index, and can be used to present a different index name if, for example, + the requested index is invalid. + """ + self.update_relation_data(relation_id, {"index": index}) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Set the endpoints in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + endpoints: the endpoint addresses for opensearch nodes. + """ + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + def set_version(self, relation_id: int, version: str) -> None: + """Set the opensearch version in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + version: database version. + """ + self.update_relation_data(relation_id, {"version": version}) + + +class OpenSearchProvidesEventHandlers(ProviderEventHandlers): + """Provider-side of the OpenSearch relation.""" + + on = OpenSearchProvidesEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: OpenSearchProvidesData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit an index requested event if the setup key (index name) + # was added to the relation databag, but the entity-type key was not. + if "index" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "index_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (index name) + # was added to the relation databag, in addition to the entity-type key. + if "index" in diff.added and "entity-type" in diff.added: + getattr(self.on, "index_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (index name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "index" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "index_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + pass + + +class OpenSearchProvides(OpenSearchProvidesData, OpenSearchProvidesEventHandlers): + """Provider-side of the OpenSearch relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + OpenSearchProvidesData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + OpenSearchProvidesEventHandlers.__init__(self, charm, self) + + +class OpenSearchRequiresData(RequirerData): + """Requires data side of the OpenSearch relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + index: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of OpenSearch client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.index = index + + +class OpenSearchRequiresEventHandlers(RequirerEventHandlers): + """Requires events side of the OpenSearch relation.""" + + on = OpenSearchRequiresEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: OpenSearchRequiresData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the OpenSearch relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + # Sets both index and extra user roles in the relation if the roles are provided. + # Otherwise, sets only the index. 
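+        # Entity-related fields (group roles, entity type, entity permissions) are
+        # likewise only written to the databag when they were provided.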
+ data = {"index": self.relation_data.index} + + if self.relation_data.extra_user_roles: + data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + data["entity-permissions"] = self.relation_data.entity_permissions + + self.relation_data.update_relation_data(event.relation.id, data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + logger.info("authentication updated") + getattr(self.on, "authentication_updated").emit( + relation, app=relation.app, unit=remote_unit + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the OpenSearch relation has changed. + + This event triggers individual custom events depending on the changing relation. + """ + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) + updates = {"username", "password", "tls", "tls-ca", secret_field_user, secret_field_tls} + if len(set(diff._asdict().keys()) - updates) < len(diff): + logger.info("authentication updated at: %s", datetime.now()) + getattr(self.on, "authentication_updated").emit( + event.relation, app=event.app, unit=event.unit + ) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + # Check if the index is created + # (the OpenSearch charm shares the credentials). + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). + logger.info("index created at: %s", datetime.now()) + getattr(self.on, "index_created").emit(event.relation, app=event.app, unit=event.unit) + + # To avoid unnecessary application restarts do not trigger other events. + return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at: %s", datetime.now()) + getattr(self.on, "index_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + # Emit a endpoints changed event if the OpenSearch application + # added or changed this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + +class OpenSearchRequires(OpenSearchRequiresData, OpenSearchRequiresEventHandlers): + """Requires-side of the OpenSearch relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + index: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ) -> None: + OpenSearchRequiresData.__init__( + self, + charm.model, + relation_name, + index, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + OpenSearchRequiresEventHandlers.__init__(self, charm, self) + + +# Etcd related events + + +class EtcdProviderEvent(RelationEventWithSecret): + """Base class for Etcd events.""" + + @property + def prefix(self) -> Optional[str]: + """Returns the index that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("prefix") + + @property + def mtls_cert(self) -> Optional[str]: + """Returns TLS cert of the client.""" + if not self.relation.app: + return None + + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + secret_field = f"{PROV_SECRET_PREFIX}{SECRET_GROUPS.MTLS}" + if secret_uri := self.relation.data[self.app].get(secret_field): + secret = self.framework.model.get_secret(id=secret_uri) + content = secret.get_content(refresh=True) + if content: + return content.get("mtls-cert") + + +class MTLSCertUpdatedEvent(EtcdProviderEvent): + """Event emitted when the mtls relation is updated.""" + + def __init__(self, handle, relation, old_mtls_cert: Optional[str] = None, app=None, unit=None): + super().__init__(handle, relation, app, unit) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class EtcdProviderEvents(CharmEvents): + """Etcd events. + + This class defines the events that Etcd can emit. + """ + + mtls_cert_updated = EventSource(MTLSCertUpdatedEvent) + + +class EtcdReadyEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when the etcd relation is ready to be consumed.""" + + +class EtcdRequirerEvents(RequirerCharmEvents): + """Etcd events. + + This class defines the events that the etcd requirer can emit. 
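+
+    - endpoints_changed: the etcd endpoints changed.
+    - etcd_ready: the etcd relation is ready to be consumed.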
+ """ + + endpoints_changed = EventSource(DatabaseEndpointsChangedEvent) + etcd_ready = EventSource(EtcdReadyEvent) + + +# Etcd Provides and Requires Objects + + +class EtcdProviderData(ProviderData): + """Provider-side of the Etcd relation.""" + + RESOURCE_FIELD = "prefix" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_uris(self, relation_id: int, uris: str) -> None: + """Set the database connection URIs in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + uris: connection URIs. + """ + self.update_relation_data(relation_id, {"uris": uris}) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Set the endpoints in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + endpoints: the endpoint addresses for etcd nodes "ip:port" format. + """ + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + def set_version(self, relation_id: int, version: str) -> None: + """Set the etcd version in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + version: etcd API version. + """ + self.update_relation_data(relation_id, {"version": version}) + + def set_tls_ca(self, relation_id: int, tls_ca: str) -> None: + """Set the TLS CA in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + tls_ca: TLS certification authority. + """ + self.update_relation_data(relation_id, {"tls-ca": tls_ca, "tls": "True"}) + + +class EtcdProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Etcd relation.""" + + on = EtcdProviderEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: EtcdProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + # register all new secrets with their labels + new_data_keys = list(event.relation.data[event.app].keys()) + if any(newval for newval in new_data_keys if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, new_data_keys) + + # Check which data has changed to emit customs events. 
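+        # (The diff below is only used to validate entity consistency; the
+        # mtls_cert_updated event itself is emitted for every relation change.)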
+ diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + getattr(self.on, "mtls_cert_updated").emit(event.relation, app=event.app, unit=event.unit) + return + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + # mtls-cert is the only secret that can be updated + logger.info("mtls-cert updated") + getattr(self.on, "mtls_cert_updated").emit( + relation, app=relation.app, unit=remote_unit, old_mtls_cert=old_mtls_cert + ) + + +class EtcdProvides(EtcdProviderData, EtcdProviderEventHandlers): + """Provider-side of the Etcd relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + EtcdProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + EtcdProviderEventHandlers.__init__(self, charm, self) + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + +class EtcdRequirerData(RequirerData): + """Requires data side of the Etcd relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + prefix: str, + mtls_cert: Optional[str], + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of Etcd client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.prefix = prefix + self.mtls_cert = mtls_cert + + def set_mtls_cert(self, relation_id: int, mtls_cert: str) -> None: + """Set the mtls cert in the application relation databag / secret. + + Args: + relation_id: the identifier for a particular relation. + mtls_cert: mtls cert. + """ + self.update_relation_data(relation_id, {"mtls-cert": mtls_cert}) + + +class EtcdRequirerEventHandlers(RequirerEventHandlers): + """Requires events side of the Etcd relation.""" + + on = EtcdRequirerEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: EtcdRequirerData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. 
The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Etcd relation is created.""" + super()._on_relation_created_event(event) + + payload = { + "prefix": self.relation_data.prefix, + } + if self.relation_data.mtls_cert: + payload["mtls-cert"] = self.relation_data.mtls_cert + + self.relation_data.update_relation_data( + event.relation.id, + payload, + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Etcd relation has changed. + + This event triggers individual custom events depending on the changing relation. + """ + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) + + # Emit a endpoints changed event if the etcd application added or changed this info + # in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + if ( + secret_field_tls in diff.added + or secret_field_tls in diff.changed + or secret_field_user in diff.added + or secret_field_user in diff.changed + or "username" in diff.added + or "username" in diff.changed + ): + # Emit the default event (the one without an alias). 
+ logger.info("etcd ready on %s", datetime.now()) + getattr(self.on, "etcd_ready").emit(event.relation, app=event.app, unit=event.unit) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + # secret-user or secret-tls updated + logger.info("etcd_ready updated") + getattr(self.on, "etcd_ready").emit(relation, app=relation.app, unit=remote_unit) + + +class EtcdRequires(EtcdRequirerData, EtcdRequirerEventHandlers): + """Requires-side of the Etcd relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + prefix: str, + mtls_cert: Optional[str], + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ) -> None: + EtcdRequirerData.__init__( + self, + charm.model, + relation_name, + prefix, + mtls_cert, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + EtcdRequirerEventHandlers.__init__(self, charm, self) + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") diff --git a/machine-charm/pyproject.toml b/machine-charm/pyproject.toml new file mode 100644 index 00000000..0132358a --- /dev/null +++ b/machine-charm/pyproject.toml @@ -0,0 +1,80 @@ +# Copyright 2026 Samuel Olwe +# See LICENSE file for licensing details. + +[project] +name = "machine-charm" +version = "0.0.1" +requires-python = ">=3.10" + +# Dependencies of the charm code +# You should include the dependencies of the code in src/. You should also include the +# dependencies of any charmlibs that the charm uses (copy the dependencies from PYDEPS). 
+dependencies = [ + "charmlibs-apt>=1,<2", + "charmlibs-pathops>=1.2.1", + "ops>=3,<4", + "pydantic>=2.12.5", + "psycopg[binary]==3.3.3", +] + +[dependency-groups] +# Dependencies of linting and static type checks +lint = [ + "ruff", + "codespell", + "pyright", +] +# Dependencies of unit tests +unit = [ + "coverage[toml]", + "ops[testing]", + "pytest", +] +# Dependencies of integration tests +integration = [ + "jubilant", + "pytest", +] + +# Testing tools configuration +[tool.coverage.run] +branch = true + +[tool.coverage.report] +show_missing = true + +[tool.pytest.ini_options] +minversion = "6.0" +log_cli_level = "INFO" + +# Linting tools configuration +[tool.ruff] +line-length = 99 + +lint.select = ["E", "W", "F", "C", "N", "D", "I001"] +lint.ignore = [ + "D105", + "D107", + "D203", + "D204", + "D213", + "D215", + "D400", + "D404", + "D406", + "D407", + "D408", + "D409", + "D413", +] +lint.per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} +extend-exclude = ["__pycache__", "*.egg_info"] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.codespell] +skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" + +[tool.pyright] +include = ["src", "tests"] diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py new file mode 100755 index 00000000..4516fa05 --- /dev/null +++ b/machine-charm/src/charm.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 +# Copyright 2026 Samuel Olwe +# See LICENSE file for licensing details. + +"""Charm the application.""" + +import logging +import os + +import ops +import pydantic +from charms.data_platform_libs.v0.data_interfaces import ( + DatabaseCreatedEvent, + DatabaseEntityCreatedEvent, + DatabaseRequires, +) + +# A standalone module for workload-specific logic (no charming concerns): +import workload + +logger = logging.getLogger(__name__) + + +class WorkloadConfig(pydantic.BaseModel): + database: str = pydantic.Field("", description="Configures the database url") + replica_one: str = pydantic.Field( + "", description="Configures the database url for a replica if available." + ) + replica_two: str = pydantic.Field( + "", description="Configures the database url for a second replica if available." + ) + oauth_token_salt: str = pydantic.Field( + "", description="Salt used to encode and decode OAuth tokens" + ) + bind_address: str = pydantic.Field("0.0.0.0:8000", description="Address to bind the webapp to") + workers: str = pydantic.Field("3", description="Number of workers for the webapp") + timeout: str = pydantic.Field("30", description="Worker timeout for the webapp") + + +class MachineCharmCharm(ops.CharmBase): + """Charm the application.""" + + def __init__(self, framework: ops.Framework): + super().__init__(framework) + framework.observe(self.on.install, self._on_install) + # framework.observe(self.on.start, self._on_start) + framework.observe(self.on.config_changed, self._on_config_changed) + # Charm events defined in the database requires charm library. 
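+        # Note: `relation_name` must match a `requires` endpoint declared in
+        # charmcraft.yaml, and `database_name` is the database being requested.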
+        self.database = DatabaseRequires(
+            self, relation_name="postgresql", database_name="postgresql"
+        )
+        self.framework.observe(self.database.on.database_created, self._on_database_created)
+        self.framework.observe(
+            self.database.on.database_entity_created, self._on_database_entity_created
+        )
+
+    def _on_database_created(self, event: DatabaseCreatedEvent) -> None:
+        """Handle database created event."""
+        # Log the endpoints only: the relation data also carries credentials,
+        # which must not end up in the debug-log.
+        logger.info("Database created, endpoints: %s", event.endpoints)
+
+        # The event exposes username, password and endpoints rather than a ready-made
+        # connection string, so build the URL here (assumes a single host:port endpoint).
+        connection_string = (
+            f"postgresql://{event.username}:{event.password}@{event.endpoints}/postgresql"
+        )
+        # Add env variable for the webapp; the workload subprocess started
+        # below inherits this environment.
+        os.environ["DATABASE_URL"] = connection_string
+        # Start workload when database is ready.
+        self._start()
+
+    def _on_database_entity_created(self, event: DatabaseEntityCreatedEvent) -> None:
+        """Handle database entity created event."""
+        logger.info("Database entity created on relation %s", event.relation.name)
+
+    def _on_install(self, event: ops.InstallEvent):
+        """Install the workload on the machine."""
+        workload.install(self.charm_dir.absolute().as_posix())
+
+    def _start(self):
+        """Start the workload."""
+        self.unit.status = ops.MaintenanceStatus("starting workload")
+        config = self.load_config(WorkloadConfig)
+        workload.start(config.bind_address, config.workers, config.timeout)
+        self.unit.status = ops.ActiveStatus()
+
+    def _on_start(self, event: ops.StartEvent):
+        """Handle start event."""
+        self._start()
+
+    def _on_config_changed(self, event: ops.ConfigChangedEvent) -> None:
+        """Handle config-changed event."""
+        self.configure_and_run()
+
+    def configure_and_run(self) -> None:
+        """Ensure that the workload is running with the correct config."""
+        try:
+            self.load_config(WorkloadConfig)
+        except pydantic.ValidationError:
+            # The collect-status handler will run next and will set status for the user to see.
+            return
+        # The config is valid, so (re)start the workload with it. (Stopping any
+        # previously started server is not implemented yet; see workload.start.)
+        self._start()
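+
+    # A possible collect-status handler (a sketch; not part of the original patch).
+    # The comment in configure_and_run assumes such a handler exists; to enable this
+    # one, observe `self.on.collect_unit_status` in __init__ and uncomment the method.
+    #
+    # def _on_collect_status(self, event: ops.CollectStatusEvent):
+    #     """Report whether the current charm config is valid."""
+    #     try:
+    #         self.load_config(WorkloadConfig)
+    #     except pydantic.ValidationError:
+    #         event.add_status(ops.BlockedStatus("invalid config; see `juju debug-log`"))
+    #         return
+    #     event.add_status(ops.ActiveStatus())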
+    """
+    try:
+        subprocess.run(args, check=True, capture_output=True, text=True)
+    except subprocess.CalledProcessError as e:
+        raise RuntimeError(e.stderr) from e
+
+
+def ensure_config() -> bool:
+    """Return whether the required configuration (DATABASE_URL) is present."""
+    return bool(os.getenv("DATABASE_URL"))
+
+
+def run_migrations() -> None:
+    """Run database migrations."""
+    with use_path("../.."):
+        run_command("flask", "--app", "webapp.app", "db", "upgrade")
+
+
+def install(charm_dir: str) -> None:
+    """Install the workload: apt packages, a virtual environment, and migrations."""
+    # Install apt packages
+    apt.update()
+    apt.add_package("libsodium-dev", "1.0.18-1ubuntu0.24.04.1")
+    apt.add_package("python3-virtualenv", "20.25.0+ds-2")
+    apt.add_package("postgresql-16", "16.11-0ubuntu0.24.04.1")
+
+    # Create a virtual environment
+    run_command("virtualenv", "/venv")
+
+    # Install workload python packages
+    run_command(
+        "/venv/bin/python",
+        "-m",
+        "pip",
+        "install",
+        "-r",
+        f"{charm_dir}/src/flask/app/requirements.txt",
+    )
+    # Enable the pg_trgm extension (psql must run as the postgres superuser)
+    run_command(
+        "sudo", "-u", "postgres", "psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;"
+    )
+    # Then run migrations
+    run_command(
+        "/venv/bin/python",
+        "-m",
+        "flask",
+        "--app",
+        f"{charm_dir}/src/flask/app/webapp.app",
+        "db",
+        "upgrade",
+    )
+
+
+def start(address: str, workers: str, timeout: str) -> None:
+    """Start the webapp as a background process.
+
+    Gunicorn runs in the foreground, so launch it detached; otherwise the charm
+    hook would block until the webapp exits. (A production charm would usually
+    manage this with a systemd service instead.)
+    """
+    subprocess.Popen(
+        [
+            "/venv/bin/python",
+            "-m",
+            "talisker.gunicorn",
+            "webapp.app:app",
+            "--bind",
+            address,
+            "--workers",
+            workers,
+            "--timeout",
+            timeout,
+        ],
+        start_new_session=True,
+    )
diff --git a/machine-charm/tests/integration/conftest.py b/machine-charm/tests/integration/conftest.py
new file mode 100644
index 00000000..9b977ee7
--- /dev/null
+++ b/machine-charm/tests/integration/conftest.py
@@ -0,0 +1,47 @@
+# Copyright 2026 Samuel Olwe
+# See LICENSE file for licensing details.
+#
+# The integration tests use the Jubilant library. 
See https://documentation.ubuntu.com/jubilant/ +# To learn more about testing, see https://documentation.ubuntu.com/ops/latest/explanation/testing/ + +import logging +import os +import pathlib +import sys +import time + +import jubilant +import pytest + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="module") +def juju(request: pytest.FixtureRequest): + """Create a temporary Juju model for running tests.""" + with jubilant.temp_model() as juju: + yield juju + + if request.session.testsfailed: + logger.info("Collecting Juju logs...") + time.sleep(0.5) # Wait for Juju to process logs. + log = juju.debug_log(limit=1000) + print(log, end="", file=sys.stderr) + + +@pytest.fixture(scope="session") +def charm(): + """Return the path of the charm under test.""" + if "CHARM_PATH" in os.environ: + charm_path = pathlib.Path(os.environ["CHARM_PATH"]) + if not charm_path.exists(): + raise FileNotFoundError(f"Charm does not exist: {charm_path}") + return charm_path + # Modify below if you're building for multiple bases or architectures. + charm_paths = list(pathlib.Path(".").glob("*.charm")) + if not charm_paths: + raise FileNotFoundError("No .charm file in current directory") + if len(charm_paths) > 1: + path_list = ", ".join(str(path) for path in charm_paths) + raise ValueError(f"More than one .charm file in current directory: {path_list}") + return charm_paths[0] diff --git a/machine-charm/tests/integration/test_charm.py b/machine-charm/tests/integration/test_charm.py new file mode 100644 index 00000000..ea87c3a5 --- /dev/null +++ b/machine-charm/tests/integration/test_charm.py @@ -0,0 +1,29 @@ +# Copyright 2026 Samuel Olwe +# See LICENSE file for licensing details. +# +# The integration tests use the Jubilant library. See https://documentation.ubuntu.com/jubilant/ +# To learn more about testing, see https://documentation.ubuntu.com/ops/latest/explanation/testing/ + +import logging +import pathlib + +import jubilant +import pytest + +logger = logging.getLogger(__name__) + + +def test_deploy(charm: pathlib.Path, juju: jubilant.Juju): + """Deploy the charm under test.""" + juju.deploy(charm.resolve(), app="machine-charm") + juju.wait(jubilant.all_active) + + +# If you implement workload.get_version in the charm source, +# remove the @pytest.mark.skip line to enable this test. +# Alternatively, remove this test if you don't need it. +@pytest.mark.skip(reason="workload.get_version is not implemented") +def test_workload_version_is_set(charm: pathlib.Path, juju: jubilant.Juju): + """Check that the correct version of the workload is running.""" + version = juju.status().apps["machine-charm"].version + assert version == "3.14" # Replace 3.14 by the expected version of the workload. diff --git a/machine-charm/tests/unit/test_charm.py b/machine-charm/tests/unit/test_charm.py new file mode 100644 index 00000000..7df11810 --- /dev/null +++ b/machine-charm/tests/unit/test_charm.py @@ -0,0 +1,26 @@ +# Copyright 2026 Samuel Olwe +# See LICENSE file for licensing details. 
+#
+# To learn more about testing, see https://documentation.ubuntu.com/ops/latest/explanation/testing/
+
+import pytest
+from ops import testing
+
+from charm import MachineCharmCharm
+
+
+def test_start(monkeypatch: pytest.MonkeyPatch):
+    """Test that the charm has the correct state after handling the start event."""
+    # Arrange:
+    ctx = testing.Context(MachineCharmCharm)
+    # Patch the workload module so the test doesn't launch the real webapp.
+    monkeypatch.setattr("charm.workload.start", lambda *args: None)
+    # Act:
+    state_out = ctx.run(ctx.on.start(), testing.State())
+    # Assert:
+    assert state_out.unit_status == testing.ActiveStatus()
diff --git a/machine-charm/tox.ini b/machine-charm/tox.ini
new file mode 100644
index 00000000..5c97b65b
--- /dev/null
+++ b/machine-charm/tox.ini
@@ -0,0 +1,80 @@
+# Copyright 2026 Samuel Olwe
+# See LICENSE file for licensing details.
+
+[tox]
+no_package = True
+skip_missing_interpreters = True
+env_list = format, lint, unit
+min_version = 4.0.0
+
+[vars]
+src_path = {tox_root}/src
+tests_path = {tox_root}/tests
+;lib_path = {tox_root}/lib/charms/operator_name_with_underscores
+all_path = {[vars]src_path} {[vars]tests_path}
+
+[testenv]
+set_env =
+    PYTHONPATH = {tox_root}/lib:{[vars]src_path}
+    PYTHONBREAKPOINT=pdb.set_trace
+    PY_COLORS=1
+pass_env =
+    PYTHONPATH
+    CHARM_BUILD_DIR
+    MODEL_SETTINGS
+
+[testenv:format]
+description = Apply coding style standards to code
+deps =
+    ruff
+commands =
+    ruff format {[vars]all_path}
+    ruff check --fix {[vars]all_path}
+
+[testenv:lint]
+description = Check code against coding style standards, and static checks
+runner = uv-venv-lock-runner
+dependency_groups =
+    lint
+    unit
+    integration
+commands =
+    # if this charm owns a lib, uncomment "lib_path" variable
+    # and uncomment the following line
+    # codespell {[vars]lib_path}
+    codespell {tox_root}
+    ruff check {[vars]all_path}
+    ruff format --check --diff {[vars]all_path}
+    pyright {posargs}
+
+[testenv:unit]
+description = Run unit tests
+runner = uv-venv-lock-runner
+dependency_groups =
+    unit
+commands =
+    coverage run --source={[vars]src_path} -m pytest \
+        -v \
+        -s \
+        --tb native \
+        {[vars]tests_path}/unit \
+        {posargs}
+    coverage report
+
+[testenv:integration]
+description = Run integration tests
+runner = uv-venv-lock-runner
+dependency_groups =
+    integration
+pass_env =
+    # The integration tests don't pack the charm. If CHARM_PATH is set, the tests deploy the
+    # specified .charm file. Otherwise, the tests look for a .charm file in the project dir.
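+    # Example invocation (hypothetical charm filename):
+    #   CHARM_PATH=./machine-charm_amd64.charm tox -e integration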
+ CHARM_PATH +commands = + pytest \ + -v \ + -s \ + --tb native \ + --log-cli-level=INFO \ + {[vars]tests_path}/integration \ + {posargs} diff --git a/machine-charm/uv.lock b/machine-charm/uv.lock new file mode 100644 index 00000000..1239d526 --- /dev/null +++ b/machine-charm/uv.lock @@ -0,0 +1,617 @@ +version = 1 +revision = 3 +requires-python = ">=3.10" +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version < '3.11'", +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "charmlibs-apt" +version = "1.0.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/58/33e87779fdbcf62a3b34e3444d7175c1168b4b2726cc29c98849c09ac086/charmlibs_apt-1.0.0.post0.tar.gz", hash = "sha256:9c2e0b3c1f553ebcaae99c9aad72e15383aec56677a8dd3f6479dc6f084189a6", size = 31942, upload-time = "2025-10-15T02:40:29.521Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/92/4db19cd8bc94db51a115f7a2e3c46d96b991ca7ebe27207beac9a6570bc6/charmlibs_apt-1.0.0.post0-py3-none-any.whl", hash = "sha256:958e84719eb1feff539f058dc6c7af648c53c88b9ebe7c6157ec8d2bdf5fbfc6", size = 19287, upload-time = "2025-10-15T02:40:27.756Z" }, +] + +[[package]] +name = "charmlibs-pathops" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ops" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/18/120f0635f449af7ef9e1a431b7f1c21eb4e09a39870f3862350853ba92a1/charmlibs_pathops-1.2.1.tar.gz", hash = "sha256:00fa50f95bb7fbfbe3d5507de94e583f7333f63ec6ef42a49600b641aabbcfd3", size = 21892, upload-time = "2026-02-06T00:34:03.882Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/57/b54e4fcb9c53085fde0bc6016232e0bb5f88cbf7a03ef78176848793b648/charmlibs_pathops-1.2.1-py3-none-any.whl", hash = "sha256:36dc4e5f76ae3eb89020df916c14e169f71ba856f71a430dab94b0b7948e9b10", size = 27005, upload-time = "2026-02-06T00:34:02.669Z" }, +] + +[[package]] +name = "codespell" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/e0/709453393c0ea77d007d907dd436b3ee262e28b30995ea1aa36c6ffbccaf/codespell-2.4.1.tar.gz", hash = "sha256:299fcdcb09d23e81e35a671bbe746d5ad7e8385972e65dbb833a2eaac33c01e5", size = 344740, upload-time = "2025-01-28T18:52:39.411Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/01/b394922252051e97aab231d416c86da3d8a6d781eeadcdca1082867de64e/codespell-2.4.1-py3-none-any.whl", hash = "sha256:3dadafa67df7e4a3dbf51e0d7315061b80d265f9552ebd699b3dd6834b47e425", size = 344501, upload-time = "2025-01-28T18:52:37.057Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/e0/98670a80884f64578f0c22cd70c5e81a6e07b08167721c7487b4d70a7ca0/coverage-7.9.1.tar.gz", hash = "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec", size = 813650, upload-time = "2025-06-13T13:02:28.627Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/78/1c1c5ec58f16817c09cbacb39783c3655d54a221b6552f47ff5ac9297603/coverage-7.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc94d7c5e8423920787c33d811c0be67b7be83c705f001f7180c7b186dcf10ca", size = 212028, upload-time = "2025-06-13T13:00:29.293Z" }, + { url = "https://files.pythonhosted.org/packages/98/db/e91b9076f3a888e3b4ad7972ea3842297a52cc52e73fd1e529856e473510/coverage-7.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16aa0830d0c08a2c40c264cef801db8bc4fc0e1892782e45bcacbd5889270509", size = 212420, upload-time = "2025-06-13T13:00:34.027Z" }, + { url = "https://files.pythonhosted.org/packages/0e/d0/2b3733412954576b0aea0a16c3b6b8fbe95eb975d8bfa10b07359ead4252/coverage-7.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf95981b126f23db63e9dbe4cf65bd71f9a6305696fa5e2262693bc4e2183f5b", size = 241529, upload-time = "2025-06-13T13:00:35.786Z" }, + { url = "https://files.pythonhosted.org/packages/b3/00/5e2e5ae2e750a872226a68e984d4d3f3563cb01d1afb449a17aa819bc2c4/coverage-7.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f05031cf21699785cd47cb7485f67df619e7bcdae38e0fde40d23d3d0210d3c3", size = 239403, upload-time = "2025-06-13T13:00:37.399Z" }, + { url = "https://files.pythonhosted.org/packages/37/3b/a2c27736035156b0a7c20683afe7df498480c0dfdf503b8c878a21b6d7fb/coverage-7.9.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4fbcab8764dc072cb651a4bcda4d11fb5658a1d8d68842a862a6610bd8cfa3", size = 240548, upload-time = "2025-06-13T13:00:39.647Z" }, + { url = "https://files.pythonhosted.org/packages/98/f5/13d5fc074c3c0e0dc80422d9535814abf190f1254d7c3451590dc4f8b18c/coverage-7.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16649a7330ec307942ed27d06ee7e7a38417144620bb3d6e9a18ded8a2d3e5", size = 240459, upload-time = "2025-06-13T13:00:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/36/24/24b9676ea06102df824c4a56ffd13dc9da7904478db519efa877d16527d5/coverage-7.9.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cea0a27a89e6432705fffc178064503508e3c0184b4f061700e771a09de58187", size = 239128, upload-time = "2025-06-13T13:00:42.343Z" }, + { url = "https://files.pythonhosted.org/packages/be/05/242b7a7d491b369ac5fee7908a6e5ba42b3030450f3ad62c645b40c23e0e/coverage-7.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:e980b53a959fa53b6f05343afbd1e6f44a23ed6c23c4b4c56c6662bbb40c82ce", size = 239402, upload-time = "2025-06-13T13:00:43.634Z" }, + { url = "https://files.pythonhosted.org/packages/73/e0/4de7f87192fa65c9c8fbaeb75507e124f82396b71de1797da5602898be32/coverage-7.9.1-cp310-cp310-win32.whl", hash = "sha256:70760b4c5560be6ca70d11f8988ee6542b003f982b32f83d5ac0b72476607b70", size = 214518, upload-time = "2025-06-13T13:00:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ab/5e4e2fe458907d2a65fab62c773671cfc5ac704f1e7a9ddd91996f66e3c2/coverage-7.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a66e8f628b71f78c0e0342003d53b53101ba4e00ea8dabb799d9dba0abbbcebe", size = 215436, upload-time = "2025-06-13T13:00:47.245Z" }, + { url = "https://files.pythonhosted.org/packages/60/34/fa69372a07d0903a78ac103422ad34db72281c9fc625eba94ac1185da66f/coverage-7.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95c765060e65c692da2d2f51a9499c5e9f5cf5453aeaf1420e3fc847cc060582", size = 212146, upload-time = "2025-06-13T13:00:48.496Z" }, + { url = "https://files.pythonhosted.org/packages/27/f0/da1894915d2767f093f081c42afeba18e760f12fdd7a2f4acbe00564d767/coverage-7.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba383dc6afd5ec5b7a0d0c23d38895db0e15bcba7fb0fa8901f245267ac30d86", size = 212536, upload-time = "2025-06-13T13:00:51.535Z" }, + { url = "https://files.pythonhosted.org/packages/10/d5/3fc33b06e41e390f88eef111226a24e4504d216ab8e5d1a7089aa5a3c87a/coverage-7.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae0383f13cbdcf1e5e7014489b0d71cc0106458878ccde52e8a12ced4298ed", size = 245092, upload-time = "2025-06-13T13:00:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/0a/39/7aa901c14977aba637b78e95800edf77f29f5a380d29768c5b66f258305b/coverage-7.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69aa417a030bf11ec46149636314c24c8d60fadb12fc0ee8f10fda0d918c879d", size = 242806, upload-time = "2025-06-13T13:00:54.571Z" }, + { url = "https://files.pythonhosted.org/packages/43/fc/30e5cfeaf560b1fc1989227adedc11019ce4bb7cce59d65db34fe0c2d963/coverage-7.9.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a4be2a28656afe279b34d4f91c3e26eccf2f85500d4a4ff0b1f8b54bf807338", size = 244610, upload-time = "2025-06-13T13:00:56.932Z" }, + { url = "https://files.pythonhosted.org/packages/bf/15/cca62b13f39650bc87b2b92bb03bce7f0e79dd0bf2c7529e9fc7393e4d60/coverage-7.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:382e7ddd5289f140259b610e5f5c58f713d025cb2f66d0eb17e68d0a94278875", size = 244257, upload-time = "2025-06-13T13:00:58.545Z" }, + { url = "https://files.pythonhosted.org/packages/cd/1a/c0f2abe92c29e1464dbd0ff9d56cb6c88ae2b9e21becdb38bea31fcb2f6c/coverage-7.9.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e5532482344186c543c37bfad0ee6069e8ae4fc38d073b8bc836fc8f03c9e250", size = 242309, upload-time = "2025-06-13T13:00:59.836Z" }, + { url = "https://files.pythonhosted.org/packages/57/8d/c6fd70848bd9bf88fa90df2af5636589a8126d2170f3aade21ed53f2b67a/coverage-7.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a39d18b3f50cc121d0ce3838d32d58bd1d15dab89c910358ebefc3665712256c", size = 242898, upload-time = "2025-06-13T13:01:02.506Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/6ca46c7bff4675f09a66fe2797cd1ad6a24f14c9c7c3b3ebe0470a6e30b8/coverage-7.9.1-cp311-cp311-win32.whl", hash = 
"sha256:dd24bd8d77c98557880def750782df77ab2b6885a18483dc8588792247174b32", size = 214561, upload-time = "2025-06-13T13:01:04.012Z" }, + { url = "https://files.pythonhosted.org/packages/a1/30/166978c6302010742dabcdc425fa0f938fa5a800908e39aff37a7a876a13/coverage-7.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:6b55ad10a35a21b8015eabddc9ba31eb590f54adc9cd39bcf09ff5349fd52125", size = 215493, upload-time = "2025-06-13T13:01:05.702Z" }, + { url = "https://files.pythonhosted.org/packages/60/07/a6d2342cd80a5be9f0eeab115bc5ebb3917b4a64c2953534273cf9bc7ae6/coverage-7.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:6ad935f0016be24c0e97fc8c40c465f9c4b85cbbe6eac48934c0dc4d2568321e", size = 213869, upload-time = "2025-06-13T13:01:09.345Z" }, + { url = "https://files.pythonhosted.org/packages/68/d9/7f66eb0a8f2fce222de7bdc2046ec41cb31fe33fb55a330037833fb88afc/coverage-7.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8de12b4b87c20de895f10567639c0797b621b22897b0af3ce4b4e204a743626", size = 212336, upload-time = "2025-06-13T13:01:10.909Z" }, + { url = "https://files.pythonhosted.org/packages/20/20/e07cb920ef3addf20f052ee3d54906e57407b6aeee3227a9c91eea38a665/coverage-7.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5add197315a054e92cee1b5f686a2bcba60c4c3e66ee3de77ace6c867bdee7cb", size = 212571, upload-time = "2025-06-13T13:01:12.518Z" }, + { url = "https://files.pythonhosted.org/packages/78/f8/96f155de7e9e248ca9c8ff1a40a521d944ba48bec65352da9be2463745bf/coverage-7.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600a1d4106fe66f41e5d0136dfbc68fe7200a5cbe85610ddf094f8f22e1b0300", size = 246377, upload-time = "2025-06-13T13:01:14.87Z" }, + { url = "https://files.pythonhosted.org/packages/3e/cf/1d783bd05b7bca5c10ded5f946068909372e94615a4416afadfe3f63492d/coverage-7.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a876e4c3e5a2a1715a6608906aa5a2e0475b9c0f68343c2ada98110512ab1d8", size = 243394, upload-time = "2025-06-13T13:01:16.23Z" }, + { url = "https://files.pythonhosted.org/packages/02/dd/e7b20afd35b0a1abea09fb3998e1abc9f9bd953bee548f235aebd2b11401/coverage-7.9.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81f34346dd63010453922c8e628a52ea2d2ccd73cb2487f7700ac531b247c8a5", size = 245586, upload-time = "2025-06-13T13:01:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/4e/38/b30b0006fea9d617d1cb8e43b1bc9a96af11eff42b87eb8c716cf4d37469/coverage-7.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:888f8eee13f2377ce86d44f338968eedec3291876b0b8a7289247ba52cb984cd", size = 245396, upload-time = "2025-06-13T13:01:19.164Z" }, + { url = "https://files.pythonhosted.org/packages/31/e4/4d8ec1dc826e16791f3daf1b50943e8e7e1eb70e8efa7abb03936ff48418/coverage-7.9.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9969ef1e69b8c8e1e70d591f91bbc37fc9a3621e447525d1602801a24ceda898", size = 243577, upload-time = "2025-06-13T13:01:22.433Z" }, + { url = "https://files.pythonhosted.org/packages/25/f4/b0e96c5c38e6e40ef465c4bc7f138863e2909c00e54a331da335faf0d81a/coverage-7.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60c458224331ee3f1a5b472773e4a085cc27a86a0b48205409d364272d67140d", size = 244809, upload-time = "2025-06-13T13:01:24.143Z" }, + { url = "https://files.pythonhosted.org/packages/8a/65/27e0a1fa5e2e5079bdca4521be2f5dabf516f94e29a0defed35ac2382eb2/coverage-7.9.1-cp312-cp312-win32.whl", hash = 
"sha256:5f646a99a8c2b3ff4c6a6e081f78fad0dde275cd59f8f49dc4eab2e394332e74", size = 214724, upload-time = "2025-06-13T13:01:25.435Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a8/d5b128633fd1a5e0401a4160d02fa15986209a9e47717174f99dc2f7166d/coverage-7.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:30f445f85c353090b83e552dcbbdad3ec84c7967e108c3ae54556ca69955563e", size = 215535, upload-time = "2025-06-13T13:01:27.861Z" }, + { url = "https://files.pythonhosted.org/packages/a3/37/84bba9d2afabc3611f3e4325ee2c6a47cd449b580d4a606b240ce5a6f9bf/coverage-7.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:af41da5dca398d3474129c58cb2b106a5d93bbb196be0d307ac82311ca234342", size = 213904, upload-time = "2025-06-13T13:01:29.202Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a7/a027970c991ca90f24e968999f7d509332daf6b8c3533d68633930aaebac/coverage-7.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:31324f18d5969feef7344a932c32428a2d1a3e50b15a6404e97cba1cc9b2c631", size = 212358, upload-time = "2025-06-13T13:01:30.909Z" }, + { url = "https://files.pythonhosted.org/packages/f2/48/6aaed3651ae83b231556750280682528fea8ac7f1232834573472d83e459/coverage-7.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0c804506d624e8a20fb3108764c52e0eef664e29d21692afa375e0dd98dc384f", size = 212620, upload-time = "2025-06-13T13:01:32.256Z" }, + { url = "https://files.pythonhosted.org/packages/6c/2a/f4b613f3b44d8b9f144847c89151992b2b6b79cbc506dee89ad0c35f209d/coverage-7.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef64c27bc40189f36fcc50c3fb8f16ccda73b6a0b80d9bd6e6ce4cffcd810bbd", size = 245788, upload-time = "2025-06-13T13:01:33.948Z" }, + { url = "https://files.pythonhosted.org/packages/04/d2/de4fdc03af5e4e035ef420ed26a703c6ad3d7a07aff2e959eb84e3b19ca8/coverage-7.9.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4fe2348cc6ec372e25adec0219ee2334a68d2f5222e0cba9c0d613394e12d86", size = 243001, upload-time = "2025-06-13T13:01:35.285Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e8/eed18aa5583b0423ab7f04e34659e51101135c41cd1dcb33ac1d7013a6d6/coverage-7.9.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ed2186fe52fcc24d4561041979a0dec69adae7bce2ae8d1c49eace13e55c43", size = 244985, upload-time = "2025-06-13T13:01:36.712Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/ae9e5cce8885728c934eaa58ebfa8281d488ef2afa81c3dbc8ee9e6d80db/coverage-7.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:25308bd3d00d5eedd5ae7d4357161f4df743e3c0240fa773ee1b0f75e6c7c0f1", size = 245152, upload-time = "2025-06-13T13:01:39.303Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c8/272c01ae792bb3af9b30fac14d71d63371db227980682836ec388e2c57c0/coverage-7.9.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73e9439310f65d55a5a1e0564b48e34f5369bee943d72c88378f2d576f5a5751", size = 243123, upload-time = "2025-06-13T13:01:40.727Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d0/2819a1e3086143c094ab446e3bdf07138527a7b88cb235c488e78150ba7a/coverage-7.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37ab6be0859141b53aa89412a82454b482c81cf750de4f29223d52268a86de67", size = 244506, upload-time = "2025-06-13T13:01:42.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/4e/9f6117b89152df7b6112f65c7a4ed1f2f5ec8e60c4be8f351d91e7acc848/coverage-7.9.1-cp313-cp313-win32.whl", hash = 
"sha256:64bdd969456e2d02a8b08aa047a92d269c7ac1f47e0c977675d550c9a0863643", size = 214766, upload-time = "2025-06-13T13:01:44.482Z" }, + { url = "https://files.pythonhosted.org/packages/27/0f/4b59f7c93b52c2c4ce7387c5a4e135e49891bb3b7408dcc98fe44033bbe0/coverage-7.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:be9e3f68ca9edb897c2184ad0eee815c635565dbe7a0e7e814dc1f7cbab92c0a", size = 215568, upload-time = "2025-06-13T13:01:45.772Z" }, + { url = "https://files.pythonhosted.org/packages/09/1e/9679826336f8c67b9c39a359352882b24a8a7aee48d4c9cad08d38d7510f/coverage-7.9.1-cp313-cp313-win_arm64.whl", hash = "sha256:1c503289ffef1d5105d91bbb4d62cbe4b14bec4d13ca225f9c73cde9bb46207d", size = 213939, upload-time = "2025-06-13T13:01:47.087Z" }, + { url = "https://files.pythonhosted.org/packages/bb/5b/5c6b4e7a407359a2e3b27bf9c8a7b658127975def62077d441b93a30dbe8/coverage-7.9.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0", size = 213079, upload-time = "2025-06-13T13:01:48.554Z" }, + { url = "https://files.pythonhosted.org/packages/a2/22/1e2e07279fd2fd97ae26c01cc2186e2258850e9ec125ae87184225662e89/coverage-7.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d", size = 213299, upload-time = "2025-06-13T13:01:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/14/c0/4c5125a4b69d66b8c85986d3321520f628756cf524af810baab0790c7647/coverage-7.9.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f", size = 256535, upload-time = "2025-06-13T13:01:51.314Z" }, + { url = "https://files.pythonhosted.org/packages/81/8b/e36a04889dda9960be4263e95e777e7b46f1bb4fc32202612c130a20c4da/coverage-7.9.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029", size = 252756, upload-time = "2025-06-13T13:01:54.403Z" }, + { url = "https://files.pythonhosted.org/packages/98/82/be04eff8083a09a4622ecd0e1f31a2c563dbea3ed848069e7b0445043a70/coverage-7.9.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece", size = 254912, upload-time = "2025-06-13T13:01:56.769Z" }, + { url = "https://files.pythonhosted.org/packages/0f/25/c26610a2c7f018508a5ab958e5b3202d900422cf7cdca7670b6b8ca4e8df/coverage-7.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683", size = 256144, upload-time = "2025-06-13T13:01:58.19Z" }, + { url = "https://files.pythonhosted.org/packages/c5/8b/fb9425c4684066c79e863f1e6e7ecebb49e3a64d9f7f7860ef1688c56f4a/coverage-7.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f", size = 254257, upload-time = "2025-06-13T13:01:59.645Z" }, + { url = "https://files.pythonhosted.org/packages/93/df/27b882f54157fc1131e0e215b0da3b8d608d9b8ef79a045280118a8f98fe/coverage-7.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10", size = 255094, upload-time = "2025-06-13T13:02:01.37Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/cad1c3dbed8b3ee9e16fa832afe365b4e3eeab1fb6edb65ebbf745eabc92/coverage-7.9.1-cp313-cp313t-win32.whl", hash = 
"sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363", size = 215437, upload-time = "2025-06-13T13:02:02.905Z" }, + { url = "https://files.pythonhosted.org/packages/99/4d/fad293bf081c0e43331ca745ff63673badc20afea2104b431cdd8c278b4c/coverage-7.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7", size = 216605, upload-time = "2025-06-13T13:02:05.638Z" }, + { url = "https://files.pythonhosted.org/packages/1f/56/4ee027d5965fc7fc126d7ec1187529cc30cc7d740846e1ecb5e92d31b224/coverage-7.9.1-cp313-cp313t-win_arm64.whl", hash = "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c", size = 214392, upload-time = "2025-06-13T13:02:07.642Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e5/c723545c3fd3204ebde3b4cc4b927dce709d3b6dc577754bb57f63ca4a4a/coverage-7.9.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514", size = 204009, upload-time = "2025-06-13T13:02:25.787Z" }, + { url = "https://files.pythonhosted.org/packages/08/b8/7ddd1e8ba9701dea08ce22029917140e6f66a859427406579fd8d0ca7274/coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c", size = 204000, upload-time = "2025-06-13T13:02:27.173Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jubilant" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/49/9ea5efac9127c76247d42e286e56e26d9b5c01edbf9f24bcfae9aab3cf81/jubilant-1.3.0.tar.gz", hash = "sha256:ff43d6eb67a986958db6317d7ff3df1c8c160d0c56736628919ac1f7319d444e", size = 26842, upload-time = "2025-07-24T22:31:55.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/97/ad9cbc4718cdc4feed0e841ccb2a3d15de7cb1187d63d1e2ba419cc34f51/jubilant-1.3.0-py3-none-any.whl", hash = "sha256:a5ea4a3bf487ab0286eaad0de9df145761657c08beb834931340b9ebb1f41292", size = 26484, upload-time = "2025-07-24T22:31:54.467Z" }, +] + +[[package]] +name = "machine-charm" +version = "0.0.1" +source = { virtual = "." } +dependencies = [ + { name = "charmlibs-apt" }, + { name = "charmlibs-pathops" }, + { name = "ops" }, + { name = "pydantic" }, +] + +[package.dev-dependencies] +integration = [ + { name = "jubilant" }, + { name = "pytest" }, +] +lint = [ + { name = "codespell" }, + { name = "pyright" }, + { name = "ruff" }, +] +unit = [ + { name = "coverage", extra = ["toml"] }, + { name = "ops", extra = ["testing"] }, + { name = "pytest" }, +] + +[package.metadata] +requires-dist = [ + { name = "charmlibs-apt", specifier = ">=1,<2" }, + { name = "charmlibs-pathops", specifier = ">=1.2.1" }, + { name = "ops", specifier = ">=3,<4" }, + { name = "pydantic", specifier = ">=2.12.5" }, +] + +[package.metadata.requires-dev] +integration = [ + { name = "jubilant" }, + { name = "pytest" }, +] +lint = [ + { name = "codespell" }, + { name = "pyright" }, + { name = "ruff" }, +] +unit = [ + { name = "coverage", extras = ["toml"] }, + { name = "ops", extras = ["testing"] }, + { name = "pytest" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/5e/94a8cb759e4e409022229418294e098ca7feca00eb3c467bb20cbd329bda/opentelemetry_api-1.34.1.tar.gz", hash = "sha256:64f0bd06d42824843731d05beea88d4d4b6ae59f9fe347ff7dfa2cc14233bbb3", size = 64987, upload-time = "2025-06-10T08:55:19.818Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/3a/2ba85557e8dc024c0842ad22c570418dc02c36cbd1ab4b832a93edf071b8/opentelemetry_api-1.34.1-py3-none-any.whl", hash = 
"sha256:b7df4cb0830d5a6c29ad0c0691dbae874d8daefa934b8b1d642de48323d32a8c", size = 65767, upload-time = "2025-06-10T08:54:56.717Z" }, +] + +[[package]] +name = "ops" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "opentelemetry-api" }, + { name = "pyyaml" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/92/6bf6c91d4d0e4007d835fe73b1b73159d0c3dde4059c301c572ce2fc3ddd/ops-3.0.0.tar.gz", hash = "sha256:f4709f7699a2b8b0aaa3a6ad891fb8e4925792121fcb762ef264d24f87675680", size = 527591, upload-time = "2025-07-02T10:40:20.152Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/d6/fe94db94ab773efa87223ffbf1f3c243e04c37d82b366c6a900c1ba1ea0e/ops-3.0.0-py3-none-any.whl", hash = "sha256:f71d62c1d5ae58c01acc37fb330c6aa13aa1e2913fc44519b5ac31b93b9181ec", size = 188167, upload-time = "2025-07-02T10:40:18.439Z" }, +] + +[package.optional-dependencies] +testing = [ + { name = "ops-scenario" }, +] + +[[package]] +name = "ops-scenario" +version = "8.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ops" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/23/3737c3a76bd3129eb571538d22e81f58727c0670b6cba24febd4ccdeb2f0/ops_scenario-8.0.0.tar.gz", hash = "sha256:b358228f3c88ab36f93c467e926c2ef20f16a99578a490a9fd9733eb1eab175c", size = 109454, upload-time = "2025-07-02T10:40:32.623Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/c7/78f623df92e3c6220eb8dd963f7fbb3df69934c74aa073deb7c77d4407e3/ops_scenario-8.0.0-py3-none-any.whl", hash = "sha256:5d52e7162cc7cb2a26c8fd62df3f3fb88668a407af4024c1be6003959c80e2de", size = 64300, upload-time = "2025-07-02T10:40:31.002Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = 
"2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = 
"2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" 
}, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = 
"2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.402" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/04/ce0c132d00e20f2d2fb3b3e7c125264ca8b909e693841210534b1ea1752f/pyright-1.1.402.tar.gz", hash = "sha256:85a33c2d40cd4439c66aa946fd4ce71ab2f3f5b8c22ce36a623f59ac22937683", size = 3888207, upload-time = "2025-06-11T08:48:35.759Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/37/1a1c62d955e82adae588be8e374c7f77b165b6cb4203f7d581269959abbc/pyright-1.1.402-py3-none-any.whl", hash = "sha256:2c721f11869baac1884e846232800fe021c33f1b4acb3929cff321f7ea4e2982", size = 5624004, upload-time = "2025-06-11T08:48:33.998Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size 
= 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 
184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "ruff" +version = "0.12.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/eb/8c073deb376e46ae767f4961390d17545e8535921d2f65101720ed8bd434/ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9", size = 5310076, upload-time = "2025-08-21T18:23:22.595Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/e7/560d049d15585d6c201f9eeacd2fd130def3741323e5ccf123786e0e3c95/ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b", size = 11935161, upload-time = "2025-08-21T18:22:26.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b0/ad2464922a1113c365d12b8f80ed70fcfb39764288ac77c995156080488d/ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1", size = 12660884, upload-time = "2025-08-21T18:22:30.925Z" }, + { url = "https://files.pythonhosted.org/packages/d7/f1/97f509b4108d7bae16c48389f54f005b62ce86712120fd8b2d8e88a7cb49/ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839", size = 11872754, upload-time = "2025-08-21T18:22:34.035Z" }, + { url = "https://files.pythonhosted.org/packages/12/ad/44f606d243f744a75adc432275217296095101f83f966842063d78eee2d3/ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844", size = 12092276, upload-time = "2025-08-21T18:22:36.764Z" }, + { url = "https://files.pythonhosted.org/packages/06/1f/ed6c265e199568010197909b25c896d66e4ef2c5e1c3808caf461f6f3579/ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db", size = 11734700, upload-time = "2025-08-21T18:22:39.822Z" }, + { url = "https://files.pythonhosted.org/packages/63/c5/b21cde720f54a1d1db71538c0bc9b73dee4b563a7dd7d2e404914904d7f5/ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e", size = 13468783, upload-time = "2025-08-21T18:22:42.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/9e/39369e6ac7f2a1848f22fb0b00b690492f20811a1ac5c1fd1d2798329263/ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559", size = 14436642, upload-time = "2025-08-21T18:22:45.612Z" }, + { url = "https://files.pythonhosted.org/packages/e3/03/5da8cad4b0d5242a936eb203b58318016db44f5c5d351b07e3f5e211bb89/ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf", size = 13859107, upload-time = "2025-08-21T18:22:48.886Z" }, + { url = "https://files.pythonhosted.org/packages/19/19/dd7273b69bf7f93a070c9cec9494a94048325ad18fdcf50114f07e6bf417/ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b", size = 12886521, upload-time = "2025-08-21T18:22:51.567Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1d/b4207ec35e7babaee62c462769e77457e26eb853fbdc877af29417033333/ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9", size = 13097528, upload-time = "2025-08-21T18:22:54.609Z" }, + { url = "https://files.pythonhosted.org/packages/ff/00/58f7b873b21114456e880b75176af3490d7a2836033779ca42f50de3b47a/ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a", size = 13080443, upload-time = "2025-08-21T18:22:57.413Z" }, + { url = "https://files.pythonhosted.org/packages/12/8c/9e6660007fb10189ccb78a02b41691288038e51e4788bf49b0a60f740604/ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60", size = 11896759, upload-time = "2025-08-21T18:23:00.473Z" }, + { url = "https://files.pythonhosted.org/packages/67/4c/6d092bb99ea9ea6ebda817a0e7ad886f42a58b4501a7e27cd97371d0ba54/ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56", size = 11701463, upload-time = "2025-08-21T18:23:03.211Z" }, + { url = "https://files.pythonhosted.org/packages/59/80/d982c55e91df981f3ab62559371380616c57ffd0172d96850280c2b04fa8/ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9", size = 12691603, upload-time = "2025-08-21T18:23:06.935Z" }, + { url = "https://files.pythonhosted.org/packages/ad/37/63a9c788bbe0b0850611669ec6b8589838faf2f4f959647f2d3e320383ae/ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b", size = 13164356, upload-time = "2025-08-21T18:23:10.225Z" }, + { url = "https://files.pythonhosted.org/packages/47/d4/1aaa7fb201a74181989970ebccd12f88c0fc074777027e2a21de5a90657e/ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266", size = 11896089, upload-time = "2025-08-21T18:23:14.232Z" }, + { url = "https://files.pythonhosted.org/packages/ad/14/2ad38fd4037daab9e023456a4a40ed0154e9971f8d6aed41bdea390aabd9/ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e", size = 13004616, upload-time = "2025-08-21T18:23:17.422Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" 
}, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From 87663274214101111d002f432d8ffa50f7237070 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 16:38:32 +0300 Subject: [PATCH 02/17] feat: added restart logic --- machine-charm/src/charm.py | 34 ++++++++++------------------------ machine-charm/src/workload.py | 9 +++++++-- 2 files changed, 17 insertions(+), 26 deletions(-) diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py index 4516fa05..652df828 100755 --- a/machine-charm/src/charm.py +++ b/machine-charm/src/charm.py @@ -11,7 +11,6 @@ import pydantic from charms.data_platform_libs.v0.data_interfaces import ( DatabaseCreatedEvent, - DatabaseEntityCreatedEvent, DatabaseRequires, ) @@ -43,16 +42,12 @@ class 
MachineCharmCharm(ops.CharmBase): def __init__(self, framework: ops.Framework): super().__init__(framework) framework.observe(self.on.install, self._on_install) - # framework.observe(self.on.start, self._on_start) framework.observe(self.on.config_changed, self._on_config_changed) # Charm events defined in the database requires charm library. self.database = DatabaseRequires( self, relation_name="postgresql", database_name="postgresql" ) self.framework.observe(self.database.on.database_created, self._on_database_created) - self.framework.observe( - self.database.on.database_entity_created, self._on_database_entity_created - ) def _on_database_created(self, event: DatabaseCreatedEvent) -> None: """Handle database created event.""" @@ -63,36 +58,27 @@ def _on_database_created(self, event: DatabaseCreatedEvent) -> None: # Start workload when database is ready. self._start() - def _on_database_entity_created(self, event: DatabaseEntityCreatedEvent) -> None: - """Handle database entity created event.""" - logger.info("Database entity created with connection string: %s", event.connection_string) - def _on_install(self, event: ops.InstallEvent): """Install the workload on the machine.""" - workload.install(self.charm_dir.absolute().as_posix()) + workload.install(self.charm_dir.absolute().as_posix(), os.environ["DATABASE_URL"]) - def _start(self): + def _start(self) -> None: """Start the workload.""" self.unit.status = ops.MaintenanceStatus("starting workload") config = self.load_config(WorkloadConfig) workload.start(config.bind_address, config.workers, config.timeout) self.unit.status = ops.ActiveStatus() - def _on_start(self, event: ops.StartEvent): - """Handle start event.""" + def _on_config_changed(self, event: ops.ConfigChangedEvent): + """Handle config changes.""" + # For simplicity, we will just restart the workload on any config change. + self.unit.status = ops.MaintenanceStatus("config changed, restarting workload") + self._stop() self._start() - def _on_config_changed(self, event: ops.ConfigChangedEvent) -> None: - """Handle config-changed event.""" - self.configure_and_run() - - def configure_and_run(self) -> None: - """Ensure that the workload is running with the correct config.""" - try: - config = self.load_config(WorkloadConfig) - except pydantic.ValidationError: - # The collect-status handler will run next and will set status for the user to see. 
- return + def _stop(self) -> None: + """Stop the workload.""" + workload.stop() if __name__ == "__main__": # pragma: nocover diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index 6cfee095..bc8233e3 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -57,7 +57,7 @@ def run_migrations() -> None: ) -def install(charm_dir: str) -> None: +def install(charm_dir: str, database_url: str) -> None: """Install the workload (by installing a snap, for example).""" # Install apt packages apt.update() @@ -78,7 +78,7 @@ def install(charm_dir: str) -> None: f"{charm_dir}/src/flask/app/requirements.txt", ) # Install the pg_trgm extension - run_command("psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;") + run_command("psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;", database_url) # Then run migrations run_command( "/venv/bin/python", @@ -107,6 +107,11 @@ def start(address: str, workers: str, timeout: str) -> None: ) +def stop() -> None: + """Stop the webapp.""" + run_command("pkill", "-f", "gunicorn") + + # def is_installed() -> bool: # """Return whether the tinyproxy executable is available.""" # return shutil.which("tinyproxy") is not None From b63714ac9db33ca294012a09bbe4dc34e544034c Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 16:46:20 +0300 Subject: [PATCH 03/17] chore: update docs --- machine-charm/README.md | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/machine-charm/README.md b/machine-charm/README.md index 1d1ede09..dae18f7f 100644 --- a/machine-charm/README.md +++ b/machine-charm/README.md @@ -8,19 +8,35 @@ Avoid using this README file for information that is maintained or published elsewhere. Use links instead. --> -# machine-charm +# ubuntu-security-api-vm -Charmhub package name: machine-charm -More information: https://charmhub.io/machine-charm +Charmhub package name: ubuntu-security-api-vm +More information: https://charmhub.io/ubuntu-security-api-vm -Describe your charm in one or two sentences. +This is a machine charm to run the ubuntu-security-api on virtual machines. -## Other resources +## Development + +This charm requires PostgreSQL, which can be deployed with: + +```bash +juju deploy postgresql --channel 16/stable +``` + +To include the files shipped in the repo, the charm has to be packed in destructive mode: +```bash +charmcraft pack --destructive-mode +``` -- [Read more](https://example.com) +Then deploy the charm and integrate it with the postgresql charm: -- [Contributing](CONTRIBUTING.md) +```bash +juju deploy ./ubuntu-security-api-vm_amd64.charm + +juju relate ubuntu-security-api-vm postgresql +``` + +## Other resources - See the [Juju documentation](https://documentation.ubuntu.com/juju/3.6/howto/manage-charms/) for more information about developing and improving charms. 
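The restart logic added in patch 02 above boils down to two shell operations: kill any running gunicorn workers, then relaunch gunicorn with the charm's current config. A minimal sketch of that cycle, assuming `run_command` is a thin `subprocess` wrapper, that gunicorn is installed at `/venv/bin/gunicorn` as in `workload.py`, and with `app:app` as a hypothetical stand-in for the real Flask entry point:

```python
# A minimal sketch (not the charm's actual helpers) of the stop/start cycle
# that patch 02 wires into _on_config_changed.
import subprocess


def stop() -> None:
    """Kill any running gunicorn workers."""
    # pkill exits 1 when no process matched; treat that as "already stopped"
    # so a restart still works even if the workload never came up.
    result = subprocess.run(["pkill", "-f", "gunicorn"], check=False)
    if result.returncode not in (0, 1):
        result.check_returncode()


def start(address: str, workers: str, timeout: str) -> None:
    """Relaunch gunicorn with the charm's current config values."""
    subprocess.run(
        [
            "/venv/bin/gunicorn",
            "app:app",  # hypothetical module path, for illustration only
            "--bind", address,
            "--workers", workers,
            "--timeout", timeout,
            "--daemon",  # detach so the charm hook returns promptly
        ],
        check=True,
    )


def restart(address: str, workers: str, timeout: str) -> None:
    """What _on_config_changed effectively does: stop, then start."""
    stop()
    start(address, workers, timeout)
```

One caveat of this approach: `pkill -f gunicorn` matches against the full command line, so it would also kill any unrelated gunicorn process on the machine; pointing gunicorn at a pidfile (`--pid`) or wrapping it in a systemd unit would make the stop step more targeted.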
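Patch 04 below moves the `pg_trgm` extension setup and the migrations out of `install` and into a dedicated `migrate` step, run from the `database_created` handler so the database is guaranteed to exist first. Since `psycopg` is already a charm dependency, the extension could also be created without shelling out to `psql`; a sketch under the assumption that the event's connection string is a libpq-style DSN or URI:

```python
# Hypothetical alternative to run_command("psql", ...) in patch 04: create
# the pg_trgm extension over a direct psycopg connection.
import psycopg


def ensure_pg_trgm(database_url: str) -> None:
    """Create the pg_trgm extension if it does not exist yet."""
    # The context manager commits on success and closes the connection;
    # CREATE EXTENSION is transactional in PostgreSQL, so plain
    # transaction mode is fine here.
    with psycopg.connect(database_url) as conn:
        conn.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm;")
```

That would drop the runtime dependency on the `psql` client binary being present on the unit.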
From 9c86b0cb87057b13d93c6dc088b547cc1409f942 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 16:54:30 +0300 Subject: [PATCH 04/17] feat: added separate migration step --- machine-charm/src/charm.py | 12 ++++++------ machine-charm/src/workload.py | 6 +++++- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py index 652df828..31225ddd 100755 --- a/machine-charm/src/charm.py +++ b/machine-charm/src/charm.py @@ -5,7 +5,6 @@ """Charm the application.""" import logging -import os import ops import pydantic @@ -47,20 +46,20 @@ def __init__(self, framework: ops.Framework): self.database = DatabaseRequires( self, relation_name="postgresql", database_name="postgresql" ) - self.framework.observe(self.database.on.database_created, self._on_database_created) + framework.observe(self.database.on.database_created, self._on_database_created) def _on_database_created(self, event: DatabaseCreatedEvent) -> None: """Handle database created event.""" logger.info("Database created with connection string: %s", event.connection_string) - - # Add env variable for flask-base - os.environ["DATABASE_URL"] = event.connection_string + self.unit.status = ops.MaintenanceStatus("running migrations") + # Run migrations before starting the workload to ensure the database is ready. + workload.migrate(self.charm_dir.absolute().as_posix(), event.connection_string) # Start workload when database is ready. self._start() def _on_install(self, event: ops.InstallEvent): """Install the workload on the machine.""" - workload.install(self.charm_dir.absolute().as_posix(), os.environ["DATABASE_URL"]) + workload.install(self.charm_dir.absolute().as_posix()) def _start(self) -> None: """Start the workload.""" @@ -78,6 +77,7 @@ def _on_config_changed(self, event: ops.ConfigChangedEvent): def _stop(self) -> None: """Stop the workload.""" + self.unit.status = ops.MaintenanceStatus("stopping workload") workload.stop() diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index bc8233e3..501525b8 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -57,7 +57,7 @@ def run_migrations() -> None: ) -def install(charm_dir: str, database_url: str) -> None: +def install(charm_dir: str) -> None: """Install the workload (by installing a snap, for example).""" # Install apt packages apt.update() @@ -77,6 +77,10 @@ def install(charm_dir: str, database_url: str) -> None: "-r", f"{charm_dir}/src/flask/app/requirements.txt", ) + + +def migrate(charm_dir: str, database_url: str) -> None: + """Run database migrations.""" # Install the pg_trgm extension run_command("psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;", database_url) # Then run migrations From cec2390130a6eace5b56ad130ecefaf1ee504e95 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 16:55:19 +0300 Subject: [PATCH 05/17] feat: added relation warning --- machine-charm/src/charm.py | 1 + 1 file changed, 1 insertion(+) diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py index 31225ddd..b650952e 100755 --- a/machine-charm/src/charm.py +++ b/machine-charm/src/charm.py @@ -42,6 +42,7 @@ def __init__(self, framework: ops.Framework): super().__init__(framework) framework.observe(self.on.install, self._on_install) framework.observe(self.on.config_changed, self._on_config_changed) + self.unit.status = ops.MaintenanceStatus("waiting for database relation") # Charm events defined in the database requires charm library. 
self.database = DatabaseRequires( self, relation_name="postgresql", database_name="postgresql" From 6f2c9aa9100daf4ced2c917797b9459fb159d067 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 17:46:57 +0300 Subject: [PATCH 06/17] feat: add setuptools to install --- machine-charm/pyproject.toml | 1 + machine-charm/src/charm.py | 5 +- machine-charm/src/workload.py | 21 +++---- machine-charm/uv.lock | 102 ++++++++++++++++++++++++++++++++++ 4 files changed, 114 insertions(+), 15 deletions(-) diff --git a/machine-charm/pyproject.toml b/machine-charm/pyproject.toml index 0132358a..bc741d7b 100644 --- a/machine-charm/pyproject.toml +++ b/machine-charm/pyproject.toml @@ -15,6 +15,7 @@ dependencies = [ "ops>=3,<4", "pydantic>=2.12.5", "psycopg[binary]==3.3.3", + "setuptools>=82.0.0", ] [dependency-groups] diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py index b650952e..47bde377 100755 --- a/machine-charm/src/charm.py +++ b/machine-charm/src/charm.py @@ -42,7 +42,6 @@ def __init__(self, framework: ops.Framework): super().__init__(framework) framework.observe(self.on.install, self._on_install) framework.observe(self.on.config_changed, self._on_config_changed) - self.unit.status = ops.MaintenanceStatus("waiting for database relation") # Charm events defined in the database requires charm library. self.database = DatabaseRequires( self, relation_name="postgresql", database_name="postgresql" @@ -61,6 +60,7 @@ def _on_database_created(self, event: DatabaseCreatedEvent) -> None: def _on_install(self, event: ops.InstallEvent): """Install the workload on the machine.""" workload.install(self.charm_dir.absolute().as_posix()) + self.unit.status = ops.MaintenanceStatus("waiting for database relation") def _start(self) -> None: """Start the workload.""" @@ -73,7 +73,8 @@ def _on_config_changed(self, event: ops.ConfigChangedEvent): """Handle config changes.""" # For simplicity, we will just restart the workload on any config change. 
self.unit.status = ops.MaintenanceStatus("config changed, restarting workload") - self._stop() + if workload.is_running(): + self._stop() self._start() def _stop(self) -> None: diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index 501525b8..142a5c39 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -69,6 +69,7 @@ def install(charm_dir: str) -> None: run_command("virtualenv", "/venv") # Install workload python packages + run_command("/venv/bin/python", "-m", "pip", "install", "setuptools") run_command( "/venv/bin/python", "-m", @@ -113,7 +114,13 @@ def start(address: str, workers: str, timeout: str) -> None: def stop() -> None: """Stop the webapp.""" - run_command("pkill", "-f", "gunicorn") + run_command("pkill", "-9", "gunicorn") + + +def is_running() -> bool: + """Return whether the webapp is running.""" + result = subprocess.run(["pgrep", "-f", "gunicorn"], capture_output=True, text=True) + return result.returncode == 0 # def is_installed() -> bool: @@ -121,11 +128,6 @@ def stop() -> None: # return shutil.which("tinyproxy") is not None -# def is_running() -> bool: -# """Return whether tinyproxy is running.""" -# return bool(_get_pid()) - - # def reload_config() -> None: # """Ask tinyproxy to reload config.""" # pid = _get_pid() @@ -136,13 +138,6 @@ def stop() -> None: # os.kill(pid, signal.SIGUSR1) -# def stop() -> None: -# """Stop tinyproxy.""" -# pid = _get_pid() -# if pid: -# os.kill(pid, signal.SIGTERM) - - # def uninstall() -> None: # """Uninstall the tinyproxy executable and remove files.""" # apt.remove_package("tinyproxy-bin") diff --git a/machine-charm/uv.lock b/machine-charm/uv.lock index 1239d526..40345c78 100644 --- a/machine-charm/uv.lock +++ b/machine-charm/uv.lock @@ -179,7 +179,9 @@ dependencies = [ { name = "charmlibs-apt" }, { name = "charmlibs-pathops" }, { name = "ops" }, + { name = "psycopg", extra = ["binary"] }, { name = "pydantic" }, + { name = "setuptools" }, ] [package.dev-dependencies] @@ -203,7 +205,9 @@ requires-dist = [ { name = "charmlibs-apt", specifier = ">=1,<2" }, { name = "charmlibs-pathops", specifier = ">=1.2.1" }, { name = "ops", specifier = ">=3,<4" }, + { name = "psycopg", extras = ["binary"], specifier = "==3.3.3" }, { name = "pydantic", specifier = ">=2.12.5" }, + { name = "setuptools", specifier = ">=82.0.0" }, ] [package.metadata.requires-dev] @@ -295,6 +299,86 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "psycopg" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/b6/379d0a960f8f435ec78720462fd94c4863e7a31237cf81bf76d0af5883bf/psycopg-3.3.3.tar.gz", hash = "sha256:5e9a47458b3c1583326513b2556a2a9473a1001a56c9efe9e587245b43148dd9", size = 165624, upload-time = "2026-02-18T16:52:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/5b/181e2e3becb7672b502f0ed7f16ed7352aca7c109cfb94cf3878a9186db9/psycopg-3.3.3-py3-none-any.whl", hash = "sha256:f96525a72bcfade6584ab17e89de415ff360748c766f0106959144dcbb38c698", size = 212768, upload-time = 
"2026-02-18T16:46:27.365Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/d8/a763308a41e2ecfb6256ba0877d340c2f2b124c8b2746401863d96fa2c7a/psycopg_binary-3.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3385b58b2fe408a13d084c14b8dcf468cd36cbbe774408250facc128f9fa75c", size = 4609758, upload-time = "2026-02-18T16:46:33.132Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a9/f8a683e85400c1208685e7c895abc049dc13aa0b6ea989e6adf0a3681fe0/psycopg_binary-3.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1bef235a50a80f6aba05147002bc354559657cb6386dbd04d8e1c97d1d7cbe84", size = 4676740, upload-time = "2026-02-18T16:46:42.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7d/03512c4aaac8a58fc3b1221f38293aa517a1950d10ef8646c72c49addc7d/psycopg_binary-3.3.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:97c839717bf8c8df3f6d983a20949c4fb22e2a34ee172e3e427ede363feda27b", size = 5496335, upload-time = "2026-02-18T16:46:51.517Z" }, + { url = "https://files.pythonhosted.org/packages/8a/bc/23319b4b1c2c0b810d225e1b6f16efbb16150074fc0ea96bfcabdf59ee09/psycopg_binary-3.3.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:48e500cf1c0984dacf1f28ea482c3cdbb4c2288d51c336c04bc64198ab21fc51", size = 5172032, upload-time = "2026-02-18T16:47:00.878Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/6d61dc0a56654c558a37b2d9b2094e470aa12621305cc7935fd769122e32/psycopg_binary-3.3.3-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb36a08859b9432d94ea6b26ec41a2f98f83f14868c91321d0c1e11f672eeae7", size = 6763107, upload-time = "2026-02-18T16:47:11.784Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b5/e2a3c90aa1059f5b5f593379caad7be3cc3c2ce1ddfc7730e39854e174fe/psycopg_binary-3.3.3-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0dde92cfde09293fb63b3f547919ba7d73bd2654573c03502b3263dd0218e44e", size = 5006494, upload-time = "2026-02-18T16:47:17.062Z" }, + { url = "https://files.pythonhosted.org/packages/5d/3e/bf126e0a1f864e191b7f3eeea667ee2ce13d582b036255fb8b12946d1f7a/psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78c9ce98caaf82ac8484d269791c1b403d7598633e0e4e2fa1097baae244e2f1", size = 4533850, upload-time = "2026-02-18T16:47:21.673Z" }, + { url = "https://files.pythonhosted.org/packages/f4/d8/bb5e8d395deb945629aa0c65d12ab90ec3bfcbdf56be89e2a84d001864c9/psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d593612758d0041cb13cb0003f7f8d3fabb7ad9319e651e78afae49b1cf5860e", size = 4223316, upload-time = "2026-02-18T16:47:25.82Z" }, + { url = "https://files.pythonhosted.org/packages/c2/70/33eef61b0f0fd41ebf93b9699f44067313a45016827f67b3c8cc41f0a7ab/psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:f24e8e17035200a465c178e9ea945527ad0738118694184c450f1192a452ff25", size = 3954515, upload-time = "2026-02-18T16:47:30.434Z" }, + { url = "https://files.pythonhosted.org/packages/ea/db/27c2b3b9698e713e83e11e8540daa27516f9e90390ec21a41091cb15fcaf/psycopg_binary-3.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e7b607f0e14f2a4cf7e78a05ebd13df6144acfba87cb90842e70d3f125d9f53f", size = 4260274, upload-time = 
"2026-02-18T16:47:36.128Z" }, + { url = "https://files.pythonhosted.org/packages/a1/3b/71e5d603059bf5474215f573a3e2d357a4e95672b26e04d41674400d4862/psycopg_binary-3.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b27d3a23c79fa59557d2cc63a7e8bb4c7e022c018558eda36f9d7c4e6b99a6e0", size = 3557375, upload-time = "2026-02-18T16:47:42.799Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/b389119dd754483d316805260f3e73cdcad97925839107cc7a296f6132b1/psycopg_binary-3.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a89bb9ee11177b2995d87186b1d9fa892d8ea725e85eab28c6525e4cc14ee048", size = 4609740, upload-time = "2026-02-18T16:47:51.093Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9976eef20f61840285174d360da4c820a311ab39d6b82fa09fbb545be825/psycopg_binary-3.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f7d0cf072c6fbac3795b08c98ef9ea013f11db609659dcfc6b1f6cc31f9e181", size = 4676837, upload-time = "2026-02-18T16:47:55.523Z" }, + { url = "https://files.pythonhosted.org/packages/9f/f2/d28ba2f7404fd7f68d41e8a11df86313bd646258244cb12a8dd83b868a97/psycopg_binary-3.3.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:90eecd93073922f085967f3ed3a98ba8c325cbbc8c1a204e300282abd2369e13", size = 5497070, upload-time = "2026-02-18T16:47:59.929Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/6c5c54b815edeb30a281cfcea96dc93b3bb6be939aea022f00cab7aa1420/psycopg_binary-3.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dac7ee2f88b4d7bb12837989ca354c38d400eeb21bce3b73dac02622f0a3c8d6", size = 5172410, upload-time = "2026-02-18T16:48:05.665Z" }, + { url = "https://files.pythonhosted.org/packages/51/75/8206c7008b57de03c1ada46bd3110cc3743f3fd9ed52031c4601401d766d/psycopg_binary-3.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b62cf8784eb6d35beaee1056d54caf94ec6ecf2b7552395e305518ab61eb8fd2", size = 6763408, upload-time = "2026-02-18T16:48:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/d4/5a/ea1641a1e6c8c8b3454b0fcb43c3045133a8b703e6e824fae134088e63bd/psycopg_binary-3.3.3-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a39f34c9b18e8f6794cca17bfbcd64572ca2482318db644268049f8c738f35a6", size = 5006255, upload-time = "2026-02-18T16:48:22.176Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fb/538df099bf55ae1637d52d7ccb6b9620b535a40f4c733897ac2b7bb9e14c/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:883d68d48ca9ff3cb3d10c5fdebea02c79b48eecacdddbf7cce6e7cdbdc216b8", size = 4532694, upload-time = "2026-02-18T16:48:27.338Z" }, + { url = "https://files.pythonhosted.org/packages/a1/d1/00780c0e187ea3c13dfc53bd7060654b2232cd30df562aac91a5f1c545ac/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:cab7bc3d288d37a80aa8c0820033250c95e40b1c2b5c57cf59827b19c2a8b69d", size = 4222833, upload-time = "2026-02-18T16:48:31.221Z" }, + { url = "https://files.pythonhosted.org/packages/7a/34/a07f1ff713c51d64dc9f19f2c32be80299a2055d5d109d5853662b922cb4/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:56c767007ca959ca32f796b42379fc7e1ae2ed085d29f20b05b3fc394f3715cc", size = 3952818, upload-time = "2026-02-18T16:48:35.869Z" }, + { url = "https://files.pythonhosted.org/packages/d3/67/d33f268a7759b4445f3c9b5a181039b01af8c8263c865c1be7a6444d4749/psycopg_binary-3.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:da2f331a01af232259a21573a01338530c6016dcfad74626c01330535bcd8628", size = 4258061, upload-time = "2026-02-18T16:48:41.365Z" }, + { url = "https://files.pythonhosted.org/packages/b4/3b/0d8d2c5e8e29ccc07d28c8af38445d9d9abcd238d590186cac82ee71fc84/psycopg_binary-3.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:19f93235ece6dbfc4036b5e4f6d8b13f0b8f2b3eeb8b0bd2936d406991bcdd40", size = 3558915, upload-time = "2026-02-18T16:48:46.679Z" }, + { url = "https://files.pythonhosted.org/packages/90/15/021be5c0cbc5b7c1ab46e91cc3434eb42569f79a0592e67b8d25e66d844d/psycopg_binary-3.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6698dbab5bcef8fdb570fc9d35fd9ac52041771bfcfe6fd0fc5f5c4e36f1e99d", size = 4591170, upload-time = "2026-02-18T16:48:55.594Z" }, + { url = "https://files.pythonhosted.org/packages/f1/54/a60211c346c9a2f8c6b272b5f2bbe21f6e11800ce7f61e99ba75cf8b63e1/psycopg_binary-3.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:329ff393441e75f10b673ae99ab45276887993d49e65f141da20d915c05aafd8", size = 4670009, upload-time = "2026-02-18T16:49:03.608Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/ac7c18671347c553362aadbf65f92786eef9540676ca24114cc02f5be405/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:eb072949b8ebf4082ae24289a2b0fd724da9adc8f22743409d6fd718ddb379df", size = 5469735, upload-time = "2026-02-18T16:49:10.128Z" }, + { url = "https://files.pythonhosted.org/packages/7f/c3/4f4e040902b82a344eff1c736cde2f2720f127fe939c7e7565706f96dd44/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:263a24f39f26e19ed7fc982d7859a36f17841b05bebad3eb47bb9cd2dd785351", size = 5152919, upload-time = "2026-02-18T16:49:16.335Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e7/d929679c6a5c212bcf738806c7c89f5b3d0919f2e1685a0e08d6ff877945/psycopg_binary-3.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5152d50798c2fa5bd9b68ec68eb68a1b71b95126c1d70adaa1a08cd5eefdc23d", size = 6738785, upload-time = "2026-02-18T16:49:22.687Z" }, + { url = "https://files.pythonhosted.org/packages/69/b0/09703aeb69a9443d232d7b5318d58742e8ca51ff79f90ffe6b88f1db45e7/psycopg_binary-3.3.3-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9d6a1e56dd267848edb824dbeb08cf5bac649e02ee0b03ba883ba3f4f0bd54f2", size = 4979008, upload-time = "2026-02-18T16:49:27.313Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a6/e662558b793c6e13a7473b970fee327d635270e41eded3090ef14045a6a5/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73eaaf4bb04709f545606c1db2f65f4000e8a04cdbf3e00d165a23004692093e", size = 4508255, upload-time = "2026-02-18T16:49:31.575Z" }, + { url = "https://files.pythonhosted.org/packages/5f/7f/0f8b2e1d5e0093921b6f324a948a5c740c1447fbb45e97acaf50241d0f39/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:162e5675efb4704192411eaf8e00d07f7960b679cd3306e7efb120bb8d9456cc", size = 4189166, upload-time = "2026-02-18T16:49:35.801Z" }, + { url = "https://files.pythonhosted.org/packages/92/ec/ce2e91c33bc8d10b00c87e2f6b0fb570641a6a60042d6a9ae35658a3a797/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:fab6b5e37715885c69f5d091f6ff229be71e235f272ebaa35158d5a46fd548a0", size = 3924544, upload-time = "2026-02-18T16:49:41.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/2f/7718141485f73a924205af60041c392938852aa447a94c8cbd222ff389a1/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a4aab31bd6d1057f287c96c0effca3a25584eb9cc702f282ecb96ded7814e830", size = 4235297, upload-time = "2026-02-18T16:49:46.726Z" }, + { url = "https://files.pythonhosted.org/packages/57/f9/1add717e2643a003bbde31b1b220172e64fbc0cb09f06429820c9173f7fc/psycopg_binary-3.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:59aa31fe11a0e1d1bcc2ce37ed35fe2ac84cd65bb9036d049b1a1c39064d0f14", size = 3547659, upload-time = "2026-02-18T16:49:52.999Z" }, + { url = "https://files.pythonhosted.org/packages/03/0a/cac9fdf1df16a269ba0e5f0f06cac61f826c94cadb39df028cdfe19d3a33/psycopg_binary-3.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05f32239aec25c5fb15f7948cffdc2dc0dac098e48b80a140e4ba32b572a2e7d", size = 4590414, upload-time = "2026-02-18T16:50:01.441Z" }, + { url = "https://files.pythonhosted.org/packages/9c/c0/d8f8508fbf440edbc0099b1abff33003cd80c9e66eb3a1e78834e3fb4fb9/psycopg_binary-3.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c84f9d214f2d1de2fafebc17fa68ac3f6561a59e291553dfc45ad299f4898c1", size = 4669021, upload-time = "2026-02-18T16:50:08.803Z" }, + { url = "https://files.pythonhosted.org/packages/04/05/097016b77e343b4568feddf12c72171fc513acef9a4214d21b9478569068/psycopg_binary-3.3.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e77957d2ba17cada11be09a5066d93026cdb61ada7c8893101d7fe1c6e1f3925", size = 5467453, upload-time = "2026-02-18T16:50:14.985Z" }, + { url = "https://files.pythonhosted.org/packages/91/23/73244e5feb55b5ca109cede6e97f32ef45189f0fdac4c80d75c99862729d/psycopg_binary-3.3.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:42961609ac07c232a427da7c87a468d3c82fee6762c220f38e37cfdacb2b178d", size = 5151135, upload-time = "2026-02-18T16:50:24.82Z" }, + { url = "https://files.pythonhosted.org/packages/11/49/5309473b9803b207682095201d8708bbc7842ddf3f192488a69204e36455/psycopg_binary-3.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae07a3114313dd91fce686cab2f4c44af094398519af0e0f854bc707e1aeedf1", size = 6737315, upload-time = "2026-02-18T16:50:35.106Z" }, + { url = "https://files.pythonhosted.org/packages/d4/5d/03abe74ef34d460b33c4d9662bf6ec1dd38888324323c1a1752133c10377/psycopg_binary-3.3.3-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d257c58d7b36a621dcce1d01476ad8b60f12d80eb1406aee4cf796f88b2ae482", size = 4979783, upload-time = "2026-02-18T16:50:42.067Z" }, + { url = "https://files.pythonhosted.org/packages/f0/6c/3fbf8e604e15f2f3752900434046c00c90bb8764305a1b81112bff30ba24/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:07c7211f9327d522c9c47560cae00a4ecf6687f4e02d779d035dd3177b41cb12", size = 4509023, upload-time = "2026-02-18T16:50:50.116Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6b/1a06b43b7c7af756c80b67eac8bfaa51d77e68635a8a8d246e4f0bb7604a/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8e7e9eca9b363dbedeceeadd8be97149d2499081f3c52d141d7cd1f395a91f83", size = 4185874, upload-time = "2026-02-18T16:50:55.97Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d3/bf49e3dcaadba510170c8d111e5e69e5ae3f981c1554c5bb71c75ce354bb/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:cb85b1d5702877c16f28d7b92ba030c1f49ebcc9b87d03d8c10bf45a2f1c7508", size = 3925668, 
upload-time = "2026-02-18T16:51:03.299Z" }, + { url = "https://files.pythonhosted.org/packages/f8/92/0aac830ed6a944fe334404e1687a074e4215630725753f0e3e9a9a595b62/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d4606c84d04b80f9138d72f1e28c6c02dc5ae0c7b8f3f8aaf89c681ce1cd1b1", size = 4234973, upload-time = "2026-02-18T16:51:09.097Z" }, + { url = "https://files.pythonhosted.org/packages/2e/96/102244653ee5a143ece5afe33f00f52fe64e389dfce8dbc87580c6d70d3d/psycopg_binary-3.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:74eae563166ebf74e8d950ff359be037b85723d99ca83f57d9b244a871d6c13b", size = 3551342, upload-time = "2026-02-18T16:51:13.892Z" }, + { url = "https://files.pythonhosted.org/packages/a2/71/7a57e5b12275fe7e7d84d54113f0226080423a869118419c9106c083a21c/psycopg_binary-3.3.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:497852c5eaf1f0c2d88ab74a64a8097c099deac0c71de1cbcf18659a8a04a4b2", size = 4607368, upload-time = "2026-02-18T16:51:19.295Z" }, + { url = "https://files.pythonhosted.org/packages/c7/04/cb834f120f2b2c10d4003515ef9ca9d688115b9431735e3936ae48549af8/psycopg_binary-3.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:258d1ea53464d29768bf25930f43291949f4c7becc706f6e220c515a63a24edd", size = 4687047, upload-time = "2026-02-18T16:51:23.84Z" }, + { url = "https://files.pythonhosted.org/packages/40/e9/47a69692d3da9704468041aa5ed3ad6fc7f6bb1a5ae788d261a26bbca6c7/psycopg_binary-3.3.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:111c59897a452196116db12e7f608da472fbff000693a21040e35fc978b23430", size = 5487096, upload-time = "2026-02-18T16:51:29.645Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b6/0e0dd6a2f802864a4ae3dbadf4ec620f05e3904c7842b326aafc43e5f464/psycopg_binary-3.3.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:17bb6600e2455993946385249a3c3d0af52cd70c1c1cdbf712e9d696d0b0bf1b", size = 5168720, upload-time = "2026-02-18T16:51:36.499Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0d/977af38ac19a6b55d22dff508bd743fd7c1901e1b73657e7937c7cccb0a3/psycopg_binary-3.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:642050398583d61c9856210568eb09a8e4f2fe8224bf3be21b67a370e677eead", size = 6762076, upload-time = "2026-02-18T16:51:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/34/40/912a39d48322cf86895c0eaf2d5b95cb899402443faefd4b09abbba6b6e1/psycopg_binary-3.3.3-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:533efe6dc3a7cba5e2a84e38970786bb966306863e45f3db152007e9f48638a6", size = 4997623, upload-time = "2026-02-18T16:51:47.707Z" }, + { url = "https://files.pythonhosted.org/packages/98/0c/c14d0e259c65dc7be854d926993f151077887391d5a081118907a9d89603/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5958dbf28b77ce2033482f6cb9ef04d43f5d8f4b7636e6963d5626f000efb23e", size = 4532096, upload-time = "2026-02-18T16:51:51.421Z" }, + { url = "https://files.pythonhosted.org/packages/39/21/8b7c50a194cfca6ea0fd4d1f276158307785775426e90700ab2eba5cd623/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:a6af77b6626ce92b5817bf294b4d45ec1a6161dba80fc2d82cdffdd6814fd023", size = 4208884, upload-time = "2026-02-18T16:51:57.336Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2c/a4981bf42cf30ebba0424971d7ce70a222ae9b82594c42fc3f2105d7b525/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = 
"sha256:47f06fcbe8542b4d96d7392c476a74ada521c5aebdb41c3c0155f6595fc14c8d", size = 3944542, upload-time = "2026-02-18T16:52:04.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/e9/b7c29b56aa0b85a4e0c4d89db691c1ceef08f46a356369144430c155a2f5/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7800e6c6b5dc4b0ca7cc7370f770f53ac83886b76afda0848065a674231e856", size = 4254339, upload-time = "2026-02-18T16:52:10.444Z" }, + { url = "https://files.pythonhosted.org/packages/98/5a/291d89f44d3820fffb7a04ebc8f3ef5dda4f542f44a5daea0c55a84abf45/psycopg_binary-3.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:165f22ab5a9513a3d7425ffb7fcc7955ed8ccaeef6d37e369d6cc1dff1582383", size = 3652796, upload-time = "2026-02-18T16:52:14.02Z" }, +] + [[package]] name = "pydantic" version = "2.12.5" @@ -538,6 +622,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, ] +[[package]] +name = "setuptools" +version = "82.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/f3/748f4d6f65d1756b9ae577f329c951cda23fb900e4de9f70900ced962085/setuptools-82.0.0.tar.gz", hash = "sha256:22e0a2d69474c6ae4feb01951cb69d515ed23728cf96d05513d36e42b62b37cb", size = 1144893, upload-time = "2026-02-08T15:08:40.206Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0", size = 1003468, upload-time = "2026-02-08T15:08:38.723Z" }, +] + [[package]] name = "tomli" version = "2.2.1" @@ -598,6 +691,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + [[package]] name = "websocket-client" version = "1.8.0" From d323feb4bfbea36654f11bc60de8602403c72383 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Fri, 27 Feb 2026 19:14:32 +0300 Subject: [PATCH 07/17] feat: pin setuptools --- machine-charm/src/charm.py | 7 ++++++- machine-charm/src/workload.py | 10 ++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py index 47bde377..e2cd881d 100755 --- a/machine-charm/src/charm.py +++ b/machine-charm/src/charm.py @@ -66,7 +66,12 @@ def _start(self) -> None: """Start the workload.""" 
self.unit.status = ops.MaintenanceStatus("starting workload") config = self.load_config(WorkloadConfig) - workload.start(config.bind_address, config.workers, config.timeout) + workload.start( + self.charm_dir.absolute().as_posix(), + config.bind_address, + config.workers, + config.timeout, + ) self.unit.status = ops.ActiveStatus() def _on_config_changed(self, event: ops.ConfigChangedEvent): diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index 142a5c39..3b311663 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -62,14 +62,16 @@ def install(charm_dir: str) -> None: # Install apt packages apt.update() apt.add_package("libsodium-dev", "1.0.18-1ubuntu0.24.04.1") - apt.add_package("python3-virtualenv", "20.25.0+ds-2") + apt.add_package("python3-venv", "3.12.3-0ubuntu2.1") apt.add_package("postgresql-16", "16.11-0ubuntu0.24.04.1") # Create a virtual environment - run_command("virtualenv", "/venv") + run_command("python3", "-m", "venv", "/venv") + + # Install setuptools to support pkg_resources + run_command("/venv/bin/python", "-m", "pip", "install", "setuptools==80.10.2") # Install workload python packages - run_command("/venv/bin/python", "-m", "pip", "install", "setuptools") run_command( "/venv/bin/python", "-m", @@ -96,7 +98,7 @@ def migrate(charm_dir: str, database_url: str) -> None: ) -def start(address: str, workers: str, timeout: str) -> None: +def start(charm_dir: str, address: str, workers: str, timeout: str) -> None: """Start the webapp.""" run_command( "/venv/bin/python", From dd14daa9795af6dcb4f418ce4d310e1c10dadcc5 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Mon, 2 Mar 2026 14:39:02 +0300 Subject: [PATCH 08/17] feat: added upload actions --- machine-charm/README.md | 22 +- machine-charm/charmcraft.yaml | 31 ++- machine-charm/src/charm.py | 186 ++++++++++++++--- machine-charm/src/workload.py | 238 +++++++++++++--------- machine-charm/tests/unit/test_charm.py | 209 +++++++++++++++++-- machine-charm/tests/unit/test_workload.py | 152 ++++++++++++++ 6 files changed, 692 insertions(+), 146 deletions(-) create mode 100644 machine-charm/tests/unit/test_workload.py diff --git a/machine-charm/README.md b/machine-charm/README.md index dae18f7f..9a270b9f 100644 --- a/machine-charm/README.md +++ b/machine-charm/README.md @@ -15,7 +15,7 @@ More information: https://charmhub.io/ubuntu-security-api-vm This is a machine charm to run the ubuntu-security-api on virtual machines. -## Development +## Running locally This charm requires PostgreSQL, which can be deployed with: @@ -37,6 +37,26 @@ juju deploy ./ubuntu-security-api-vm_amd64.charm juju relate ubuntu-security-api-vm postgresql ``` +### Configuring the charm + +You'll also need to set the secrets required by the charm config before the application can run. + +To create them: + +```bash +juju add-secret secret-key secret-key=<value> +juju add-secret oauth-token-salt oauth-token-salt=<value> +``` + +Then grant the charm access to the secrets and set the config options: + +```bash +juju grant-secret secret-key ubuntu-security-api-vm +juju grant-secret oauth-token-salt ubuntu-security-api-vm +juju config ubuntu-security-api-vm oauth-token-salt=secret:d6id5jn91c5s41im2dtg +juju config ubuntu-security-api-vm secret-key=secret:d6idfa791c5s41im2dug +``` + +## Other resources - See the [Juju documentation](https://documentation.ubuntu.com/juju/3.6/howto/manage-charms/) for more information about developing and improving charms.
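The charmcraft.yaml diff below introduces three actions (upload-database, show-install-logs and show-gunicorn-logs). As a sketch of how they might be invoked, assuming the Juju 3.x run syntax; dump.sql is a placeholder filename, and the snapshot must first be copied to /tmp on the unit, which is where the action handler looks for it:

```bash
# Copy a database snapshot to where the action expects it (/tmp/<filename>).
juju scp ./dump.sql ubuntu-security-api-vm/0:/tmp/dump.sql

# Restore the snapshot; the charm re-runs migrations afterwards to verify it.
juju run ubuntu-security-api-vm/0 upload-database filename=dump.sql

# Fetch the gunicorn logs for debugging.
juju run ubuntu-security-api-vm/0 show-gunicorn-logs
```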
diff --git a/machine-charm/charmcraft.yaml b/machine-charm/charmcraft.yaml index 9db9e716..f7e4fa6d 100644 --- a/machine-charm/charmcraft.yaml +++ b/machine-charm/charmcraft.yaml @@ -50,15 +50,30 @@ requires: config: options: - database_url: + secret-key: type: secret - description: "Primary connection string" - replica-one: - type: secret - description: "First read-replica connection string" - replica-two: - type: secret - description: "Second read-replica connection string" + description: "Secret key used to encode and decode JWT tokens" oauth-token-salt: type: secret description: "Salt used to encode and decode OAuth tokens" + +actions: + upload-database: + description: Upload a PostgreSQL database snapshot. + params: + filename: + type: string + description: The name of the database .sql file, which must already be placed under /tmp/ on the unit. + required: + - filename + additionalProperties: false + show-install-logs: + description: Show logs from the install process. + params: {} + required: [] + additionalProperties: false + show-gunicorn-logs: + description: Show the gunicorn logs. + params: {} + required: [] + additionalProperties: false \ No newline at end of file diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py index e2cd881d..7cd33397 100755 --- a/machine-charm/src/charm.py +++ b/machine-charm/src/charm.py @@ -20,17 +20,8 @@ class WorkloadConfig(pydantic.BaseModel): - database: str = pydantic.Field("", description="Configures the database url") - replica_one: str = pydantic.Field( - "", description="Configures the database url for a replica if available." - ) - replica_two: str = pydantic.Field( - "", description="Configures the database url for a second replica if available." - ) - oauth_token_salt: str = pydantic.Field( - "", description="Salt used to encode and decode OAuth tokens" - ) - bind_address: str = pydantic.Field("0.0.0.0:8000", description="Address to bind the webapp to") + """Pydantic model for charm configuration.""" + workers: str = pydantic.Field("3", description="Number of workers for the webapp") timeout: str = pydantic.Field("30", description="Worker timeout for the webapp") @@ -41,52 +32,191 @@ class MachineCharmCharm(ops.CharmBase): def __init__(self, framework: ops.Framework): super().__init__(framework) framework.observe(self.on.install, self._on_install) + framework.observe(self.on.start, self._on_start) + framework.observe(self.on.stop, self._on_stop) framework.observe(self.on.config_changed, self._on_config_changed) # Charm events defined in the database requires charm library. self.database = DatabaseRequires( self, relation_name="postgresql", database_name="postgresql" ) framework.observe(self.database.on.database_created, self._on_database_created) + framework.observe(self.on["upload-database"].action, self._on_upload_database_action) + framework.observe(self.on["show-install-logs"].action, self._show_install_logs) + framework.observe(self.on["show-gunicorn-logs"].action, self._show_gunicorn_logs) + + def _get_database_uri(self) -> str: + """Get the writeable database URI from the relation data. + + This reads from the relation data bag on every event, since charm + instances are ephemeral and cannot persist state across events.
+ """ + if not self.database.relations: + return "" + + relation = self.database.relations[0] + relation_id = relation.id + relation_data = self.database.fetch_relation_data([relation_id]).get(relation_id, {}) + + uris = relation_data.get("uris", "") + if not uris: + return "" + + # Filter out read-only URIs to get the writeable one + uri_set = set(uris.split(",")) + read_only_uris = relation_data.get("read-only-uris", "") + if len(uri_set) > 1 and read_only_uris: + uri_set -= set(read_only_uris.split(",")) + + return uri_set.pop() if uri_set else "" + + @staticmethod + def _get_writeable_uri(event: DatabaseCreatedEvent) -> str: + """Extract the writeable database URI from a database event. + + When multiple URIs are provided, the read-only URIs are subtracted + to determine the writeable one. + """ + if not event.uris: + raise RuntimeError("No database URIs provided in the event.") + + uri_list = set(event.uris.split(",")) + if len(uri_list) > 1 and event.read_only_uris: + read_only_uris = set(event.read_only_uris.split(",")) + writeable_uris = uri_list - read_only_uris + return writeable_uris.pop() + return event.uris + + def _on_upload_database_action(self, event: ops.ActionEvent) -> None: + """Handle the upload-database action.""" + params = event.load_params(UploadDatabaseAction, errors="fail") + event.log(f"Uploading database file: {params.filename}") + + if not self.database.is_resource_created(): + event.fail("Database relation is not ready") + return + + # Dump new database from the uploaded file + self.database.relation_data + + workload.restore_database_from_file(params.filename, self.database) + try: + # Run migrations to verify the database is in a good state after the restore. + workload.migrate(self.charm_dir.absolute().as_posix(), self._get_database_uri()) + event.set_results({"message": "Database migrated successfully"}) + except Exception as e: + logger.error("Failed to migrate database: %s", e) + event.fail(f"Failed to migrate database: {e}") + + def _on_install(self, event: ops.InstallEvent): + """Install the workload on the machine.""" + workload.install(self.charm_dir.absolute().as_posix()) + if not self.database.is_resource_created(): + self.unit.status = ops.BlockedStatus("waiting for database relation") + return def _on_database_created(self, event: DatabaseCreatedEvent) -> None: """Handle database created event.""" - logger.info("Database created with connection string: %s", event.connection_string) + logger.info("Database created with name: %s", event.database) + + connection_string = self._get_writeable_uri(event) self.unit.status = ops.MaintenanceStatus("running migrations") + # Run migrations before starting the workload to ensure the database is ready. - workload.migrate(self.charm_dir.absolute().as_posix(), event.connection_string) + workload.migrate(self.charm_dir.absolute().as_posix(), connection_string) # Start workload when database is ready. 
- self._start() - - def _on_install(self, event: ops.InstallEvent): - """Install the workload on the machine.""" - workload.install(self.charm_dir.absolute().as_posix()) - self.unit.status = ops.MaintenanceStatus("waiting for database relation") + self._restart() def _start(self) -> None: """Start the workload.""" self.unit.status = ops.MaintenanceStatus("starting workload") - config = self.load_config(WorkloadConfig) + if not self.database.is_resource_created(): + self.unit.status = ops.BlockedStatus("waiting for database relation") + return + + database_uri = self._get_database_uri() + if not database_uri: + self.unit.status = ops.BlockedStatus("waiting for database URI") + return + + try: + oauth_token_salt_id: str = self.config.get("oauth-token-salt") # type: ignore + secret_key_id: str = self.config.get("secret-key") # type: ignore + + oauth_token_salt = ( + self.model.get_secret(id=oauth_token_salt_id) + .get_content(refresh=True) + .get("oauth-token-salt", "") + ) + secret_key = ( + self.model.get_secret(id=secret_key_id) + .get_content(refresh=True) + .get("secret-key", "") + ) + except ops.SecretNotFoundError as e: + logger.error("Required secret not found: %s", e) + self.unit.status = ops.BlockedStatus("missing secret: %s" % str(e)) + return + workload.start( self.charm_dir.absolute().as_posix(), - config.bind_address, - config.workers, - config.timeout, + self.config.get("workers", "3"), # type: ignore + self.config.get("timeout", "60"), # type: ignore + secret_key, + oauth_token_salt, + database_uri, ) - self.unit.status = ops.ActiveStatus() + # Expose the webapp port. + self.unit.set_ports(8000) + + if workload.is_running(): + self.unit.status = ops.ActiveStatus("application has started") + else: + self.unit.status = ops.BlockedStatus("failed to start workload") + logger.error("Failed to start workload") def _on_config_changed(self, event: ops.ConfigChangedEvent): """Handle config changes.""" # For simplicity, we will just restart the workload on any config change. 
- self.unit.status = ops.MaintenanceStatus("config changed, restarting workload") - if workload.is_running(): - self._stop() - self._start() + self.unit.status = ops.MaintenanceStatus("config changed") + self.load_config(WorkloadConfig) # Validate config before restarting + self._restart() def _stop(self) -> None: """Stop the workload.""" self.unit.status = ops.MaintenanceStatus("stopping workload") workload.stop() + def _restart(self) -> None: + """Restart the workload.""" + self.unit.status = ops.MaintenanceStatus("restarting workload") + if workload.is_running(): + self._stop() + self._start() + + def _on_start(self, event: ops.StartEvent): + """Handle the start event.""" + self._start() + + def _on_stop(self, event: ops.StopEvent): + """Handle the stop event.""" + self._stop() + + def _show_install_logs(self, event: ops.ActionEvent) -> None: + """Show logs from the install process.""" + with open(workload.INSTALL_LOG_FILE, "r") as log_file: + event.set_results({"install_logs": log_file.read()}) + + def _show_gunicorn_logs(self, event: ops.ActionEvent) -> None: + """Show the gunicorn logs.""" + with open(workload.GUNICORN_LOG_FILE, "r") as log_file: + event.set_results({"gunicorn_logs": log_file.read()}) + + +class UploadDatabaseAction(pydantic.BaseModel): + """Parameters for uploading a database snapshot (.sql file) to the machine.""" + + filename: str = pydantic.Field(description="The name of the snapshot file.") + if __name__ == "__main__": # pragma: nocover ops.main(MachineCharmCharm) diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index 3b311663..4790d047 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -16,6 +16,9 @@ logger = logging.getLogger(__name__) +GUNICORN_LOG_FILE = "/var/log/gunicorn.log" +INSTALL_LOG_FILE = "/var/log/install.log" @contextmanager def use_path(path: str) -> Generator: @@ -28,33 +31,22 @@ def use_path(path: str) -> Generator: os.chdir(cwd) -def run_command(*args) -> None: +def run_command(*args, cwd=None, log_file=None) -> None: """Run a subprocess and raise a RuntimeError if the subprocess result indicates an error. We do this to bubble up the error message from the subprocess to the debug-log.
""" try: - subprocess.run(args, check=True, capture_output=True, text=True) - except subprocess.CalledProcessError as e: - raise RuntimeError(e.stderr) from e - - -def ensure_config(port: int, slug: str) -> bool: - """Ensure that required config variables are supplied.""" - if os.getenv("DATABASE_URL"): - return True - return False - - -def run_migrations() -> None: - """Run database migrations.""" - with use_path("../.."): subprocess.run( - ["flask", "--app", "webapp.app", "db", "upgrade"], + args, check=True, - capture_output=True, text=True, + cwd=cwd, + stdout=log_file or subprocess.PIPE, + stderr=log_file or subprocess.PIPE, ) + except subprocess.CalledProcessError as e: + raise RuntimeError(str(e)) from e def install(charm_dir: str) -> None: @@ -65,53 +57,92 @@ def install(charm_dir: str) -> None: apt.add_package("python3-venv", "3.12.3-0ubuntu2.1") apt.add_package("postgresql-16", "16.11-0ubuntu0.24.04.1") - # Create a virtual environment - run_command("python3", "-m", "venv", "/venv") + with open(INSTALL_LOG_FILE, "a") as log_file: + # Create a virtual environment + run_command("python3", "-m", "venv", "/venv", log_file=log_file) - # Install setuptools to support pkg_resources - run_command("/venv/bin/python", "-m", "pip", "install", "setuptools==80.10.2") + # Install setuptools to support pkg_resources + run_command( + "/venv/bin/python", "-m", "pip", "install", "setuptools==80.10.2", log_file=log_file + ) - # Install workload python packages - run_command( - "/venv/bin/python", - "-m", - "pip", - "install", - "-r", - f"{charm_dir}/src/flask/app/requirements.txt", - ) + # Install workload python packages + run_command( + "/venv/bin/python", + "-m", + "pip", + "install", + "-r", + f"{charm_dir}/src/flask/app/requirements.txt", + log_file=log_file, + ) def migrate(charm_dir: str, database_url: str) -> None: """Run database migrations.""" - # Install the pg_trgm extension - run_command("psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;", database_url) - # Then run migrations - run_command( - "/venv/bin/python", - "-m", - "flask", - "--app", - f"{charm_dir}/src/flask/app/webapp.app", - "db", - "upgrade", - ) + with open(INSTALL_LOG_FILE, "a") as log_file: + # Install the pg_trgm extension + run_command( + "psql", + "-c", + "CREATE EXTENSION IF NOT EXISTS pg_trgm;", + database_url, + log_file=log_file, + ) + # Then run migrations + os.environ["DATABASE_URL"] = database_url + os.environ["SECRET_KEY"] = ( + "placeholder" # SECRET_KEY must be set, the actual value is not relevant for migrations + ) + run_command( + "/venv/bin/python", + "-m", + "flask", + "--app", + f"{charm_dir}/src/flask/app/webapp.app", + "db", + "upgrade", + cwd=f"{charm_dir}/src/flask/app/", + log_file=log_file, + ) -def start(charm_dir: str, address: str, workers: str, timeout: str) -> None: +def start( + charm_dir: str, + workers: str, + timeout: str, + secret_key: str, + oauth_token_salt: str, + database_url: str, +) -> None: """Start the webapp.""" - run_command( - "/venv/bin/python", - "-m", - "talisker.gunicorn", - "webapp.app:app", - "--bind", - address, - "--workers", - workers, - "--timeout", - timeout, - ) + if not database_url: + raise RuntimeError("DATABASE_URL must be provided to start the workload") + if not secret_key: + raise RuntimeError("SECRET_KEY must be provided to start the workload") + if not oauth_token_salt: + raise RuntimeError("OAUTH_TOKEN_SALT must be provided to start the workload") + os.environ["SECRET_KEY"] = secret_key + os.environ["DATABASE_URL"] = database_url + 
os.environ["OAUTH_TOKEN_SALT"] = oauth_token_salt + + # Add logging for gunicorn + with open(GUNICORN_LOG_FILE, "a") as log_file: + run_command( + "/venv/bin/python", + "-m", + "talisker.gunicorn", + "webapp.app:app", + "--chdir", + f"{charm_dir}/src/flask/app/", + "--bind", + "0.0.0.0:8000", + "--workers", + workers, + "--timeout", + timeout, + log_file=log_file, + ) def stop() -> None: @@ -121,41 +152,60 @@ def stop() -> None: def is_running() -> bool: """Return whether the webapp is running.""" - result = subprocess.run(["pgrep", "-f", "gunicorn"], capture_output=True, text=True) - return result.returncode == 0 - - -# def is_installed() -> bool: -# """Return whether the tinyproxy executable is available.""" -# return shutil.which("tinyproxy") is not None - - -# def reload_config() -> None: -# """Ask tinyproxy to reload config.""" -# pid = _get_pid() -# if not pid: -# raise RuntimeError("tinyproxy is not running") -# # Sending signal SIGUSR1 doesn't terminate the process. It asks the process to reload config. -# # See https://manpages.ubuntu.com/manpages/jammy/en/man8/tinyproxy.8.html#signals -# os.kill(pid, signal.SIGUSR1) - - -# def uninstall() -> None: -# """Uninstall the tinyproxy executable and remove files.""" -# apt.remove_package("tinyproxy-bin") -# PID_FILE.unlink(missing_ok=True) -# CONFIG_FILE.unlink(missing_ok=True) -# CONFIG_FILE.parent.rmdir() - - -# def _get_pid() -> int | None: -# """Return the PID of the tinyproxy process, or None if the process can't be found.""" -# if not PID_FILE.exists(): -# return None -# pid = int(PID_FILE.read_text()) -# try: -# # Sending signal 0 doesn't terminate the process. It just checks whether the PID exists. -# os.kill(pid, 0) -# except ProcessLookupError: -# return None -# return pid + try: + run_command("pgrep", "-f", "gunicorn") + except RuntimeError: + return False + + return True + + +def restore_database_from_file(file_name: str, database_url: str) -> None: + """Restore the database from a file.""" + if is_running(): + stop() + + file_path = f"/tmp/{file_name}" + if not os.path.exists(file_path): + raise RuntimeError(f"Database file {file_path} does not exist") + + # Terminate existing connections to the database to allow for restore. + run_command( + "psql", + database_url, + "-c", + "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = current_database()" + " AND pid <> pg_backend_pid();", + ) + + # Backup the existing database before restoring the new one, in case we need to roll back. + backup_file_path = f"/tmp/backup_{file_name}" + run_command( + "pg_dump", + database_url, + "-Fc", + "-f", + backup_file_path, + ) + try: + # Clear out the existing database before restoring the new one. + run_command( + "psql", + database_url, + "-c", + "DROP SCHEMA public CASCADE; CREATE SCHEMA public;", + ) + # Upload the new file. + run_command("psql", database_url, "-f", file_path) + except Exception as e: + # If restore fails, restore the original database from the backup file. + run_command( + "pg_restore", + database_url, + "-Fc", + backup_file_path, + ) + raise RuntimeError("Failed to restore the database from the uploaded file") from e + finally: + # Clean up the backup file. + os.remove(backup_file_path) diff --git a/machine-charm/tests/unit/test_charm.py b/machine-charm/tests/unit/test_charm.py index 7df11810..6eebca72 100644 --- a/machine-charm/tests/unit/test_charm.py +++ b/machine-charm/tests/unit/test_charm.py @@ -1,7 +1,9 @@ # Copyright 2026 Samuel Olwe # See LICENSE file for licensing details. 
-# -# To learn more about testing, see https://documentation.ubuntu.com/ops/latest/explanation/testing/ + +"""Unit tests for the MachineCharmCharm.""" + +from unittest.mock import MagicMock import pytest from ops import testing @@ -9,18 +11,195 @@ from charm import MachineCharmCharm -def mock_get_version(): - """Get a mock version string without executing the workload code.""" - return "1.0.0" +def _db_relation(*, uris: str = "postgresql://host:5432/db") -> testing.Relation: + """Return a Relation pre-populated with database credentials.""" + return testing.Relation( + endpoint="postgresql", + interface="postgresql_client", + remote_app_data={ + "username": "user", + "password": "pass", + "endpoints": "host:5432", + "database": "postgresql", + "uris": uris, + }, + ) + + +def _full_config() -> dict: + """Return a config dict with all required options set.""" + return { + "oauth-token-salt": "test-salt-value", + "secret_key": "test-secret-key", + "workers": "2", + "timeout": "15", + } + + +@pytest.fixture(autouse=True) +def mock_workload(monkeypatch: pytest.MonkeyPatch): + """Patch all workload functions used by the charm.""" + monkeypatch.setattr("charm.workload.install", lambda *a, **kw: None) + monkeypatch.setattr("charm.workload.migrate", lambda *a, **kw: None) + monkeypatch.setattr("charm.workload.start", lambda *a, **kw: None) + monkeypatch.setattr("charm.workload.stop", lambda *a, **kw: None) + monkeypatch.setattr("charm.workload.is_running", lambda: True) + + +class TestInstallEvent: + """Tests for the _on_install handler.""" + + def test_install_no_database_sets_blocked(self): + """Without a database relation, install sets BlockedStatus.""" + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + state_out = ctx.run(ctx.on.install(), state_in) + + assert state_out.unit_status == testing.BlockedStatus("waiting for database relation") + + def test_install_calls_workload_install(self, monkeypatch: pytest.MonkeyPatch): + """Verify workload.install is called during the install event.""" + install_called = False + + def fake_install(charm_dir): + nonlocal install_called + install_called = True + + monkeypatch.setattr("charm.workload.install", fake_install) + + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + ctx.run(ctx.on.install(), state_in) + assert install_called + + +class TestStartEvent: + """Tests for the _on_start / _start handler.""" + + def test_start_without_database_sets_blocked(self): + """Start without a database relation should set BlockedStatus.""" + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + state_out = ctx.run(ctx.on.start(), state_in) + + assert state_out.unit_status == testing.BlockedStatus("waiting for database relation") + + def test_start_not_running_sets_blocked(self, monkeypatch: pytest.MonkeyPatch): + """When workload fails to start, status should be BlockedStatus.""" + monkeypatch.setattr("charm.workload.is_running", lambda: False) + + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + state_out = ctx.run(ctx.on.start(), state_in) + + assert isinstance(state_out.unit_status, testing.BlockedStatus) + + +class TestStopEvent: + """Tests for the _on_stop handler.""" + + def test_stop_calls_workload_stop(self, monkeypatch: pytest.MonkeyPatch): + """Verify workload.stop is called during the stop event.""" + stop_called = False + + def fake_stop(): + nonlocal stop_called + stop_called = True + + monkeypatch.setattr("charm.workload.stop", fake_stop) + + ctx = 
testing.Context(MachineCharmCharm) + state_in = testing.State() + + ctx.run(ctx.on.stop(), state_in) + assert stop_called + + def test_stop_sets_maintenance_status(self): + """Stop event should transition through MaintenanceStatus.""" + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + state_out = ctx.run(ctx.on.stop(), state_in) + + assert state_out.unit_status == testing.MaintenanceStatus("stopping workload") + + +class TestConfigChangedEvent: + """Tests for the _on_config_changed handler.""" + + def test_config_changed_triggers_stop(self, monkeypatch: pytest.MonkeyPatch): + """Config changed should stop the workload if it is running.""" + calls = [] + monkeypatch.setattr("charm.workload.stop", lambda: calls.append("stop")) + monkeypatch.setattr("charm.workload.start", lambda *a, **kw: calls.append("start")) + monkeypatch.setattr("charm.workload.is_running", lambda: True) + + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + ctx.run(ctx.on.config_changed(), state_in) + + assert "stop" in calls + + def test_config_changed_without_database_sets_blocked(self): + """Config changed without DB relation should end up blocked.""" + ctx = testing.Context(MachineCharmCharm) + state_in = testing.State() + + state_out = ctx.run(ctx.on.config_changed(), state_in) + + assert isinstance(state_out.unit_status, testing.BlockedStatus) + + +class TestGetWriteableUri: + """Tests for the _get_writeable_uri static method.""" + + def test_single_uri(self): + """With a single URI, it should be returned as-is.""" + event = MagicMock() + event.uris = "postgresql://primary:5432/db" + event.read_only_uris = None + + result = MachineCharmCharm._get_writeable_uri(event) + + assert result == "postgresql://primary:5432/db" + + def test_multiple_uris_filters_readonly(self): + """With multiple URIs and read-only URIs, return only the writeable one.""" + event = MagicMock() + event.uris = "postgresql://primary:5432/db,postgresql://replica:5432/db" + event.read_only_uris = "postgresql://replica:5432/db" + + result = MachineCharmCharm._get_writeable_uri(event) + + assert result == "postgresql://primary:5432/db" + + def test_no_uris_raises(self): + """With no URIs, a RuntimeError should be raised.""" + event = MagicMock() + event.uris = "" + + with pytest.raises(RuntimeError, match="No database URIs"): + MachineCharmCharm._get_writeable_uri(event) + + def test_no_uris_none_raises(self): + """With None URIs, a RuntimeError should be raised.""" + event = MagicMock() + event.uris = None + + with pytest.raises(RuntimeError, match="No database URIs"): + MachineCharmCharm._get_writeable_uri(event) + + def test_multiple_uris_no_readonly(self): + """With multiple URIs but no read-only info, return the full uris string.""" + event = MagicMock() + event.uris = "postgresql://a:5432/db,postgresql://b:5432/db" + event.read_only_uris = "" + result = MachineCharmCharm._get_writeable_uri(event) -def test_start(monkeypatch: pytest.MonkeyPatch): - """Test that the charm has the correct state after handling the start event.""" - # Arrange: - ctx = testing.Context(MachineCharmCharm) - monkeypatch.setattr("charm.workload.get_version", mock_get_version) - # Act: - state_out = ctx.run(ctx.on.start(), testing.State()) - # Assert: - assert state_out.workload_version is not None - assert state_out.unit_status == testing.ActiveStatus() + assert result == "postgresql://a:5432/db,postgresql://b:5432/db" diff --git a/machine-charm/tests/unit/test_workload.py b/machine-charm/tests/unit/test_workload.py new 
file mode 100644 index 00000000..162def65 --- /dev/null +++ b/machine-charm/tests/unit/test_workload.py @@ -0,0 +1,152 @@ +# Copyright 2026 Samuel Olwe +# See LICENSE file for licensing details. + +"""Unit tests for the workload module.""" + +import os +from unittest.mock import patch + +import pytest + +import workload + + +class TestInstall: + """Tests for the install function.""" + + @patch("workload.run_command") + @patch("workload.apt") + def test_install_calls_apt_update(self, mock_apt, mock_run_cmd): + """Install should call apt.update first.""" + workload.install("/charm") + + mock_apt.update.assert_called_once() + + @patch("workload.run_command") + @patch("workload.apt") + def test_install_adds_required_packages(self, mock_apt, mock_run_cmd): + """Install should install the three required apt packages.""" + workload.install("/charm") + + assert mock_apt.add_package.call_count == 3 + package_names = [c.args[0] for c in mock_apt.add_package.call_args_list] + assert "libsodium-dev" in package_names + assert "python3-venv" in package_names + assert "postgresql-16" in package_names + + @patch("workload.run_command") + @patch("workload.apt") + def test_install_creates_venv(self, mock_apt, mock_run_cmd): + """Install should create a virtual environment.""" + workload.install("/charm") + + mock_run_cmd.assert_any_call("python3", "-m", "venv", "/venv") + + @patch("workload.run_command") + @patch("workload.apt") + def test_install_installs_setuptools(self, mock_apt, mock_run_cmd): + """Install should install setuptools in the venv.""" + workload.install("/charm") + + mock_run_cmd.assert_any_call( + "/venv/bin/python", "-m", "pip", "install", "setuptools==80.10.2" + ) + + @patch("workload.run_command") + @patch("workload.apt") + def test_install_installs_requirements(self, mock_apt, mock_run_cmd): + """Install should pip install from the app requirements.txt.""" + workload.install("/charm") + + mock_run_cmd.assert_any_call( + "/venv/bin/python", + "-m", + "pip", + "install", + "-r", + "/charm/src/flask/app/requirements.txt", + ) + + +class TestMigrate: + """Tests for the migrate function.""" + + @patch("workload.run_command") + def test_migrate_creates_pg_trgm_extension(self, mock_run_cmd): + """Migrate should install the pg_trgm extension first.""" + workload.migrate("/charm", "postgresql://host/db") + + mock_run_cmd.assert_any_call( + "psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;", "postgresql://host/db" + ) + + @patch("workload.run_command") + def test_migrate_sets_environment_variables(self, mock_run_cmd): + """Migrate should set DATABASE_URL and SECRET_KEY env vars.""" + workload.migrate("/charm", "postgresql://host/db") + + assert os.environ["DATABASE_URL"] == "postgresql://host/db" + assert os.environ["SECRET_KEY"] == "placeholder" + + +class TestStart: + """Tests for the start function.""" + + @patch("workload.run_command") + def test_start_sets_environment_variables(self, mock_run_cmd): + """Start should set SECRET_KEY, DATABASE_URL and OAUTH_TOKEN_SALT env vars.""" + workload.start("/charm", "4", "60", "my-secret", "my-salt", "postgresql://host/db") + + assert os.environ["SECRET_KEY"] == "my-secret" + assert os.environ["DATABASE_URL"] == "postgresql://host/db" + assert os.environ["OAUTH_TOKEN_SALT"] == "my-salt" + + @patch("workload.run_command") + def test_start_runs_gunicorn(self, mock_run_cmd): + """Start should invoke talisker.gunicorn with correct arguments.""" + workload.start("/charm", "4", "60", "my-secret", "my-salt", "postgresql://host/db") + + 
mock_run_cmd.assert_called_once_with( + "/venv/bin/python", + "-m", + "talisker.gunicorn", + "webapp.app:app", + "--chdir", + "/charm/src/flask/app/", + "--bind", + "0.0.0.0:8000", + "--workers", + "4", + "--timeout", + "60", + ) + + @patch("workload.run_command") + def test_start_passes_workers_and_timeout(self, mock_run_cmd): + """Start should forward workers and timeout to gunicorn.""" + workload.start("/charm", "8", "120", "key", "salt", "postgres://x/y") + + args = mock_run_cmd.call_args.args + # Find the position of --workers and --timeout flags + assert "--workers" in args + assert args[args.index("--workers") + 1] == "8" + assert "--timeout" in args + assert args[args.index("--timeout") + 1] == "120" + + @patch("workload.run_command") + def test_start_missing_database_url_raises(self, mock_run_cmd): + """Start without database_url should raise RuntimeError.""" + with pytest.raises(RuntimeError, match="DATABASE_URL"): + workload.start("/charm", "4", "60", "key", "salt", "") + + @patch("workload.run_command") + def test_start_missing_secret_key_raises(self, mock_run_cmd): + """Start without secret_key should raise RuntimeError.""" + with pytest.raises(RuntimeError, match="SECRET_KEY"): + workload.start("/charm", "4", "60", "", "salt", "postgres://x/y") + + @patch("workload.run_command") + def test_start_missing_oauth_token_salt_raises(self, mock_run_cmd): + """Start without oauth_token_salt should raise RuntimeError.""" + with pytest.raises(RuntimeError, match="OAUTH_TOKEN_SALT"): + workload.start("/charm", "4", "60", "key", "", "postgres://x/y") From 7f1d07bb743b387ceef3dbf7687a9a8119d3cd31 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Mon, 2 Mar 2026 17:13:51 +0300 Subject: [PATCH 09/17] feat: run gunicorn detached --- machine-charm/src/workload.py | 33 +++++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index 4790d047..db1ef70c 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -31,21 +31,30 @@ def use_path(path: str) -> Generator: os.chdir(cwd) -def run_command(*args, cwd=None, log_file=None) -> None: +def run_command(*args, cwd=None, log_file=None, detached=False) -> None: """Run a subprocess and raise a RuntimeError if the subprocess result indicates an error. We do this to bubble up the error message from the subprocess to the debug-log. 
""" try: - subprocess.run( - args, - check=True, - text=True, - cwd=cwd, - stdout=log_file or subprocess.PIPE, - stderr=log_file or subprocess.PIPE, - ) - except subprocess.CalledProcessError as e: + if detached: + subprocess.Popen( + args, + text=True, + cwd=cwd, + stdout=log_file or subprocess.PIPE, + stderr=log_file or subprocess.PIPE, + ) + else: + subprocess.run( + args, + check=True, + text=True, + cwd=cwd, + stdout=log_file or subprocess.PIPE, + stderr=log_file or subprocess.PIPE, + ) + except Exception as e: raise RuntimeError(str(e)) from e @@ -137,11 +146,15 @@ def start( f"{charm_dir}/src/flask/app/", "--bind", "0.0.0.0:8000", + "--daemon", "--workers", workers, "--timeout", timeout, + "--log-file", + GUNICORN_LOG_FILE, log_file=log_file, + detached=True, ) From fd675560ca64732fcf324d9f9ddc19c32eb9c9cc Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Mon, 2 Mar 2026 19:17:01 +0300 Subject: [PATCH 10/17] feat: added action and docs --- machine-charm/README.md | 62 ++++++++++++++++++++++++++++---- machine-charm/src/charm.py | 46 ++++++++++++------------ machine-charm/src/workload.py | 68 ++++++++++++++++++++++++----------- 3 files changed, 126 insertions(+), 50 deletions(-) diff --git a/machine-charm/README.md b/machine-charm/README.md index 9a270b9f..6c7c203a 100644 --- a/machine-charm/README.md +++ b/machine-charm/README.md @@ -34,27 +34,75 @@ Then deploy the charm, and integrate with the postgresql charm. ```bash juju deploy ./ubuntu-security-api-vm_amd64.charm -juju relate ubuntu-security-api-vm postgresql +juju integrate ubuntu-security-api-vm postgresql ``` ### Configuring the charm -You'll also need to set the secret is required in the config file before the application can run. e.g. +You'll also need to set the secrets required in the config before the application can run. -To create +Create the secrets: ```bash juju add-secret secret-key secret-key= -juju add-secret oauth-token-salt secret-key= +juju add-secret oauth-token-salt oauth-token-salt= ``` -Then grant permissions, and set the config +Then grant permissions and set the config: ```bash juju grant-secret secret-key ubuntu-security-api-vm juju grant-secret oauth-token-salt ubuntu-security-api-vm -juju config ubuntu-security-api-vm oauth-token-salt=secret:d6id5jn91c5s41im2dtg -juju config ubuntu-security-api-vm secret-key=secret:d6idfa791c5s41im2dug +juju config ubuntu-security-api-vm secret-key=secret: +juju config ubuntu-security-api-vm oauth-token-salt=secret: +``` + +## Using charm actions + +This charm exposes three actions: + +- `upload-database` +- `show-install-logs` +- `show-gunicorn-logs` + +You can list available actions with: + +```bash +juju actions ubuntu-security-api-vm +``` + +### `upload-database` + +Use this action to restore a PostgreSQL snapshot from a file already present on the unit in `/tmp`. + +1. Copy the database file to the unit: + +```bash +juju scp ./database.sql ubuntu-security-api-vm/0:/tmp/database.sql +``` + +2. Run the action: + +```bash +juju run ubuntu-security-api-vm/0 upload-database filename=database.sql --wait +``` + +The action restores the database, runs migrations, and returns a success or failure message. 
+
+### `show-install-logs`
+
+Use this action to retrieve installation logs from `/var/log/install.log`:
+
+```bash
+juju run ubuntu-security-api-vm/0 show-install-logs --wait
+```
+
+### `show-gunicorn-logs`
+
+Use this action to retrieve application logs from `/var/log/gunicorn.log`:
+
+```bash
+juju run ubuntu-security-api-vm/0 show-gunicorn-logs --wait
+```
 
 ## Other resources
diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py
index 7cd33397..0b12eee7 100755
--- a/machine-charm/src/charm.py
+++ b/machine-charm/src/charm.py
@@ -86,27 +86,6 @@ def _get_writeable_uri(event: DatabaseCreatedEvent) -> str:
             return writeable_uris.pop()
         return event.uris
 
-    def _on_upload_database_action(self, event: ops.ActionEvent) -> None:
-        """Handle the upload-database action."""
-        params = event.load_params(UploadDatabaseAction, errors="fail")
-        event.log(f"Uploading database file: {params.filename}")
-
-        if not self.database.is_resource_created():
-            event.fail("Database relation is not ready")
-            return
-
-        # Dump new database from the uploaded file
-        self.database.relation_data
-
-        workload.restore_database_from_file(params.filename, self.database)
-        try:
-            # Run migrations to verify the database is in a good state after the restore.
-            workload.migrate(self.charm_dir.absolute().as_posix(), self._get_database_uri())
-            event.set_results({"message": "Database migrated successfully"})
-        except Exception as e:
-            logger.error("Failed to migrate database: %s", e)
-            event.fail(f"Failed to migrate database: {e}")
-
     def _on_install(self, event: ops.InstallEvent):
         """Install the workload on the machine."""
         workload.install(self.charm_dir.absolute().as_posix())
@@ -201,15 +180,36 @@ def _on_stop(self, event: ops.StopEvent):
         """Handle the stop event."""
         self._stop()
 
+    def _on_upload_database_action(self, event: ops.ActionEvent) -> None:
+        """Handle the upload-database action."""
+        params = event.load_params(UploadDatabaseAction, errors="fail")
+        event.log(f"Uploading database file: {params.filename}")
+
+        if not self.database.is_resource_created():
+            event.fail("Database relation is not ready")
+            return
+
+        # Restore the database from the uploaded file
+        database_uri = self._get_database_uri()
+
+        workload.restore_database_from_file(params.filename, database_uri)
+        try:
+            # Run migrations to verify the database is in a good state after the restore.
+ workload.migrate(self.charm_dir.absolute().as_posix(), database_uri) + event.set_results({"message": "Database migrated successfully"}) + except Exception as e: + logger.error("Failed to migrate database: %s", e) + event.fail(f"Failed to migrate database: {e}") + def _show_install_logs(self, event: ops.ActionEvent) -> None: """Show logs from the install process.""" with open(workload.INSTALL_LOG_FILE, "r") as log_file: - event.set_results({"install_logs": log_file.read()}) + event.set_results({"install-logs": log_file.read()}) def _show_gunicorn_logs(self, event: ops.ActionEvent) -> None: """Show the gunicorn logs.""" with open(workload.GUNICORN_LOG_FILE, "r") as log_file: - event.set_results({"gunicorn_logs": log_file.read()}) + event.set_results({"gunicorn-logs": log_file.read()}) class UploadDatabaseAction(pydantic.BaseModel): diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index db1ef70c..f1829f25 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -9,6 +9,7 @@ import logging import os import subprocess +import time from contextlib import contextmanager from typing import Generator @@ -101,8 +102,21 @@ def migrate(charm_dir: str, database_url: str) -> None: # Then run migrations os.environ["DATABASE_URL"] = database_url os.environ["SECRET_KEY"] = ( - "placeholder" # SECRET_KEY must be set, the actual value is not relevant for migrations + "placeholder" # SECRET_KEY must be set, the actual value is not relevant for migration ) + # Check whether the database is consistent before running migrations + run_command( + "/venv/bin/python", + "-m", + "flask", + "--app", + f"{charm_dir}/src/flask/app/webapp.app", + "db", + "current", + cwd=f"{charm_dir}/src/flask/app/", + log_file=log_file, + ) + time.sleep(5) run_command( "/venv/bin/python", "-m", @@ -114,6 +128,8 @@ def migrate(charm_dir: str, database_url: str) -> None: cwd=f"{charm_dir}/src/flask/app/", log_file=log_file, ) + # Give the database a moment to settle after migrations + time.sleep(5) def start( @@ -137,25 +153,37 @@ def start( # Add logging for gunicorn with open(GUNICORN_LOG_FILE, "a") as log_file: - run_command( - "/venv/bin/python", - "-m", - "talisker.gunicorn", - "webapp.app:app", - "--chdir", - f"{charm_dir}/src/flask/app/", - "--bind", - "0.0.0.0:8000", - "--daemon", - "--workers", - workers, - "--timeout", - timeout, - "--log-file", - GUNICORN_LOG_FILE, - log_file=log_file, - detached=True, - ) + # Restart if the start fails + limit = 5 + for attempt in range(1, limit + 1): + try: + run_command( + "/venv/bin/python", + "-m", + "talisker.gunicorn", + "webapp.app:app", + "--chdir", + f"{charm_dir}/src/flask/app/", + "--bind", + "0.0.0.0:8000", + "--daemon", + "--workers", + workers, + "--timeout", + timeout, + "--log-file", + GUNICORN_LOG_FILE, + log_file=log_file, + detached=True, + ) + # Give gunicorn a moment to start + time.sleep(5) + if is_running(): + break + except Exception as e: + logger.error("Failed to start workload (attempt %d/%d): %s", attempt, limit, e) + if attempt == limit: + raise RuntimeError("Exceeded maximum start attempts") from e def stop() -> None: From 881ccc9b31e9c334c448579f75e888f5fa93f190 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Tue, 3 Mar 2026 20:20:54 +0300 Subject: [PATCH 11/17] chore: update config types --- machine-charm/README.md | 35 +++++- machine-charm/charmcraft.yaml | 8 ++ machine-charm/src/charm.py | 43 +++++-- machine-charm/src/workload.py | 225 +++++++++++++++++----------------- 4 files changed, 187 insertions(+), 
124 deletions(-)

diff --git a/machine-charm/README.md b/machine-charm/README.md
index 6c7c203a..ce4317fc 100644
--- a/machine-charm/README.md
+++ b/machine-charm/README.md
@@ -15,6 +15,12 @@ More information: https://charmhub.io/ubuntu-security-api-vm
 
 This is a machine charm to run the ubuntu-security-api on virtual machines.
 
+## Prerequisites
+
+- **Juju 3.x** (see [Juju installation docs](https://documentation.ubuntu.com/juju/3.6/howto/manage-juju/))
+- **Ubuntu 24.04** host (required for `--destructive-mode` packing)
+- A Juju model bootstrapped and ready for deployment
+
 ## Running locally
 
 This charm requires postgres, which can be added using
@@ -23,7 +29,7 @@ This charm requires postgres, which can be added using
 juju deploy postgresql --channel 16/stable
 ```
 
-In order to add the files included in the repo, we have to pack the charm using destructive mode, i.e.
+In order to add the files included in the repo, we have to pack the charm using destructive mode on an Ubuntu 24.04 host:
 
 ```bash
 charmcraft pack --destructive-mode
@@ -57,6 +63,31 @@ juju config ubuntu-security-api-vm secret-key=secret:
 juju config ubuntu-security-api-vm oauth-token-salt=secret:
 ```
 
+Once the deployment settles, the application status will change to `application has started`. You can now make HTTP requests to the application using its IP address, e.g.
+
+```bash
+juju status
+. . .
+ubuntu-security-api-vm/2*  active  idle  3  10.191.230.150  8000/tcp  application has started
+. . .
+curl -v 10.191.230.150:8000/security/cves.json
+```
+
+### Optional configuration
+
+You can also tune the Gunicorn worker settings:
+
+```bash
+juju config ubuntu-security-api-vm workers=4
+juju config ubuntu-security-api-vm timeout=300
+```
+
+| Option    | Type    | Default | Description                                      |
+| --------- | ------- | ------- | ------------------------------------------------ |
+| `workers` | integer | 3       | Number of Gunicorn worker processes              |
+| `timeout` | integer | 500     | Seconds before a non-responsive worker is killed |
+
+
 ## Using charm actions
 
 This charm exposes three actions:
@@ -75,6 +106,8 @@ juju actions ubuntu-security-api-vm
 
 Use this action to restore a PostgreSQL snapshot from a file already present on the unit in `/tmp`.
 
+> **Note:** The gunicorn service is stopped during the database restore and migration process. Expect brief downtime while the action runs. The service is restarted automatically once the restore completes.
+
 1. Copy the database file to the unit:
 
 ```bash
diff --git a/machine-charm/charmcraft.yaml b/machine-charm/charmcraft.yaml
index f7e4fa6d..6f7e2cf7 100644
--- a/machine-charm/charmcraft.yaml
+++ b/machine-charm/charmcraft.yaml
@@ -56,6 +56,14 @@ config:
   oauth-token-salt:
     type: secret
     description: "Salt used to encode and decode OAuth tokens"
+  workers:
+    type: int
+    default: 3
+    description: "The number of worker processes for handling requests. Adjust based on your server's CPU cores and expected load."
+  timeout:
+    type: int
+    default: 500
+    description: "The number of seconds to wait for a worker to respond before restarting it."
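+  # Both options can be changed on a running deployment, e.g.:
+  #   juju config ubuntu-security-api-vm workers=4 timeout=300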
 actions:
   upload-database:
diff --git a/machine-charm/src/charm.py b/machine-charm/src/charm.py
index 0b12eee7..a5a636b3 100755
--- a/machine-charm/src/charm.py
+++ b/machine-charm/src/charm.py
@@ -18,12 +18,15 @@
 
 logger = logging.getLogger(__name__)
 
+DEFAULT_WORKERS = 3
+DEFAULT_TIMEOUT = 500
+
 
 class WorkloadConfig(pydantic.BaseModel):
     """Pydantic model for charm configuration."""
 
-    workers: str = pydantic.Field("3", description="Number of workers for the webapp")
-    timeout: str = pydantic.Field("30", description="Worker timeout for the webapp")
+    workers: int = pydantic.Field(DEFAULT_WORKERS, description="Number of workers for the webapp")
+    timeout: int = pydantic.Field(DEFAULT_TIMEOUT, description="Worker timeout for the webapp")
 
 
 class MachineCharmCharm(ops.CharmBase):
@@ -121,6 +124,14 @@ def _start(self) -> None:
         oauth_token_salt_id: str = self.config.get("oauth-token-salt")  # type: ignore
         secret_key_id: str = self.config.get("secret-key")  # type: ignore
 
+        if not secret_key_id:
+            self.unit.status = ops.BlockedStatus("missing secret config: set secret-key")
+            return
+
+        if not oauth_token_salt_id:
+            self.unit.status = ops.BlockedStatus("missing secret config: set oauth-token-salt")
+            return
+
         oauth_token_salt = (
             self.model.get_secret(id=oauth_token_salt_id)
             .get_content(refresh=True)
@@ -138,12 +149,13 @@ def _start(self) -> None:
 
         workload.start(
             self.charm_dir.absolute().as_posix(),
-            self.config.get("workers", "3"),  # type: ignore
-            self.config.get("timeout", "60"),  # type: ignore
+            str(self.config.get("workers", DEFAULT_WORKERS)),
+            str(self.config.get("timeout", DEFAULT_TIMEOUT)),
             secret_key,
             oauth_token_salt,
             database_uri,
         )
+
         # Expose the webapp port.
         self.unit.set_ports(8000)
 
@@ -163,7 +175,7 @@ def _on_config_changed(self, event: ops.ConfigChangedEvent):
     def _stop(self) -> None:
         """Stop the workload."""
         self.unit.status = ops.MaintenanceStatus("stopping workload")
-        workload.stop()
+        workload.stop_gunicorn()
 
     def _restart(self) -> None:
         """Restart the workload."""
@@ -172,11 +184,11 @@ def _restart(self) -> None:
         self._stop()
         self._start()
 
-    def _on_start(self, event: ops.StartEvent):
+    def _on_start(self, event: ops.StartEvent) -> None:
         """Handle the start event."""
         self._start()
 
-    def _on_stop(self, event: ops.StopEvent):
+    def _on_stop(self, event: ops.StopEvent) -> None:
         """Handle the stop event."""
         self._stop()
 
@@ -192,6 +204,7 @@ def _on_upload_database_action(self, event: ops.ActionEvent) -> None:
         # Restore the database from the uploaded file
         database_uri = self._get_database_uri()
 
+        self._stop()
         workload.restore_database_from_file(params.filename, database_uri)
         try:
             # Run migrations to verify the database is in a good state after the restore.
@@ -200,16 +213,24 @@ def _on_upload_database_action(self, event: ops.ActionEvent) -> None: except Exception as e: logger.error("Failed to migrate database: %s", e) event.fail(f"Failed to migrate database: {e}") + finally: + self._restart() def _show_install_logs(self, event: ops.ActionEvent) -> None: """Show logs from the install process.""" - with open(workload.INSTALL_LOG_FILE, "r") as log_file: - event.set_results({"install-logs": log_file.read()}) + try: + with open(workload.INSTALL_LOG_FILE, "r") as log_file: + event.set_results({"install-logs": log_file.read()}) + except FileNotFoundError: + event.fail(f"Log file {workload.INSTALL_LOG_FILE} not found") def _show_gunicorn_logs(self, event: ops.ActionEvent) -> None: """Show the gunicorn logs.""" - with open(workload.GUNICORN_LOG_FILE, "r") as log_file: - event.set_results({"gunicorn-logs": log_file.read()}) + try: + with open(workload.GUNICORN_LOG_FILE, "r") as log_file: + event.set_results({"gunicorn-logs": log_file.read()}) + except FileNotFoundError: + event.fail(f"Log file {workload.GUNICORN_LOG_FILE} not found") class UploadDatabaseAction(pydantic.BaseModel): diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index f1829f25..2e20e64a 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -10,8 +10,6 @@ import os import subprocess import time -from contextlib import contextmanager -from typing import Generator from charmlibs import apt @@ -21,40 +19,20 @@ INSTALL_LOG_FILE = "/var/log/install.log" -@contextmanager -def use_path(path: str) -> Generator: - """Execute a function within the specified directory.""" - cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(cwd) - - -def run_command(*args, cwd=None, log_file=None, detached=False) -> None: +def run_command(*args, cwd=None, log_file=None) -> None: """Run a subprocess and raise a RuntimeError if the subprocess result indicates an error. We do this to bubble up the error message from the subprocess to the debug-log. 
""" try: - if detached: - subprocess.Popen( - args, - text=True, - cwd=cwd, - stdout=log_file or subprocess.PIPE, - stderr=log_file or subprocess.PIPE, - ) - else: - subprocess.run( - args, - check=True, - text=True, - cwd=cwd, - stdout=log_file or subprocess.PIPE, - stderr=log_file or subprocess.PIPE, - ) + subprocess.run( + args, + check=True, + text=True, + cwd=cwd, + stdout=log_file or subprocess.PIPE, + stderr=log_file or subprocess.STDOUT, + ) except Exception as e: raise RuntimeError(str(e)) from e @@ -100,36 +78,95 @@ def migrate(charm_dir: str, database_url: str) -> None: log_file=log_file, ) # Then run migrations - os.environ["DATABASE_URL"] = database_url - os.environ["SECRET_KEY"] = ( - "placeholder" # SECRET_KEY must be set, the actual value is not relevant for migration - ) - # Check whether the database is consistent before running migrations - run_command( - "/venv/bin/python", - "-m", - "flask", - "--app", - f"{charm_dir}/src/flask/app/webapp.app", - "db", - "current", - cwd=f"{charm_dir}/src/flask/app/", - log_file=log_file, - ) - time.sleep(5) - run_command( - "/venv/bin/python", - "-m", - "flask", - "--app", - f"{charm_dir}/src/flask/app/webapp.app", - "db", - "upgrade", - cwd=f"{charm_dir}/src/flask/app/", - log_file=log_file, + try: + os.environ["DATABASE_URL"] = database_url + os.environ["SECRET_KEY"] = ( + "placeholder" # SECRET_KEY must be set, the actual value is not relevant for migration + ) + # Check whether the database is consistent before running migrations + run_command( + "/venv/bin/python", + "-m", + "flask", + "--app", + f"{charm_dir}/src/flask/app/webapp.app", + "db", + "current", + cwd=f"{charm_dir}/src/flask/app/", + log_file=log_file, + ) + run_command( + "/venv/bin/python", + "-m", + "flask", + "--app", + f"{charm_dir}/src/flask/app/webapp.app", + "db", + "upgrade", + cwd=f"{charm_dir}/src/flask/app/", + log_file=log_file, + ) + finally: + # Clean up environment variables + del os.environ["DATABASE_URL"] + del os.environ["SECRET_KEY"] + + +def is_running(): + """Return whether the webapp is running.""" + result = subprocess.run(["systemctl", "is-active", "gunicorn"], capture_output=True, text=True) + return result.stdout.strip() == "active" + + +SYSTEMD_UNIT = """[Unit] +Description=Gunicorn Python Application +After=network.target postgresql.service + +[Service] +User=root +Group=root +WorkingDirectory={charm_dir}/src/flask/app +Environment="DATABASE_URL={database_url}" +Environment="SECRET_KEY={secret_key}" +Environment="OAUTH_TOKEN_SALT={oauth_token_salt}" +ExecStart=/venv/bin/python -m gunicorn webapp.app:app --bind 0.0.0.0:8000 --workers {workers} --timeout {timeout} --access-logfile {gunicorn_log} --error-logfile {gunicorn_log} +ExecReload=/bin/kill -s HUP $MAINPID +Restart=on-failure +RestartSec=5 + +[Install] +WantedBy=multi-user.target +""" + + +def install_systemd_service( + charm_dir, workers, timeout, secret_key, oauth_token_salt, database_url +): + """Install the systemd service for the workload.""" + # delete existing service if it exists + if os.path.exists("/etc/systemd/system/gunicorn.service"): + os.remove("/etc/systemd/system/gunicorn.service") + + with open(GUNICORN_LOG_FILE, "a") as log_file: + unit_content = SYSTEMD_UNIT.format( + charm_dir=charm_dir, + database_url=database_url, + secret_key=secret_key, + oauth_token_salt=oauth_token_salt, + workers=workers, + timeout=timeout, + gunicorn_log=GUNICORN_LOG_FILE, ) - # Give the database a moment to settle after migrations - time.sleep(5) + with 
open("/etc/systemd/system/gunicorn.service", "w") as f: + f.write(unit_content) + + run_command("systemctl", "daemon-reload", log_file=log_file) + + +def start_gunicorn(): + """Start the gunicorn service.""" + with open(GUNICORN_LOG_FILE, "a") as log_file: + run_command("systemctl", "start", "gunicorn", log_file=log_file) def start( @@ -147,65 +184,25 @@ def start( raise RuntimeError("SECRET_KEY must be provided to start the workload") if not oauth_token_salt: raise RuntimeError("OAUTH_TOKEN_SALT must be provided to start the workload") - os.environ["SECRET_KEY"] = secret_key - os.environ["DATABASE_URL"] = database_url - os.environ["OAUTH_TOKEN_SALT"] = oauth_token_salt - - # Add logging for gunicorn - with open(GUNICORN_LOG_FILE, "a") as log_file: - # Restart if the start fails - limit = 5 - for attempt in range(1, limit + 1): - try: - run_command( - "/venv/bin/python", - "-m", - "talisker.gunicorn", - "webapp.app:app", - "--chdir", - f"{charm_dir}/src/flask/app/", - "--bind", - "0.0.0.0:8000", - "--daemon", - "--workers", - workers, - "--timeout", - timeout, - "--log-file", - GUNICORN_LOG_FILE, - log_file=log_file, - detached=True, - ) - # Give gunicorn a moment to start - time.sleep(5) - if is_running(): - break - except Exception as e: - logger.error("Failed to start workload (attempt %d/%d): %s", attempt, limit, e) - if attempt == limit: - raise RuntimeError("Exceeded maximum start attempts") from e - - -def stop() -> None: - """Stop the webapp.""" - run_command("pkill", "-9", "gunicorn") + install_systemd_service( + charm_dir, workers, timeout, secret_key, oauth_token_salt, database_url + ) + stop_gunicorn() + start_gunicorn() + time.sleep(5) # Give the service a moment to start + if not is_running(): + raise RuntimeError("Failed to start the workload, check gunicorn logs for details") -def is_running() -> bool: - """Return whether the webapp is running.""" - try: - run_command("pgrep", "-f", "gunicorn") - except RuntimeError: - return False - return True +def stop_gunicorn() -> None: + """Stop the webapp.""" + if is_running(): + run_command("systemctl", "stop", "gunicorn") def restore_database_from_file(file_name: str, database_url: str) -> None: """Restore the database from a file.""" - if is_running(): - stop() - file_path = f"/tmp/{file_name}" if not os.path.exists(file_path): raise RuntimeError(f"Database file {file_path} does not exist") @@ -242,6 +239,7 @@ def restore_database_from_file(file_name: str, database_url: str) -> None: # If restore fails, restore the original database from the backup file. run_command( "pg_restore", + "--dbname", database_url, "-Fc", backup_file_path, @@ -250,3 +248,6 @@ def restore_database_from_file(file_name: str, database_url: str) -> None: finally: # Clean up the backup file. os.remove(backup_file_path) + + # Start the service again after restoring the database. 
+ start_gunicorn() From 70e834564a362d4976234735ca682836381e067e Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Wed, 4 Mar 2026 18:08:56 +0300 Subject: [PATCH 12/17] feat: cleanup parts before packing --- machine-charm/charmcraft.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/machine-charm/charmcraft.yaml b/machine-charm/charmcraft.yaml index 6f7e2cf7..1e572464 100644 --- a/machine-charm/charmcraft.yaml +++ b/machine-charm/charmcraft.yaml @@ -19,10 +19,11 @@ parts: source: ../ source-type: local override-pull: | + rm -rf overlay parts prime stage craftctl default rm -rf .git .venv .env *.charm .mypy_cache .ruff_cache machine-charm k8s-charm konf node_modules yarn.lock __pycache__ docker-entrypoint-initdb.d .github .gitignore docker-compose.yaml Dockerfile entrypoint migrate.sh package.json rockcraft.yaml run organize: - "*": ./src/flask/app + "*": ./src/flask/app/ prime: - src/flask/app/app.py - src/flask/app/migrations/* From 73f0552c5fb5a6d75b1606ff880896f25dec29e8 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Thu, 5 Mar 2026 12:08:32 +0300 Subject: [PATCH 13/17] feat: update logs --- machine-charm/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/machine-charm/README.md b/machine-charm/README.md index ce4317fc..39cc121d 100644 --- a/machine-charm/README.md +++ b/machine-charm/README.md @@ -117,7 +117,7 @@ juju scp ./database.sql ubuntu-security-api-vm/0:/tmp/database.sql 2. Run the action: ```bash -juju run ubuntu-security-api-vm/0 upload-database filename=database.sql --wait +juju run ubuntu-security-api-vm/0 upload-database filename=database.sql --wait=10m ``` The action restores the database, runs migrations, and returns a success or failure message. @@ -127,7 +127,7 @@ The action restores the database, runs migrations, and returns a success or fail Use this action to retrieve installation logs from `/var/log/install.log`: ```bash -juju run ubuntu-security-api-vm/0 show-install-logs --wait +juju run ubuntu-security-api-vm/0 show-install-logs ``` ### `show-gunicorn-logs` @@ -135,7 +135,7 @@ juju run ubuntu-security-api-vm/0 show-install-logs --wait Use this action to retrieve application logs from `/var/log/gunicorn.log`: ```bash -juju run ubuntu-security-api-vm/0 show-gunicorn-logs --wait +juju run ubuntu-security-api-vm/0 show-gunicorn-logs ``` ## Other resources From 7fa45e6485dcdd822b5f7063e30346be82bd14ae Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Thu, 5 Mar 2026 14:02:52 +0300 Subject: [PATCH 14/17] chore: added unit tests --- machine-charm/tests/unit/test_charm.py | 39 +--- machine-charm/tests/unit/test_workload.py | 238 ++++++++++++---------- 2 files changed, 133 insertions(+), 144 deletions(-) diff --git a/machine-charm/tests/unit/test_charm.py b/machine-charm/tests/unit/test_charm.py index 6eebca72..b9d6519d 100644 --- a/machine-charm/tests/unit/test_charm.py +++ b/machine-charm/tests/unit/test_charm.py @@ -42,7 +42,6 @@ def mock_workload(monkeypatch: pytest.MonkeyPatch): monkeypatch.setattr("charm.workload.install", lambda *a, **kw: None) monkeypatch.setattr("charm.workload.migrate", lambda *a, **kw: None) monkeypatch.setattr("charm.workload.start", lambda *a, **kw: None) - monkeypatch.setattr("charm.workload.stop", lambda *a, **kw: None) monkeypatch.setattr("charm.workload.is_running", lambda: True) @@ -102,24 +101,10 @@ def test_start_not_running_sets_blocked(self, monkeypatch: pytest.MonkeyPatch): class TestStopEvent: """Tests for the _on_stop handler.""" - def test_stop_calls_workload_stop(self, 
monkeypatch: pytest.MonkeyPatch): - """Verify workload.stop is called during the stop event.""" - stop_called = False - - def fake_stop(): - nonlocal stop_called - stop_called = True - - monkeypatch.setattr("charm.workload.stop", fake_stop) - - ctx = testing.Context(MachineCharmCharm) - state_in = testing.State() - - ctx.run(ctx.on.stop(), state_in) - assert stop_called - - def test_stop_sets_maintenance_status(self): + def test_stop_sets_maintenance_status(self, monkeypatch: pytest.MonkeyPatch): """Stop event should transition through MaintenanceStatus.""" + monkeypatch.setattr("charm.workload.stop_gunicorn", lambda: None) + ctx = testing.Context(MachineCharmCharm) state_in = testing.State() @@ -131,22 +116,12 @@ def test_stop_sets_maintenance_status(self): class TestConfigChangedEvent: """Tests for the _on_config_changed handler.""" - def test_config_changed_triggers_stop(self, monkeypatch: pytest.MonkeyPatch): - """Config changed should stop the workload if it is running.""" - calls = [] - monkeypatch.setattr("charm.workload.stop", lambda: calls.append("stop")) - monkeypatch.setattr("charm.workload.start", lambda *a, **kw: calls.append("start")) - monkeypatch.setattr("charm.workload.is_running", lambda: True) - - ctx = testing.Context(MachineCharmCharm) - state_in = testing.State() - - ctx.run(ctx.on.config_changed(), state_in) + def test_config_changed_without_database_sets_blocked(self, monkeypatch: pytest.MonkeyPatch): + """Config changed without DB relation should end up blocked.""" - assert "stop" in calls + monkeypatch.setattr("charm.workload.start", lambda: None) + monkeypatch.setattr("charm.workload.stop_gunicorn", lambda: None) - def test_config_changed_without_database_sets_blocked(self): - """Config changed without DB relation should end up blocked.""" ctx = testing.Context(MachineCharmCharm) state_in = testing.State() diff --git a/machine-charm/tests/unit/test_workload.py b/machine-charm/tests/unit/test_workload.py index 162def65..00b894ec 100644 --- a/machine-charm/tests/unit/test_workload.py +++ b/machine-charm/tests/unit/test_workload.py @@ -4,149 +4,163 @@ """Unit tests for the workload module.""" import os -from unittest.mock import patch +import subprocess +from unittest.mock import MagicMock, call, mock_open, patch import pytest import workload -class TestInstall: - """Tests for the install function.""" +class TestRunCommand: + """Tests for the run_command function.""" - @patch("workload.run_command") - @patch("workload.apt") - def test_install_calls_apt_update(self, mock_apt, mock_run_cmd): - """Install should call apt.update first.""" - workload.install("/charm") + @patch("workload.subprocess.run") + def test_run_command_success(self, mock_run): + """A successful command should complete without error.""" + mock_run.return_value = MagicMock(returncode=0) - mock_apt.update.assert_called_once() + workload.run_command("echo", "hello") - @patch("workload.run_command") - @patch("workload.apt") - def test_install_adds_required_packages(self, mock_apt, mock_run_cmd): - """Install should install the three required apt packages.""" - workload.install("/charm") + mock_run.assert_called_once_with( + ("echo", "hello"), + check=True, + text=True, + cwd=None, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) - assert mock_apt.add_package.call_count == 3 - package_names = [c.args[0] for c in mock_apt.add_package.call_args_list] - assert "libsodium-dev" in package_names - assert "python3-venv" in package_names - assert "postgresql-16" in package_names + 
@patch("workload.subprocess.run", side_effect=subprocess.CalledProcessError(1, "fail")) + def test_run_command_raises_runtime_error_on_failure(self, mock_run): + """run_command should raise RuntimeError when the subprocess fails.""" + with pytest.raises(RuntimeError): + workload.run_command("fail") - @patch("workload.run_command") - @patch("workload.apt") - def test_install_creates_venv(self, mock_apt, mock_run_cmd): - """Install should create a virtual environment.""" - workload.install("/charm") - mock_run_cmd.assert_any_call("python3", "-m", "venv", "/venv") +class TestIsRunning: + """Tests for the is_running function.""" - @patch("workload.run_command") - @patch("workload.apt") - def test_install_installs_setuptools(self, mock_apt, mock_run_cmd): - """Install should install setuptools in the venv.""" - workload.install("/charm") + @patch("workload.subprocess.run") + def test_is_running_returns_true_when_active(self, mock_run): + """is_running should return True when systemctl reports 'active'.""" + mock_run.return_value = MagicMock(stdout="active\n") - mock_run_cmd.assert_any_call( - "/venv/bin/python", "-m", "pip", "install", "setuptools==80.10.2" - ) + assert workload.is_running() is True - @patch("workload.run_command") - @patch("workload.apt") - def test_install_installs_requirements(self, mock_apt, mock_run_cmd): - """Install should pip install from the app requirements.txt.""" - workload.install("/charm") - - mock_run_cmd.assert_any_call( - "/venv/bin/python", - "-m", - "pip", - "install", - "-r", - "/charm/src/flask/app/requirements.txt", - ) + @patch("workload.subprocess.run") + def test_is_running_returns_false_when_inactive(self, mock_run): + """is_running should return False when systemctl reports 'inactive'.""" + mock_run.return_value = MagicMock(stdout="inactive\n") + assert workload.is_running() is False -class TestMigrate: - """Tests for the migrate function.""" + @patch("workload.subprocess.run") + def test_is_running_returns_false_when_failed(self, mock_run): + """is_running should return False when systemctl reports 'failed'.""" + mock_run.return_value = MagicMock(stdout="failed\n") - @patch("workload.run_command") - def test_migrate_creates_pg_trgm_extension(self, mock_run_cmd): - """Migrate should install the pg_trgm extension first.""" - workload.migrate("/charm", "postgresql://host/db") + assert workload.is_running() is False - mock_run_cmd.assert_any_call( - "psql", "-c", "CREATE EXTENSION IF NOT EXISTS pg_trgm;", "postgresql://host/db" - ) + +class TestInstallSystemdService: + """Tests for the install_systemd_service function.""" @patch("workload.run_command") - def test_migrate_sets_environment_variables(self, mock_run_cmd): - """Migrate should set DATABASE_URL and SECRET_KEY env vars.""" - workload.migrate("/charm", "postgresql://host/db") + @patch("builtins.open", mock_open()) + @patch("workload.os.path.exists", return_value=False) + def test_creates_systemd_unit_file(self, mock_exists, mock_run_cmd): + """install_systemd_service should write the gunicorn.service unit file.""" + workload.install_systemd_service( + "/charm", "4", "60", "secret", "salt", "postgresql://host/db" + ) - assert os.environ["DATABASE_URL"] == "postgresql://host/db" - assert os.environ["SECRET_KEY"] == "placeholder" + # open is called twice: once for gunicorn log, once for service file + handle = open() + handle.write.assert_called_once() + written = handle.write.call_args.args[0] + assert "gunicorn" in written.lower() or "Gunicorn" in written class TestStart: """Tests for the start 
function.""" + @patch("workload.is_running", return_value=True) + @patch("workload.time.sleep") + @patch("workload.start_gunicorn") + @patch("workload.stop_gunicorn") + @patch("workload.install_systemd_service") + def test_start_stops_then_starts_gunicorn( + self, mock_install_svc, mock_stop, mock_start_g, mock_sleep, mock_running + ): + """start should stop gunicorn then start it again.""" + workload.start("/charm", "4", "60", "secret", "salt", "postgresql://host/db") + + mock_stop.assert_called_once() + mock_start_g.assert_called_once() + + @patch("workload.is_running", return_value=False) + @patch("workload.time.sleep") + @patch("workload.start_gunicorn") + @patch("workload.stop_gunicorn") + @patch("workload.install_systemd_service") + def test_start_raises_if_not_running_after_start( + self, mock_install_svc, mock_stop, mock_start_g, mock_sleep, mock_running + ): + """start should raise RuntimeError if the service is not running after starting.""" + with pytest.raises(RuntimeError, match="Failed to start the workload"): + workload.start("/charm", "4", "60", "secret", "salt", "postgresql://host/db") + + +class TestRestoreDatabaseFromFile: + """Tests for the restore_database_from_file function.""" + + @patch("workload.start_gunicorn") + @patch("workload.os.remove") @patch("workload.run_command") - def test_start_sets_environment_variables(self, mock_run_cmd): - """Start should set SECRET_KEY, DATABASE_URL and OAUTH_TOKEN_SALT env vars.""" - workload.start("/charm", "4", "60", "my-secret", "my-salt", "postgresql://host/db") - - assert os.environ["SECRET_KEY"] == "my-secret" - assert os.environ["DATABASE_URL"] == "postgresql://host/db" - assert os.environ["OAUTH_TOKEN_SALT"] == "my-salt" + @patch("workload.os.path.exists", return_value=True) + def test_restore_creates_backup(self, mock_exists, mock_run_cmd, mock_remove, mock_start): + """restore_database_from_file should create a backup before restoring.""" + workload.restore_database_from_file("dump.sql", "postgresql://host/db") - @patch("workload.run_command") - def test_start_runs_gunicorn(self, mock_run_cmd): - """Start should invoke talisker.gunicorn with correct arguments.""" - workload.start("/charm", "4", "60", "my-secret", "my-salt", "postgresql://host/db") - - mock_run_cmd.assert_called_once_with( - "/venv/bin/python", - "-m", - "talisker.gunicorn", - "webapp.app:app", - "--chdir", - "/charm/src/flask/app/", - "--bind", - "0.0.0.0:8000", - "--workers", - "4", - "--timeout", - "60", - ) + backup_calls = [c for c in mock_run_cmd.call_args_list if "pg_dump" in c.args] + assert len(backup_calls) == 1 + assert "/tmp/backup_dump.sql" in backup_calls[0].args + @patch("workload.start_gunicorn") + @patch("workload.os.remove") @patch("workload.run_command") - def test_start_passes_workers_and_timeout(self, mock_run_cmd): - """Start should forward workers and timeout to gunicorn.""" - workload.start("/charm", "8", "120", "key", "salt", "postgres://x/y") + @patch("workload.os.path.exists", return_value=True) + def test_restore_starts_gunicorn_after_success( + self, mock_exists, mock_run_cmd, mock_remove, mock_start + ): + """restore_database_from_file should start gunicorn after a successful restore.""" + workload.restore_database_from_file("dump.sql", "postgresql://host/db") - args = mock_run_cmd.call_args.args - # Find the position of --workers and --timeout flags - assert "--workers" in args - assert args[args.index("--workers") + 1] == "8" - assert "--timeout" in args - assert args[args.index("--timeout") + 1] == "120" - - 
@patch("workload.run_command") - def test_start_missing_database_url_raises(self, mock_run_cmd): - """Start without database_url should raise RuntimeError.""" - with pytest.raises(RuntimeError, match="DATABASE_URL"): - workload.start("/charm", "4", "60", "key", "salt", "") - - @patch("workload.run_command") - def test_start_missing_secret_key_raises(self, mock_run_cmd): - """Start without secret_key should raise RuntimeError.""" - with pytest.raises(RuntimeError, match="SECRET_KEY"): - workload.start("/charm", "4", "60", "", "salt", "postgres://x/y") + mock_start.assert_called_once() + @patch("workload.start_gunicorn") + @patch("workload.os.remove") @patch("workload.run_command") - def test_start_missing_oauth_token_salt_raises(self, mock_run_cmd): - """Start without oauth_token_salt should raise RuntimeError.""" - with pytest.raises(RuntimeError, match="OAUTH_TOKEN_SALT"): - workload.start("/charm", "4", "60", "key", "", "postgres://x/y") + @patch("workload.os.path.exists", return_value=True) + def test_restore_rolls_back_on_failure( + self, mock_exists, mock_run_cmd, mock_remove, mock_start + ): + """If restore fails, the original database should be restored from the backup.""" + + def side_effect(*args, **kwargs): + # Let terminate, pg_dump, and drop succeed; fail on psql -f (the 4th call) + if args == ("psql", "postgresql://host/db", "-f", "/tmp/dump.sql"): + raise RuntimeError("restore failed") + + mock_run_cmd.side_effect = side_effect + + with pytest.raises(RuntimeError, match="Failed to restore the database"): + workload.restore_database_from_file("dump.sql", "postgresql://host/db") + + # pg_restore should have been called to roll back + pg_restore_calls = [ + c for c in mock_run_cmd.call_args_list if "pg_restore" in c.args + ] + assert len(pg_restore_calls) == 1 + assert "/tmp/backup_dump.sql" in pg_restore_calls[0].args From 48bfb9ce1184bb76febe39a38c01df4302956331 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Thu, 5 Mar 2026 16:13:18 +0300 Subject: [PATCH 15/17] feat: add artifacts download --- .github/workflows/download-artifacts.yaml | 69 +++++++++++++ .gitignore | 1 + README.md | 34 ++++++- scripts/download-artifacts.py | 114 ++++++++++++++++++++++ 4 files changed, 217 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/download-artifacts.yaml create mode 100644 scripts/download-artifacts.py diff --git a/.github/workflows/download-artifacts.yaml b/.github/workflows/download-artifacts.yaml new file mode 100644 index 00000000..6cb0a3b2 --- /dev/null +++ b/.github/workflows/download-artifacts.yaml @@ -0,0 +1,69 @@ +name: Download Artifacts + +on: + push: + branches: + - airgapped-charm + schedule: + - cron: '0 * * * *' + workflow_dispatch: + +jobs: + generate-and-upload-artifacts: + runs-on: + [self-hosted, self-hosted-linux-amd64-jammy-private-endpoint-medium] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install dependencies + run: | + python3 -m pip install --upgrade pip + pip install beautifulsoup4 requests + + - name: Run artifact generation script + run: python3 scripts/download-artifacts.py --dest "$PWD" + + - name: Upload apt news + uses: actions/upload-artifact@v4 + with: + name: apt-news + path: apt_news.tar.gz + retention-days: 1 + if-no-files-found: error + + - name: Upload CVE data + uses: actions/upload-artifact@v4 + with: + name: oval-notices-data + path: oval_notices.tar.gz + retention-days: 1 + if-no-files-found: error + 
+      - name: Create database dump
+        env:
+          DATABASE_URI: ${{ secrets.DATABASE_URI }}
+        run: |
+          # Install PostgreSQL client
+          sudo apt-get update && sudo apt-get install --no-install-recommends -y postgresql-client
+          echo "Creating database dump..."
+          if [ -z "$DATABASE_URI" ]; then echo "DATABASE_URI not set"; exit 1; fi
+          echo "Database dump created."
+
+      - name: Upload database dump
+        uses: actions/upload-artifact@v4
+        with:
+          name: database-dump
+          path: database_dump.sql.gz
+          retention-days: 1
+          if-no-files-found: error
+
+concurrency:
+  group: download-artifacts
+  cancel-in-progress: true
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index f39acc3d..ad2f9e93 100644
--- a/.gitignore
+++ b/.gitignore
@@ -48,6 +48,7 @@ _site/
 *.*.map
 *.charm
 *.rock
+*.tar.gz
 
 # [env] Local environment settings
 .docker-project
diff --git a/README.md b/README.md
index a7c8e29a..3208c6da 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,8 @@ API functions under ubuntu.com for querying CVEs and security notices.
 
+For deploying this API as a Juju machine charm, see the [machine-charm README](machine-charm/README.md).
+
 ## Local development
 
 The simplest way to run the API locally is using [the dotrun snap](https://github.com/canonical-web-and-design/dotrun):
 
@@ -120,4 +122,34 @@ You'll get a 302 link which you should open to grant authorization, as well as a
 To use the token, pass the following header:
 ```bash
 curl -v -H "Auth-Type: oauth" -H "Authorization: Bearer " -X PUT --data @ http://0.0.0.0:8030/security/updates/cves.json
-```
\ No newline at end of file
+```
+
+## Downloading artifacts
+
+The `scripts/download-artifacts.py` script downloads the latest apt news and OVAL notices from their respective sources and packages them as `.tar.gz` files.
+
+### Dependencies
+
+This script requires the `beautifulsoup4` and `requests` packages:
+
+```bash
+pip install beautifulsoup4 requests
+```
+
+### Usage
+
+```bash
+python scripts/download-artifacts.py --dest /path/to/destination
+```
+
+By default, files are downloaded to `/tmp/artifacts`. The script will:
+
+1. Download the latest apt news from the [Ubuntu MOTD API](https://motd.ubuntu.com/aptnews.json) and save it as `apt_news.tar.gz`
+2. Download all OVAL notices from the [Canonical security metadata service](https://security-metadata.canonical.com/oval/) and package them into `oval_notices.tar.gz`
+
+### Environment variables
+
+| Variable | Default | Description |
+| --- | --- | --- |
+| `APT_MOTD_URL` | `https://motd.ubuntu.com/aptnews.json` | URL for the apt news JSON endpoint |
+| `OVAL_NEWS_URL` | `https://security-metadata.canonical.com` | Base URL for the OVAL notices XML endpoint |
\ No newline at end of file
diff --git a/scripts/download-artifacts.py b/scripts/download-artifacts.py
new file mode 100644
index 00000000..8f73e732
--- /dev/null
+++ b/scripts/download-artifacts.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python3
+"""
+Download artifacts script for ubuntu-com-security-api.
+
+This module provides utilities to download the latest apt news and OVAL notices
+from their respective sources and package them as .tar.gz files.
+
+It fetches apt news from the Ubuntu MOTD API and OVAL notices from the
+Canonical security metadata service.
+
+Dependencies:
+    - beautifulsoup4, requests: Must be installed for fetching and parsing
+      HTML/XML content. Install via `pip install beautifulsoup4 requests`.
+
+Environment Variables:
+    APT_MOTD_URL (str): URL for the apt news JSON endpoint.
+        Defaults to "https://motd.ubuntu.com/aptnews.json".
+    OVAL_NEWS_URL (str): Base URL for the OVAL notices XML endpoint.
+        Defaults to "https://security-metadata.canonical.com" (the script
+        appends "/oval" when fetching the notice index).
+
+Usage:
+    Run as a standalone script to download artifacts to a specified destination:
+
+        $ python download-artifacts.py --dest /path/to/destination
+
+    Or import individual functions for use in other modules.
+"""
+import argparse
+import io
+import os
+import shutil
+import tarfile
+
+import requests
+from bs4 import BeautifulSoup
+
+
+APT_MOTD_URL = os.getenv("APT_MOTD_URL", "https://motd.ubuntu.com/aptnews.json")
+OVAL_NEWS_URL = os.getenv("OVAL_NEWS_URL", "https://security-metadata.canonical.com")
+
+def download_apt_news(dest_path: str = "/tmp/artifacts") -> None:
+    """Download the latest apt news."""
+
+    os.makedirs(dest_path, exist_ok=True)
+    dest_file = f"{dest_path}/apt_news.tar.gz"
+    print(f"Downloading apt news from {APT_MOTD_URL} to {dest_file}\n")
+
+    response = requests.get(APT_MOTD_URL, timeout=60)
+    if response.status_code == 200:
+        with tarfile.open(dest_file, "w:gz") as tar:
+            news_data = io.BytesIO(response.content)
+            tarinfo = tarfile.TarInfo(name="aptnews.json")
+            tarinfo.size = len(response.content)
+            tar.addfile(tarinfo, news_data)
+    else:
+        print(f"Failed to fetch news. Status code: {response.status_code}")
+
+
+def download_oval_notices(dest_path: str = "/tmp/artifacts") -> None:
+    """Download the latest OVAL notices."""
+    os.makedirs(dest_path, exist_ok=True)
+
+    dest_file = f"{dest_path}/oval_notices.tar.gz"
+    print(f"Downloading OVAL notices from {OVAL_NEWS_URL}/oval to {dest_file}\n")
+
+    response = requests.get(f"{OVAL_NEWS_URL}/oval", timeout=60)
+    if response.status_code == 200:
+        # Parse the HTML response to extract links to OVAL notices
+        soup = BeautifulSoup(response.content.decode("utf-8"), "html.parser")
+        rows = soup.body.table.find_all("tr")
+        links = []
+        for row in rows:
+            if row.a:
+                links.append(f"{OVAL_NEWS_URL}{row.a['href']}")
+
+        # Download each OVAL notice and save it to the destination path
+        tars_path = f"{dest_path}/temp_oval/"
+        os.makedirs(tars_path, exist_ok=True)
+        for link in links:
+            file_name = os.path.basename(link)
+            print(f"Downloading OVAL notice {file_name}")
+            notice_response = requests.get(link, timeout=60)
+
+            if notice_response.status_code == 200:
+                # Write file to destination path
+                with open(f"{tars_path}/{file_name}", "wb") as f:
+                    f.write(notice_response.content)
+                print(f"Notice {link} downloaded.\n")
+            else:
+                raise Exception(f"Failed to fetch OVAL notice {link}. Status code: {notice_response.status_code}")
+
+        # Package all downloaded notices into an aggregated tar.gz file
+        with tarfile.open(dest_file, "w:gz") as tar:
+            tar.add(tars_path, arcname="oval_notices/")
+        print(f"All OVAL notices downloaded and packaged into {dest_file}\n")
+        shutil.rmtree(tars_path, ignore_errors=True)
+    else:
+        print(f"Failed to fetch OVAL notices. Status code: {response.status_code}\n")
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Run this script to download the latest apt news and OVAL notices as .tar.gz files."
+ ) + parser.add_argument( + "--dest", + type=str, + default="/tmp/artifacts", + help="Destination path for the downloaded .tar.gz files (default location: /tmp/artifacts)", + ) + + args = parser.parse_args() + download_apt_news(dest_path=args.dest) + download_oval_notices(dest_path=args.dest) From f77c9c605920ca687a33c961b4d200a0c9a8d0c1 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Thu, 5 Mar 2026 16:22:59 +0300 Subject: [PATCH 16/17] feat: add pg_dump to script --- .github/workflows/download-artifacts.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/download-artifacts.yaml b/.github/workflows/download-artifacts.yaml index 6cb0a3b2..075bb298 100644 --- a/.github/workflows/download-artifacts.yaml +++ b/.github/workflows/download-artifacts.yaml @@ -54,6 +54,7 @@ jobs: sudo apt-get update && sudo apt-get install --no-install-recommends -y postgresql-client echo "Creating database dump..." if [ -z "$DATABASE_URI" ]; then echo "DATABASE_URI not set"; exit 1; fi + pg_dump "$DATABASE_URI" -Fc | gzip > database_dump.sql.gz echo "Database dump created." - name: Upload database dump From b0cb33230efe2b00450b4aec4869cdd08fcdaf44 Mon Sep 17 00:00:00 2001 From: Samuel Olwe Date: Thu, 5 Mar 2026 16:57:44 +0300 Subject: [PATCH 17/17] feat: unpin package versions --- machine-charm/src/workload.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/machine-charm/src/workload.py b/machine-charm/src/workload.py index 2e20e64a..73b1121e 100644 --- a/machine-charm/src/workload.py +++ b/machine-charm/src/workload.py @@ -41,9 +41,9 @@ def install(charm_dir: str) -> None: """Install the workload (by installing a snap, for example).""" # Install apt packages apt.update() - apt.add_package("libsodium-dev", "1.0.18-1ubuntu0.24.04.1") - apt.add_package("python3-venv", "3.12.3-0ubuntu2.1") - apt.add_package("postgresql-16", "16.11-0ubuntu0.24.04.1") + apt.add_package("libsodium-dev", update_cache=False) + apt.add_package("python3-venv", update_cache=False) + apt.add_package("postgresql-16", update_cache=False) with open(INSTALL_LOG_FILE, "a") as log_file: # Create a virtual environment