diff --git a/cognite_toolkit/_cdf_tk/client/_toolkit_client.py b/cognite_toolkit/_cdf_tk/client/_toolkit_client.py index 1218a0509f..f44b13da7e 100644 --- a/cognite_toolkit/_cdf_tk/client/_toolkit_client.py +++ b/cognite_toolkit/_cdf_tk/client/_toolkit_client.py @@ -10,6 +10,8 @@ from .api.agents import AgentsAPI from .api.alerts import AlertsAPI from .api.annotations import AnnotationsAPI +from .api.app_versions import AppVersionsAPI +from .api.apps import AppsAPI from .api.assets import AssetsAPI from .api.canvas import IndustrialCanvasAPI from .api.cognite_files import CogniteFilesAPI @@ -64,6 +66,8 @@ class ToolAPI: def __init__(self, http_client: HTTPClient, console: Console) -> None: self.http_client = http_client self.agents = AgentsAPI(http_client) + self.apps = AppsAPI(http_client) + self.app_versions = AppVersionsAPI(http_client) self.annotations = AnnotationsAPI(http_client) self.assets = AssetsAPI(http_client) self.cognite_files = CogniteFilesAPI(http_client) diff --git a/cognite_toolkit/_cdf_tk/client/api/app_versions.py b/cognite_toolkit/_cdf_tk/client/api/app_versions.py new file mode 100644 index 0000000000..67c6895e62 --- /dev/null +++ b/cognite_toolkit/_cdf_tk/client/api/app_versions.py @@ -0,0 +1,119 @@ +"""AppVersionsAPI: Version management for custom apps via the CDF App Hosting API.""" + +import json +from collections.abc import Iterable, Sequence + +from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, RequestMessage, ToolkitAPIError +from cognite_toolkit._cdf_tk.client.http_client._data_classes import FailedResponse, SuccessResponse +from cognite_toolkit._cdf_tk.client.identifiers import AppVersionId +from cognite_toolkit._cdf_tk.client.resource_classes.app_version import AppVersionResponse + + +class AppVersionsAPI: + """Client for the CDF App Hosting Versions API (POST /apphosting/apps/{externalId}/versions/...).""" + + def __init__(self, http_client: HTTPClient) -> None: + self._http_client = http_client + + def _url(self, 
path: str) -> str: + return self._http_client.config.create_api_url(path) + + def upload( + self, + external_id: str, + version: str, + entrypoint: str, + zip_bytes: bytes, + ) -> None: + """POST /apphosting/apps/{externalId}/versions — multipart/form-data upload of the zipped app.""" + result = self._http_client.request_multipart_retries( + url=self._url(f"/apphosting/apps/{external_id}/versions"), + files={"file": ("app.zip", zip_bytes, "application/zip")}, + form_fields={"version": version, "entryPath": entrypoint}, + ) + if isinstance(result, FailedResponse): + raise ToolkitAPIError(message=result.body, code=result.status_code) + + def update(self, external_id: str, version: str, patch: dict) -> None: + """POST /apphosting/apps/{externalId}/versions/update — apply a lifecycle/alias patch to a version.""" + request = RequestMessage( + endpoint_url=self._url(f"/apphosting/apps/{external_id}/versions/update"), + method="POST", + body_content={"items": [{"version": version, "update": patch}]}, + ) + self._http_client.request_single_retries(request).get_success_or_raise(request) + + def retrieve(self, items: Sequence[AppVersionId], ignore_unknown_ids: bool = False) -> list[AppVersionResponse]: + """GET /apphosting/apps/{externalId}/versions/{version} — retrieve version metadata.""" + results: list[AppVersionResponse] = [] + for item in items: + request = RequestMessage( + endpoint_url=self._url(f"/apphosting/apps/{item.app_external_id}/versions/{item.version}"), + method="GET", + ) + result = self._http_client.request_single_retries(request) + if not isinstance(result, SuccessResponse): + if isinstance(result, FailedResponse) and result.status_code in (400, 404) and ignore_unknown_ids: + continue + result.get_success_or_raise(request) + continue + data = json.loads(result.body) + results.append(AppVersionResponse( + app_external_id=data.get("appExternalId", item.app_external_id), + version=data.get("version", item.version), + 
lifecycle_state=data.get("lifecycleState", "DRAFT"), + alias=data.get("alias"), + entrypoint=data.get("entrypoint", "index.html"), + )) + return results + + def iterate(self, limit: int | None = 100) -> Iterable[list[AppVersionResponse]]: + """POST /apphosting/versions/list — paginated list of all versions across all apps.""" + cursor: str | None = None + page_limit = min(limit, 1000) if limit is not None else 1000 + fetched = 0 + while True: + body: dict = {"limit": page_limit} + if cursor: + body["cursor"] = cursor + request = RequestMessage( + endpoint_url=self._url("/apphosting/versions/list"), + method="POST", + body_content=body, + ) + result = self._http_client.request_single_retries(request) + if not isinstance(result, SuccessResponse): + result.get_success_or_raise(request) + break + + data = json.loads(result.body) + page_items = [ + AppVersionResponse( + app_external_id=item["appExternalId"], + version=item["version"], + lifecycle_state=item.get("lifecycleState", "DRAFT"), + alias=item.get("alias"), + entrypoint=item.get("entrypoint", "index.html"), + ) + for item in data.get("items", []) + ] + if page_items: + yield page_items + fetched += len(page_items) + + cursor = data.get("nextCursor") + if not cursor or (limit is not None and fetched >= limit): + break + + def delete(self, versions: Sequence[AppVersionId]) -> None: + """POST /apphosting/apps/{externalId}/versions/delete — delete specific versions, grouped by app.""" + by_app: dict[str, list[AppVersionId]] = {} + for version_id in versions: + by_app.setdefault(version_id.app_external_id, []).append(version_id) + for app_external_id, app_versions in by_app.items(): + request = RequestMessage( + endpoint_url=self._url(f"/apphosting/apps/{app_external_id}/versions/delete"), + method="POST", + body_content={"items": [{"version": v.version} for v in app_versions]}, + ) + self._http_client.request_single_retries(request).get_success_or_raise(request) diff --git 
a/cognite_toolkit/_cdf_tk/client/api/apps.py b/cognite_toolkit/_cdf_tk/client/api/apps.py new file mode 100644 index 0000000000..39a967edef --- /dev/null +++ b/cognite_toolkit/_cdf_tk/client/api/apps.py @@ -0,0 +1,41 @@ +"""AppsAPI: Custom apps deployed via the CDF App Hosting API.""" + +from collections.abc import Sequence + +from cognite_toolkit._cdf_tk.client.cdf_client import CDFResourceAPI, PagedResponse +from cognite_toolkit._cdf_tk.client.cdf_client.api import Endpoint +from cognite_toolkit._cdf_tk.client.http_client import HTTPClient, ItemsSuccessResponse, RequestMessage, SuccessResponse +from cognite_toolkit._cdf_tk.client.http_client._data_classes import FailedResponse +from cognite_toolkit._cdf_tk.client.resource_classes.app import AppRequest, AppResponse + + +class AppsAPI(CDFResourceAPI[AppResponse]): + """Client for the CDF App Hosting API (/apphosting/apps).""" + + def __init__(self, http_client: HTTPClient) -> None: + super().__init__( + http_client=http_client, + method_endpoint_map={ + "create": Endpoint(method="POST", path="/apphosting/apps", item_limit=1), + }, + ) + + def _validate_page_response( + self, response: SuccessResponse | ItemsSuccessResponse + ) -> PagedResponse[AppResponse]: + return PagedResponse[AppResponse].model_validate_json(response.body) + + def create(self, items: Sequence[AppRequest]) -> list[AppResponse]: + """POST /apphosting/apps — create apps.""" + return self._request_item_response(items, "create") + + def retrieve(self, external_id: str) -> AppResponse | None: + """GET /apphosting/apps/{externalId} — fetch app-level metadata (name, description).""" + request = RequestMessage( + endpoint_url=self._make_url(f"/apphosting/apps/{external_id}"), + method="GET", + ) + result = self._http_client.request_single_retries(request) + if isinstance(result, FailedResponse) and result.status_code == 404: + return None + return AppResponse.model_validate_json(result.get_success_or_raise(request).body) diff --git 
a/cognite_toolkit/_cdf_tk/client/http_client/_client.py b/cognite_toolkit/_cdf_tk/client/http_client/_client.py index d7d70f7970..7e98eefeea 100644 --- a/cognite_toolkit/_cdf_tk/client/http_client/_client.py +++ b/cognite_toolkit/_cdf_tk/client/http_client/_client.py @@ -259,38 +259,26 @@ def _handle_error_single(self, e: Exception, request: RequestMessage) -> Request return FailedRequest(error=error_msg) - def request_raw_retries( + def _execute_raw_with_retries( self, - method: Literal["GET", "POST", "PUT", "DELETE"], + method: str, url: str, - content: bytes | Iterable[bytes], + max_retries: int, + content: bytes | Iterable[bytes] | None = None, + files: dict[str, tuple[str, bytes, str]] | None = None, + data: dict[str, str] | None = None, headers: dict[str, str] | None = None, - max_retries: int | None = None, ) -> SuccessResponse | FailedResponse: - """Send a raw HTTP request with retry logic but without authentication headers. - - This is useful for uploading to signed URLs (e.g., GCS signed URLs) where - authentication is embedded in the URL and adding auth headers would cause errors. - - Args: - method: HTTP method to use. - url: The URL to send the request to. - content: The content to send. Can be bytes or an iterable of bytes for streaming. - headers: Optional headers to include in the request. - max_retries: Maximum number of retries. Defaults to the client's max_retries setting. - - Returns: - HTTPResult: The result of the HTTP request, either SuccessResponse or FailedResponse. 
- """ - retries = max_retries if max_retries is not None else self._max_retries attempt = 0 last_error_code: int = -1 - while attempt <= retries: + while attempt <= max_retries: try: response = self.session.request( method=method, url=url, content=content, + files=files, + data=data, headers=headers, follow_redirects=False, ) @@ -301,22 +289,16 @@ def request_raw_retries( content=response.content, ) last_error_code = response.status_code - # Check if we should retry based on status code if response.status_code in self._retry_status_codes: retry_after = self._get_retry_after_in_header(response) - if retry_after is not None: - time.sleep(retry_after) - else: - time.sleep(self._backoff_time(attempt)) + time.sleep(retry_after if retry_after is not None else self._backoff_time(attempt)) attempt += 1 continue - # Non-retryable error return FailedResponse( status_code=response.status_code, body=response.text, - error=ErrorDetails(code=response.status_code, message=response.text), + error=ErrorDetails.from_response(response), ) - except ( httpx.ReadTimeout, httpx.TimeoutException, @@ -325,22 +307,64 @@ def request_raw_retries( httpx.ConnectTimeout, ) as e: attempt += 1 - if attempt <= retries: + if attempt <= max_retries: time.sleep(self._backoff_time(attempt)) continue return FailedResponse( status_code=last_error_code, body=f"Request failed after {attempt} attempts: {e!s}", - error=ErrorDetails(code=last_error_code, message=f"Request failed after {attempt} attempts: {e!s}"), + error=ErrorDetails( + code=last_error_code, + message=f"Request failed after {attempt} attempts: {e!s}", + ), ) - - # Should not reach here, but just in case return FailedResponse( status_code=last_error_code, body=f"Request failed after {attempt} attempts.", error=ErrorDetails(code=last_error_code, message=f"Request failed after {attempt} attempts."), ) + def request_raw_retries( + self, + method: Literal["GET", "POST", "PUT", "DELETE"], + url: str, + content: bytes | Iterable[bytes], + headers: 
dict[str, str] | None = None, + max_retries: int | None = None, + ) -> SuccessResponse | FailedResponse: + """Send a raw HTTP request with retry logic but without authentication headers. + + This is useful for uploading to signed URLs (e.g., GCS signed URLs) where + authentication is embedded in the URL and adding auth headers would cause errors. + """ + retries = max_retries if max_retries is not None else self._max_retries + return self._execute_raw_with_retries(method, url, retries, content=content, headers=headers) + + def request_multipart_retries( + self, + url: str, + files: dict[str, tuple[str, bytes, str]], + form_fields: dict[str, str], + api_version: str | None = None, + ) -> SuccessResponse | FailedResponse: + """POST multipart/form-data to a CDF endpoint with auth headers and retry logic. + + Uses httpx's native multipart encoder — Content-Type (with boundary) and + Content-Length are set automatically. Unlike request_raw_retries, CDF auth + headers are included because this method targets CDF endpoints, not signed URLs. + """ + auth_name, auth_value = self.config.credentials.authorization_header() + # Content-Type is intentionally absent — httpx sets it from the multipart body (including boundary). + headers: dict[str, str] = { + "User-Agent": f"httpx/{httpx.__version__} {get_user_agent()}", + auth_name: auth_value, + "accept": "application/json", + "x-cdp-sdk": f"CogniteToolkit:{get_current_toolkit_version()}", + "x-cdp-app": self.config.client_name, + "cdf-version": api_version or self.config.api_subversion, + } + return self._execute_raw_with_retries("POST", url, self._max_retries, files=files, data=form_fields, headers=headers) + def request_items(self, message: ItemsRequest) -> Sequence[ItemsRequest | ItemsResultMessage]: """Send an HTTP request with multiple items and return the response. 
diff --git a/cognite_toolkit/_cdf_tk/client/resource_classes/app.py b/cognite_toolkit/_cdf_tk/client/resource_classes/app.py index 99e344cbd2..f9a4db5afb 100644 --- a/cognite_toolkit/_cdf_tk/client/resource_classes/app.py +++ b/cognite_toolkit/_cdf_tk/client/resource_classes/app.py @@ -1,40 +1,22 @@ -from typing import Any, Literal - from cognite_toolkit._cdf_tk.client._resource_base import BaseModelObject, RequestResource, ResponseResource -from cognite_toolkit._cdf_tk.client.identifiers import AppVersionId +from cognite_toolkit._cdf_tk.client.identifiers import ExternalId class App(BaseModelObject): external_id: str - version: str name: str description: str | None = None - lifecycle_state: Literal["DRAFT", "PUBLISHED", "DEPRECATED", "ARCHIVED"] = "PUBLISHED" - alias: Literal["ACTIVE", "PREVIEW"] | None = None - entrypoint: str = "index.html" class AppRequest(App, RequestResource): - """Local representation of a custom app version for App Hosting deployment.""" - - def as_id(self) -> AppVersionId: - return AppVersionId(app_external_id=self.external_id, version=self.version) + """Write resource for POST /apphosting/apps.""" - def dump( - self, camel_case: bool = True, exclude_extra: bool = False, context: Literal["api", "toolkit"] = "api" - ) -> dict[str, Any]: - if context == "toolkit": - return super().dump(camel_case=camel_case, exclude_extra=exclude_extra) - # Body for POST /apphosting/apps (ensure-app call) - key = "externalId" if camel_case else "external_id" - body: dict[str, Any] = {key: self.external_id, "name": self.name} - if self.description is not None: - body["description"] = self.description - return body + def as_id(self) -> ExternalId: + return ExternalId(external_id=self.external_id) class AppResponse(App, ResponseResource[AppRequest]): - """Response from App Hosting after a successful deploy.""" + """Response from GET/POST /apphosting/apps.""" @classmethod def request_cls(cls) -> type[AppRequest]: diff --git 
a/cognite_toolkit/_cdf_tk/client/resource_classes/app_version.py b/cognite_toolkit/_cdf_tk/client/resource_classes/app_version.py new file mode 100644 index 0000000000..d31fe965b8 --- /dev/null +++ b/cognite_toolkit/_cdf_tk/client/resource_classes/app_version.py @@ -0,0 +1,40 @@ +from typing import Literal + +from cognite_toolkit._cdf_tk.client._resource_base import BaseModelObject, RequestResource, ResponseResource +from cognite_toolkit._cdf_tk.client.identifiers import AppVersionId +from cognite_toolkit._cdf_tk.client.resource_classes.app import App + + +class AppVersion(BaseModelObject): +    version: str +    lifecycle_state: Literal["DRAFT", "PUBLISHED", "DEPRECATED", "ARCHIVED"] = "PUBLISHED" +    alias: Literal["ACTIVE", "PREVIEW"] | None = None +    entrypoint: str = "index.html" + + +class AppVersionRequest(App, AppVersion, RequestResource): +    """Toolkit write class — the union of App (externalId/name/description) and AppVersion +    (version/lifecycleState/alias/entrypoint) fields, matching the single-YAML user experience. + +    The App Hosting API splits these across two endpoints: POST /apphosting/apps and +    POST /apphosting/apps/{id}/versions. AppIO._deploy splits this object into both calls. +    AppVersionResponse uses app_external_id (not external_id) and omits name/description because +    the versions API wire format differs from the user-facing YAML representation. +    """ + +    def as_id(self) -> AppVersionId: +        return AppVersionId(app_external_id=self.external_id, version=self.version) + + +class AppVersionResponse(AppVersion, ResponseResource[AppVersionRequest]): +    """Response from the App Hosting versions API (GET/POST /apphosting/apps/{id}/versions/...). + +    Uses app_external_id (not external_id) because the wire format returns `appExternalId` to +    refer to the parent app's ID. App versions themselves do not have a unique `externalId` field.
+ """ + + app_external_id: str + + @classmethod + def request_cls(cls) -> type[AppVersionRequest]: + return AppVersionRequest diff --git a/cognite_toolkit/_cdf_tk/client/testing.py b/cognite_toolkit/_cdf_tk/client/testing.py index 1cc0b26480..3a6eacc10c 100644 --- a/cognite_toolkit/_cdf_tk/client/testing.py +++ b/cognite_toolkit/_cdf_tk/client/testing.py @@ -33,6 +33,8 @@ from . import ToolkitClientConfig from ._toolkit_client import ToolAPI from .api.agents import AgentsAPI +from .api.app_versions import AppVersionsAPI +from .api.apps import AppsAPI from .api.assets import AssetsAPI from .api.chart_scheduled_calculations import ChartScheduledCalculationsAPI from .api.charts_monitoring_job import ChartMonitoringJobsAPI @@ -178,6 +180,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.tool = MagicMock(spec=ToolAPI) self.tool.agents = MagicMock(spec=AgentsAPI) + self.tool.apps = MagicMock(spec=AppsAPI) + self.tool.app_versions = MagicMock(spec=AppVersionsAPI) self.tool.datapoint_subscriptions = MagicMock(spec=DatapointSubscriptionsAPI) self.tool.three_d = MagicMock(spec=ThreeDAPI) self.tool.three_d.models_classic = MagicMock(spec_set=ThreeDClassicModelsAPI) diff --git a/cognite_toolkit/_cdf_tk/feature_flags.py b/cognite_toolkit/_cdf_tk/feature_flags.py index 048d47eaea..e2ed22eaa1 100644 --- a/cognite_toolkit/_cdf_tk/feature_flags.py +++ b/cognite_toolkit/_cdf_tk/feature_flags.py @@ -82,6 +82,10 @@ class Flags(Enum): visible=True, description="Enables the entity-matching command family under the dev plugin", ) + CUSTOM_APPS = FlagMetadata( + visible=False, + description="Enables support for custom app resources (App Hosting API deployment)", + ) def is_enabled(self) -> bool: return FeatureFlag.is_enabled(self) diff --git a/cognite_toolkit/_cdf_tk/resource_ios/__init__.py b/cognite_toolkit/_cdf_tk/resource_ios/__init__.py index 9261e97e72..f022ef1c4b 100644 --- a/cognite_toolkit/_cdf_tk/resource_ios/__init__.py +++ 
b/cognite_toolkit/_cdf_tk/resource_ios/__init__.py @@ -21,6 +21,7 @@ from ._data_cruds import DatapointsCRUD, FileCRUD, RawFileCRUD from ._resource_ios import ( AgentIO, + AppIO, AssetIO, CogniteFileCRUD, ContainerCRUD, @@ -102,6 +103,8 @@ _EXCLUDED_CRUDS.add(DataProductVersionIO) _EXCLUDED_CRUDS.add(RuleSetIO) _EXCLUDED_CRUDS.add(RuleSetVersionIO) +if not FeatureFlag.is_enabled(Flags.CUSTOM_APPS): + _EXCLUDED_CRUDS.add(AppIO) CRUDS_BY_FOLDER_NAME_INCLUDE_ALPHA: defaultdict[str, list[type[Loader]]] = defaultdict(list) CRUDS_BY_FOLDER_NAME: defaultdict[str, list[type[Loader]]] = defaultdict(list) @@ -150,6 +153,7 @@ ResourceTypes: TypeAlias = Literal[ "3dmodels", "agents", + "apps", "auth", "cdf_applications", "classic", @@ -198,6 +202,7 @@ def get_crud(resource_dir: str, kind: str) -> type[Loader]: "RESOURCE_DATA_CRUD_LIST", "_EXCLUDED_CRUDS", "AgentIO", + "AppIO", "AssetIO", "CogniteFileCRUD", "ContainerCRUD", diff --git a/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/__init__.py b/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/__init__.py index 7da3605dfd..17a3973073 100644 --- a/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/__init__.py +++ b/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/__init__.py @@ -1,4 +1,5 @@ from .agent import AgentIO +from .app import AppIO from .auth import GroupAllScopedCRUD, GroupIO, SecurityCategoryIO from .classic import AssetIO, EventIO, SequenceIO, SequenceRowIO from .configuration import SearchConfigIO @@ -54,6 +55,7 @@ __all__ = [ "AgentIO", + "AppIO", "AssetIO", "CogniteFileCRUD", "ContainerCRUD", diff --git a/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/app.py b/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/app.py new file mode 100644 index 0000000000..2d812c545b --- /dev/null +++ b/cognite_toolkit/_cdf_tk/resource_ios/_resource_ios/app.py @@ -0,0 +1,358 @@ +import io +import json +import os +import zipfile +from collections.abc import Hashable, Iterable, Sequence +from pathlib import Path +from 
typing import Any, Literal, final + +from rich.console import Console + +from cognite_toolkit._cdf_tk.client import ToolkitClient +from cognite_toolkit._cdf_tk.client._resource_base import Identifier +from cognite_toolkit._cdf_tk.client.http_client import ToolkitAPIError +from cognite_toolkit._cdf_tk.client.identifiers import AppVersionId +from cognite_toolkit._cdf_tk.client.resource_classes.app import AppRequest +from cognite_toolkit._cdf_tk.client.resource_classes.app_version import AppVersionRequest, AppVersionResponse +from cognite_toolkit._cdf_tk.client.resource_classes.group import ( + AclType, + AllScope, + AppHostingAcl, + ScopeDefinition, +) +from cognite_toolkit._cdf_tk.exceptions import ToolkitRequiredValueError, ToolkitValueError +from cognite_toolkit._cdf_tk.resource_ios._base_ios import FailedReadExtra, ReadExtra, ResourceIO, SuccessExtra +from cognite_toolkit._cdf_tk.utils.hashing import calculate_directory_hash +from cognite_toolkit._cdf_tk.yaml_classes import AppsYAML + +from .auth import GroupAllScopedCRUD + +_EXCLUDE_DIRS = {"__pycache__", "node_modules", ".git"} + +_LIFECYCLE_ORDER = ["DRAFT", "PUBLISHED", "DEPRECATED", "ARCHIVED"] + + +def _zip_app_directory(source_dir: Path, extra_root_files: list[Path]) -> bytes: + buffer = io.BytesIO() + with zipfile.ZipFile(buffer, "w", strict_timestamps=False) as zf: + for root, dirs, files in os.walk(source_dir): + dirs[:] = [d for d in dirs if d not in _EXCLUDE_DIRS] + root_path = Path(root) + arc_root = root_path.relative_to(source_dir) + zf.write(root_path, arcname=str(arc_root)) + for filename in files: + file_path = root_path / filename + zf.write(file_path, arcname=str(file_path.relative_to(source_dir))) + for extra_file in extra_root_files: + zf.write(extra_file, arcname=extra_file.name) + return buffer.getvalue() + + +@final +class AppIO(ResourceIO[AppVersionId, AppVersionRequest, AppVersionResponse]): + support_drop = True + folder_name = "apps" + resource_cls = AppVersionResponse + 
resource_write_cls = AppVersionRequest + kind = "App" + yaml_cls = AppsYAML + dependencies = frozenset({GroupAllScopedCRUD}) + _doc_url = "Apps/operation/appsCreate" + support_update = True + + def __init__(self, client: ToolkitClient, build_path: Path | None, console: Console | None): + super().__init__(client, build_path, console) + self.zip_path_by_version_id: dict[AppVersionId, Path] = {} + + @property + def display_name(self) -> str: + return "apps" + + @classmethod + def get_minimum_scope(cls, items: Sequence[AppVersionRequest]) -> ScopeDefinition: + return AllScope() + + @classmethod + def create_acl(cls, actions: set[Literal["READ", "WRITE"]], scope: ScopeDefinition) -> Iterable[AclType]: + if isinstance(scope, AllScope): + yield AppHostingAcl(actions=sorted(actions), scope=scope) + + @classmethod + def get_id(cls, item: AppVersionResponse | AppVersionRequest | dict) -> AppVersionId: + if isinstance(item, dict): + ext = ( + item.get("appExternalId") + or item.get("app_external_id") + or item.get("externalId") + or item.get("external_id") + ) + version = item.get("version") + if ext is None: + raise ToolkitRequiredValueError("App YAML must define externalId.") + if version is None: + raise ToolkitRequiredValueError("App YAML must define version.") + return AppVersionId(app_external_id=ext, version=version) + if isinstance(item, AppVersionRequest): + return item.as_id() + return AppVersionId(app_external_id=item.app_external_id, version=item.version) + + @classmethod + def dump_id(cls, identifier: AppVersionId) -> dict[str, Any]: + return identifier.dump() + + @classmethod + def as_str(cls, identifier: AppVersionId) -> str: + return str(identifier) + + @classmethod + def get_dependent_items(cls, item: dict) -> Iterable[tuple[type[ResourceIO], Hashable]]: + return [] + + @classmethod + def get_dependencies(cls, resource: AppsYAML) -> Iterable[tuple[type[ResourceIO], Identifier]]: + return [] + + @classmethod + def get_extra_files(cls, filepath: Path, 
identifier: AppVersionId, item: dict[str, Any]) -> Iterable[ReadExtra]: + app_external_id = identifier.app_external_id + source_path_str = item.get("sourcePath") or item.get("source_path") + if source_path_str is not None: + app_root = (filepath.parent / source_path_str).resolve() + else: + app_root = filepath.with_name(app_external_id) + + if not app_root.is_dir(): + yield FailedReadExtra( + code="MISSING", + error=( + f"App directory not found for externalId {app_external_id!r}. " + f"Expected {app_root.as_posix()} to exist." + ), + source_path=app_root, + ) + return + + entrypoint = item.get("entrypoint") or "index.html" + dist_dir = app_root / "dist" + if (dist_dir / entrypoint).is_file(): + source_dir = dist_dir + elif (app_root / "src").is_dir() and (app_root / "package.json").is_file(): + yield FailedReadExtra( + code="MISSING", + error=( + f"App {app_external_id!r} looks like an unbuilt web project: " + f"Run `npm run build` (or your project's build command) in {app_root.as_posix()} " + f"before deploying with Toolkit." + ), + source_path=app_root, + ) + return + elif (app_root / entrypoint).is_file(): + source_dir = app_root + else: + yield FailedReadExtra( + code="MISSING", + error=( + f"Could not locate entrypoint {entrypoint!r} for app {app_external_id!r}. " + f"Expected {(dist_dir / entrypoint).as_posix()} or " + f"{(app_root / entrypoint).as_posix()} to exist. " + f"If your app has a build step, run it before deploying with Toolkit." + ), + source_path=app_root, + ) + return + + package_json = app_root / "package.json" + if not package_json.is_file(): + yield FailedReadExtra( + code="MISSING", + error=( + f"App {app_external_id!r} is missing package.json at {app_root.as_posix()}. " + f"This file is required to deploy to the App Hosting service." 
+ ), + source_path=package_json, + ) + return + + package_lock = app_root / "package-lock.json" + if not package_lock.is_file(): + yield FailedReadExtra( + code="MISSING", + error=( + f"App {app_external_id!r} is missing package-lock.json at {app_root.as_posix()}. " + f"This file is required to deploy to the App Hosting service." + ), + source_path=package_lock, + ) + return + + manifest_json = app_root / "manifest.json" + manifest_file: Path | None = None + if manifest_json.is_file(): + try: + json.loads(manifest_json.read_text(encoding="utf-8")) + except json.JSONDecodeError as error: + yield FailedReadExtra( + code="SYNTAX-ERROR", + error=f"App {app_external_id!r} has an invalid manifest.json at {manifest_json.as_posix()}: {error}", + source_path=manifest_json, + ) + return + manifest_file = manifest_json + + # Files already inside source_dir are captured by the recursive walk; only add those outside it. + extra_root_files = [ + f for f in [package_json, package_lock, manifest_file] if f is not None and not f.is_relative_to(source_dir) + ] + + source_hash = calculate_directory_hash(source_dir) + zip_bytes = _zip_app_directory(source_dir, extra_root_files) + yield SuccessExtra( + source_path=source_dir, + source_hash=source_hash, + suffix=".zip", + byte_content=zip_bytes, + description="app bundle", + ) + + def load_resource_file( + self, filepath: Path, environment_variables: dict[str, str | None] | None = None + ) -> list[dict[str, Any]]: + if filepath.parent.name != self.folder_name: + return [] + + raw_list = super().load_resource_file(filepath, environment_variables) + for item in raw_list: + app_external_id = item.get("externalId") or item.get("external_id") + if not app_external_id: + raise ToolkitRequiredValueError("App YAML must define externalId.") + version = item.get("version") + if not version: + raise ToolkitRequiredValueError("App YAML must define version.") + filestem = filepath.stem.rsplit(".", 1)[0] + version_id = 
AppVersionId(app_external_id=app_external_id, version=version) + self.zip_path_by_version_id[version_id] = filepath.parent / f"{filestem}.zip" + + return raw_list + + def load_resource(self, resource: dict[str, Any], is_dry_run: bool = False) -> AppVersionRequest: + return AppVersionRequest.model_validate(resource) + + def dump_resource(self, resource: AppVersionResponse, local: dict[str, Any] | None = None) -> dict[str, Any]: + local = local or {} + dumped: dict[str, Any] = { + "externalId": resource.app_external_id, + "version": resource.version, + "lifecycleState": resource.lifecycle_state, + "entrypoint": resource.entrypoint, + } + if resource.alias is not None: + dumped["alias"] = resource.alias + # name and description are app-level and immutable post-create; always use local values to suppress stale diff. + for immutable_key in ("name", "description"): + if immutable_key in local: + dumped[immutable_key] = local[immutable_key] + for local_only_key in ("sourcePath", "source_path"): + if local_only_key in local: + dumped[local_only_key] = local[local_only_key] + return dumped + + def _deploy(self, item: AppVersionRequest) -> AppVersionResponse: + version_id = item.as_id() + zip_path = self.zip_path_by_version_id.get(version_id) + if zip_path is None or not zip_path.exists(): + raise ToolkitRequiredValueError( + f"App zip not found for {item.external_id!r} version {item.version!r}. Ensure build was run first." 
+ ) + try: + self.client.tool.apps.create( + [AppRequest(external_id=item.external_id, name=item.name, description=item.description)] + ) + except ToolkitAPIError as error: + if error.code != 409: + raise + zip_bytes = zip_path.read_bytes() + self.client.tool.app_versions.upload( + external_id=item.external_id, + version=item.version, + entrypoint=item.entrypoint, + zip_bytes=zip_bytes, + ) + + retrieved = self.client.tool.app_versions.retrieve( + [AppVersionId(app_external_id=item.external_id, version=item.version)], ignore_unknown_ids=True + ) + current = retrieved[0] if retrieved else None + current_lifecycle = current.lifecycle_state if current else "DRAFT" + current_alias = current.alias if current else None + + update: dict = {} + + if item.lifecycle_state != current_lifecycle: + current_idx = _LIFECYCLE_ORDER.index(current_lifecycle) if current_lifecycle in _LIFECYCLE_ORDER else 0 + target_idx = _LIFECYCLE_ORDER.index(item.lifecycle_state) if item.lifecycle_state in _LIFECYCLE_ORDER else 0 + if target_idx < current_idx: + raise ToolkitValueError( + f"Cannot transition app {item.external_id!r} version {item.version!r} " + f"from {current_lifecycle!r} to {item.lifecycle_state!r}: lifecycle transitions are forward-only." + ) + update["lifecycleState"] = {"set": item.lifecycle_state} + + alias_explicitly_set = "alias" in item.model_fields_set + if alias_explicitly_set and item.alias != current_alias: + if item.alias is not None and item.lifecycle_state not in ("PUBLISHED",): + raise ToolkitValueError( + f"Cannot set alias {item.alias!r} on app {item.external_id!r} version {item.version!r}: " + f"aliases are only valid on PUBLISHED versions (current lifecycle: {item.lifecycle_state!r})." 
+ ) + update["alias"] = {"setNull": True} if item.alias is None else {"set": item.alias} + + if update: + self.client.tool.app_versions.update(item.external_id, item.version, update) + + return AppVersionResponse( + app_external_id=item.external_id, + version=item.version, + lifecycle_state=item.lifecycle_state, + alias=item.alias, + entrypoint=item.entrypoint, + ) + + def create(self, items: Sequence[AppVersionRequest]) -> list[AppVersionResponse]: + return [self._deploy(item) for item in items] + + def update(self, items: Sequence[AppVersionRequest]) -> list[AppVersionResponse]: + return [self._deploy(item) for item in items] + + def retrieve(self, ids: Sequence[AppVersionId]) -> list[AppVersionResponse]: + results: list[AppVersionResponse] = [] + for version_id in ids: + version_responses = self.client.tool.app_versions.retrieve([version_id], ignore_unknown_ids=True) + if not version_responses: + continue + version_response = version_responses[0] + results.append( + AppVersionResponse( + app_external_id=version_response.app_external_id, + version=version_response.version, + lifecycle_state=version_response.lifecycle_state, + alias=version_response.alias, + entrypoint=version_response.entrypoint, + ) + ) + return results + + def delete(self, ids: Sequence[AppVersionId]) -> int: + if not ids: + return 0 + self.client.tool.app_versions.delete(ids) + return len(ids) + + def _iterate( + self, + data_set_external_id: str | None = None, + space: str | None = None, + parent_ids: Sequence[Hashable] | None = None, + ) -> Iterable[AppVersionResponse]: + for page in self.client.tool.app_versions.iterate(): + yield from page diff --git a/tests/test_integration/test_commands/test_deploy.py b/tests/test_integration/test_commands/test_deploy.py index 6c66cc25d6..31a4acc6d9 100644 --- a/tests/test_integration/test_commands/test_deploy.py +++ b/tests/test_integration/test_commands/test_deploy.py @@ -17,6 +17,7 @@ from cognite_toolkit._cdf_tk.resource_ios import ( 
CRUDS_BY_FOLDER_NAME, RESOURCE_CRUD_LIST, + AppIO, CogniteFileCRUD, FileMetadataCRUD, FunctionIO, @@ -189,7 +190,7 @@ def get_changed_source_files( # Authentication that causes the diff to fail loader_cls in {HostedExtractorSourceIO, HostedExtractorDestinationIO} # External files that cannot (or not yet supported) be pulled - or loader_cls in {GraphQLCRUD, FunctionIO, StreamlitIO} + or loader_cls in {GraphQLCRUD, FunctionIO, AppIO, StreamlitIO} # Have authentication hashes that is different for each environment or loader_cls in {TransformationIO, FunctionScheduleIO, WorkflowTriggerIO} # LocationFilterLoader needs to split the file into multiple files, so we cannot compare them diff --git a/tests/test_unit/test_cdf_tk/test_client/test_cdf_apis.py b/tests/test_unit/test_cdf_tk/test_client/test_cdf_apis.py index a50cad8183..fb9f46d82c 100644 --- a/tests/test_unit/test_cdf_tk/test_client/test_cdf_apis.py +++ b/tests/test_unit/test_cdf_tk/test_client/test_cdf_apis.py @@ -12,6 +12,8 @@ from cognite_toolkit._cdf_tk.client._resource_base import ResponseResource from cognite_toolkit._cdf_tk.client.api.alert_channels import AlertChannelsAPI from cognite_toolkit._cdf_tk.client.api.annotations import AnnotationsAPI +from cognite_toolkit._cdf_tk.client.api.app_versions import AppVersionsAPI +from cognite_toolkit._cdf_tk.client.api.apps import AppsAPI from cognite_toolkit._cdf_tk.client.api.chart_scheduled_calculations import ChartScheduledCalculationsAPI from cognite_toolkit._cdf_tk.client.api.charts_folders import ChartFoldersAPI from cognite_toolkit._cdf_tk.client.api.charts_monitoring_job import ChartMonitoringJobsAPI @@ -34,10 +36,11 @@ from cognite_toolkit._cdf_tk.client.cdf_client import CDFResourceAPI, PagedResponse from cognite_toolkit._cdf_tk.client.cdf_client.api import APIMethod from cognite_toolkit._cdf_tk.client.http_client import HTTPClient -from cognite_toolkit._cdf_tk.client.identifiers import ExternalId, PrincipalId +from 
cognite_toolkit._cdf_tk.client.identifiers import AppVersionId, ExternalId, PrincipalId from cognite_toolkit._cdf_tk.client.request_classes.filters import AnnotationFilter from cognite_toolkit._cdf_tk.client.resource_classes.alert_channel import AlertChannelResponse from cognite_toolkit._cdf_tk.client.resource_classes.annotation import AnnotationResponse +from cognite_toolkit._cdf_tk.client.resource_classes.app import AppRequest from cognite_toolkit._cdf_tk.client.resource_classes.chart_folder import ( ChartFolderRequest, ChartFolderResponse, @@ -1226,6 +1229,89 @@ def test_alert_channels_api_list_method( assert len(listed) == 1 assert listed[0].dump() == resource + def test_apps_api_methods(self, toolkit_config: ToolkitClientConfig, respx_mock: respx.MockRouter) -> None: + config = toolkit_config + api = AppsAPI(HTTPClient(config)) + app_external_id = "my-app" + app_request = AppRequest(external_id=app_external_id, name="My App") + + app_json = {"externalId": app_external_id, "name": "My App"} + respx_mock.post(config.create_api_url("/apphosting/apps")).mock( + return_value=httpx.Response(status_code=201, json={"items": [app_json]}) + ) + created = api.create([app_request]) + assert len(created) == 1 + assert created[0].name == "My App" + + # Test retrieve + respx_mock.get(config.create_api_url(f"/apphosting/apps/{app_external_id}")).mock( + return_value=httpx.Response(status_code=200, json=app_json) + ) + result = api.retrieve(app_external_id) + assert result is not None + assert result.name == "My App" + + # Test retrieve with 404 + respx_mock.get(config.create_api_url(f"/apphosting/apps/{app_external_id}")).mock( + return_value=httpx.Response(status_code=404) + ) + assert api.retrieve(app_external_id) is None + + def test_app_versions_api_methods(self, toolkit_config: ToolkitClientConfig, respx_mock: respx.MockRouter) -> None: + config = toolkit_config + api = AppVersionsAPI(HTTPClient(config)) + app_external_id = "my-app" + version = "1.0.0" + version_json = { 
+ "appExternalId": app_external_id, + "version": version, + "lifecycleState": "DRAFT", + "entrypoint": "index.html", + } + # Test upload + respx_mock.post(config.create_api_url(f"/apphosting/apps/{app_external_id}/versions")).mock( + return_value=httpx.Response(status_code=201) + ) + api.upload(app_external_id, version, "index.html", b"fake-zip") + + # Test update + respx_mock.post(config.create_api_url(f"/apphosting/apps/{app_external_id}/versions/update")).mock( + return_value=httpx.Response(status_code=200, json={"items": [version_json]}) + ) + api.update(app_external_id, version, {"lifecycleState": {"set": "PUBLISHED"}}) + + # Test retrieve + respx_mock.get(config.create_api_url(f"/apphosting/apps/{app_external_id}/versions/{version}")).mock( + return_value=httpx.Response(status_code=200, json=version_json) + ) + version_id = AppVersionId(app_external_id=app_external_id, version=version) + retrieved = api.retrieve([version_id]) + assert len(retrieved) == 1 + assert retrieved[0].app_external_id == app_external_id + assert retrieved[0].version == version + assert retrieved[0].lifecycle_state == "DRAFT" + + # Test retrieve with 404 and ignore_unknown_ids + respx_mock.get(config.create_api_url(f"/apphosting/apps/{app_external_id}/versions/{version}")).mock( + return_value=httpx.Response(status_code=404) + ) + assert api.retrieve([version_id], ignore_unknown_ids=True) == [] + + # Test iterate + respx_mock.post(config.create_api_url("/apphosting/versions/list")).mock( + return_value=httpx.Response(status_code=200, json={"items": [version_json]}) + ) + batches = list(api.iterate(limit=10)) + assert len(batches) == 1 + assert batches[0][0].version == version + + # Test delete + respx_mock.post(config.create_api_url(f"/apphosting/apps/{app_external_id}/versions/delete")).mock( + return_value=httpx.Response(status_code=200) + ) + api.delete([AppVersionId(app_external_id=app_external_id, version=version)]) + assert len(respx_mock.calls) >= 1 + def 
test_task_move_type_to_field_handles_none_validation_data() -> None: """Pydantic may supply ValidationInfo.data as None; avoid 'in' on None (deploy dry-run).""" diff --git a/tests/test_unit/test_cdf_tk/test_cruds/test_app.py b/tests/test_unit/test_cdf_tk/test_cruds/test_app.py new file mode 100644 index 0000000000..f3b951f2ab --- /dev/null +++ b/tests/test_unit/test_cdf_tk/test_cruds/test_app.py @@ -0,0 +1,538 @@ +import io +import zipfile +from pathlib import Path + +import pytest + +from cognite_toolkit._cdf_tk.client.identifiers import AppVersionId +from cognite_toolkit._cdf_tk.client.resource_classes.app import AppRequest +from cognite_toolkit._cdf_tk.client.resource_classes.app_version import AppVersionRequest, AppVersionResponse +from cognite_toolkit._cdf_tk.client.testing import monkeypatch_toolkit_client +from cognite_toolkit._cdf_tk.exceptions import ToolkitRequiredValueError, ToolkitValueError +from cognite_toolkit._cdf_tk.resource_ios._base_ios import FailedReadExtra +from cognite_toolkit._cdf_tk.resource_ios._resource_ios.app import AppIO + + +def _make_app_request( + external_id: str = "my-app", + version: str = "1.0.0", + name: str = "My App", + lifecycle_state: str = "PUBLISHED", + alias: str | None = None, + entrypoint: str = "index.html", +) -> AppVersionRequest: + return AppVersionRequest( + external_id=external_id, + version=version, + name=name, + lifecycle_state=lifecycle_state, + alias=alias, + entrypoint=entrypoint, + ) + + +def _make_app_response( + app_external_id: str = "my-app", + version: str = "1.0.0", + lifecycle_state: str = "PUBLISHED", + alias: str | None = "ACTIVE", +) -> AppVersionResponse: + return AppVersionResponse( + app_external_id=app_external_id, + version=version, + lifecycle_state=lifecycle_state, + alias=alias, + ) + + +def _write_zip(path: Path, filenames: list[str] | None = None) -> None: + if filenames is None: + filenames = ["index.html"] + with zipfile.ZipFile(path, "w") as zf: + for filename in filenames: + 
zf.writestr(filename, b"content") + + +class TestAppIODeploy: + @pytest.fixture + def app_io_with_zip(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + zip_path = tmp_path / "1-my-app-my-app.zip" + _write_zip(zip_path) + version_id = AppVersionId(app_external_id="my-app", version="1.0.0") + loader.zip_path_by_version_id[version_id] = zip_path + yield loader, client + + def test_create_calls_create_and_upload(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(lifecycle_state="DRAFT", alias=None) + client.tool.app_versions.retrieve.return_value = [] + + loader.create([item]) + + client.tool.apps.create.assert_called_once_with([AppRequest(external_id="my-app", name="My App")]) + client.tool.app_versions.upload.assert_called_once_with( + external_id="my-app", + version="1.0.0", + entrypoint="index.html", + zip_bytes=loader.zip_path_by_version_id[ + AppVersionId(app_external_id="my-app", version="1.0.0") + ].read_bytes(), + ) + + def test_deploy_promotes_draft_to_published_with_active_alias(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(lifecycle_state="PUBLISHED", alias="ACTIVE") + client.tool.app_versions.retrieve.return_value = [] + + loader.create([item]) + + client.tool.app_versions.update.assert_called_once_with( + "my-app", "1.0.0", {"lifecycleState": {"set": "PUBLISHED"}, "alias": {"set": "ACTIVE"}} + ) + + def test_deploy_clears_alias_when_local_alias_is_none(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(lifecycle_state="PUBLISHED", alias=None) + client.tool.app_versions.retrieve.return_value = [_make_app_response(lifecycle_state="PUBLISHED", alias="ACTIVE")] + + loader.create([item]) + + client.tool.app_versions.update.assert_called_once_with("my-app", "1.0.0", {"alias": {"setNull": True}}) + + def test_deploy_swaps_alias_to_preview(self, app_io_with_zip): + loader, client = 
app_io_with_zip + item = _make_app_request(lifecycle_state="PUBLISHED", alias="PREVIEW") + client.tool.app_versions.retrieve.return_value = [_make_app_response(lifecycle_state="PUBLISHED", alias="ACTIVE")] + + loader.create([item]) + + client.tool.app_versions.update.assert_called_once_with("my-app", "1.0.0", {"alias": {"set": "PREVIEW"}}) + + def test_deploy_noop_when_lifecycle_and_alias_match(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(lifecycle_state="PUBLISHED", alias="ACTIVE") + client.tool.app_versions.retrieve.return_value = [_make_app_response(lifecycle_state="PUBLISHED", alias="ACTIVE")] + + loader.create([item]) + + client.tool.app_versions.update.assert_not_called() + + def test_deploy_rejects_backward_lifecycle_transition(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(lifecycle_state="DRAFT", alias=None) + client.tool.app_versions.retrieve.return_value = [_make_app_response(lifecycle_state="PUBLISHED", alias=None)] + + with pytest.raises(ToolkitValueError, match="forward-only"): + loader.create([item]) + + def test_deploy_rejects_alias_on_non_published_version(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(lifecycle_state="DRAFT", alias="ACTIVE") + client.tool.app_versions.retrieve.return_value = [] + + with pytest.raises(ToolkitValueError, match="alias"): + loader.create([item]) + + def test_deploy_raises_when_zip_missing(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + item = _make_app_request(external_id="missing-app") + with pytest.raises(ToolkitRequiredValueError, match="missing-app"): + loader.create([item]) + + def test_deploy_returns_response_with_correct_fields(self, app_io_with_zip): + loader, _client = app_io_with_zip + item = _make_app_request(lifecycle_state="PUBLISHED", alias="ACTIVE") + _client.tool.app_versions.retrieve.return_value = [] + + results 
= loader.create([item]) + + assert len(results) == 1 + response = results[0] + assert isinstance(response, AppVersionResponse) + assert response.app_external_id == "my-app" + assert response.version == "1.0.0" + assert response.lifecycle_state == "PUBLISHED" + assert response.alias == "ACTIVE" + + def test_update_calls_create_and_upload(self, app_io_with_zip): + loader, client = app_io_with_zip + item = _make_app_request(version="2.0.0", lifecycle_state="DRAFT", alias=None) + # Register zip for 2.0.0 + zip_path = loader.zip_path_by_version_id[AppVersionId(app_external_id="my-app", version="1.0.0")] + loader.zip_path_by_version_id[AppVersionId(app_external_id="my-app", version="2.0.0")] = zip_path + client.tool.app_versions.retrieve.return_value = [] + + loader.update([item]) + + client.tool.apps.create.assert_called_once_with([AppRequest(external_id="my-app", name="My App")]) + client.tool.app_versions.upload.assert_called_once() + + def test_delete_calls_delete_version_grouped_by_app(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + ids = [ + AppVersionId(app_external_id="my-app", version="1.0.0"), + AppVersionId(app_external_id="my-app", version="2.0.0"), + ] + loader.delete(ids) + + client.tool.app_versions.delete.assert_called_once_with(ids) + + +class TestAppIOGetId: + @pytest.mark.parametrize("ext_key", ["externalId", "appExternalId", "external_id", "app_external_id"]) + def test_from_dict_all_key_variants(self, ext_key: str): + assert AppIO.get_id({ext_key: "my-app", "version": "1.0.0"}) == AppVersionId( + app_external_id="my-app", version="1.0.0" + ) + + @pytest.mark.parametrize( + "item, match", + [ + ({"version": "1.0.0"}, "externalId"), + ({"externalId": "my-app"}, "version"), + ], + ) + def test_from_dict_raises_when_field_missing(self, item: dict, match: str): + with pytest.raises(ToolkitRequiredValueError, match=match): + AppIO.get_id(item) + + @pytest.mark.parametrize( + "item", + 
[ + AppVersionRequest(external_id="my-app", version="1.0.0", name="My App"), + AppVersionResponse(app_external_id="my-app", version="1.0.0", lifecycle_state="DRAFT"), + ], + ) + def test_from_resource_object(self, item: AppVersionRequest | AppVersionResponse): + assert AppIO.get_id(item) == AppVersionId(app_external_id="my-app", version="1.0.0") + + +class TestAppIOLoadResourceFile: + def test_registers_zip_path_for_valid_yaml(self, tmp_path: Path): + apps_dir = tmp_path / "apps" + apps_dir.mkdir() + yaml_file = apps_dir / "my-app.App.yaml" + yaml_file.write_text("externalId: my-app\nversion: 1.0.0\nname: My App\n") + + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + result = loader.load_resource_file(yaml_file) + + assert result == [{"externalId": "my-app", "version": "1.0.0", "name": "My App"}] + version_id = AppVersionId(app_external_id="my-app", version="1.0.0") + assert version_id in loader.zip_path_by_version_id + assert loader.zip_path_by_version_id[version_id] == apps_dir / "my-app.zip" + + def test_returns_empty_when_parent_not_apps(self, tmp_path: Path): + other_dir = tmp_path / "other" + other_dir.mkdir() + yaml_file = other_dir / "my-app.App.yaml" + yaml_file.write_text("externalId: my-app\nversion: 1.0.0\nname: My App\n") + + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + result = loader.load_resource_file(yaml_file) + + assert result == [] + + +class TestAppIORetrieveAndIterate: + def test_retrieve_returns_matching_responses(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + version_response = _make_app_response(app_external_id="my-app", version="1.0.0") + client.tool.app_versions.retrieve.return_value = [version_response] + ids = [AppVersionId(app_external_id="my-app", version="1.0.0")] + + result = loader.retrieve(ids) + + assert len(result) == 1 + assert result[0].app_external_id == 
"my-app" + + def test_retrieve_skips_not_found(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + client.tool.app_versions.retrieve.return_value = [] + ids = [AppVersionId(app_external_id="missing", version="1.0.0")] + + result = loader.retrieve(ids) + + assert result == [] + + def test_iterate_yields_all_pages(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + page = [_make_app_response()] + client.tool.app_versions.iterate.return_value = iter([page]) + + result = list(loader._iterate()) + + assert result == page + + def test_delete_empty_list_returns_zero(self, tmp_path: Path): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, tmp_path) + result = loader.delete([]) + + assert result == 0 + client.tool.app_versions.delete.assert_not_called() + + +class TestAppIODumpResource: + def test_uses_local_name_and_description_when_immutable_drift(self): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, None) + + response = AppVersionResponse( + app_external_id="my-app", + version="1.0.0", + lifecycle_state="PUBLISHED", + alias="ACTIVE", + ) + local = {"name": "New local name", "description": "New description"} + + dumped = loader.dump_resource(response, local=local) + + assert dumped["externalId"] == "my-app" + assert dumped["name"] == "New local name" + assert dumped["description"] == "New description" + + def test_copies_source_path_from_local(self): + with monkeypatch_toolkit_client() as client: + loader = AppIO.create_loader(client, None) + + response = AppVersionResponse( + app_external_id="my-app", + version="1.0.0", + lifecycle_state="PUBLISHED", + alias="ACTIVE", + ) + local = {"sourcePath": "../../../../my-custom-app"} + + dumped = loader.dump_resource(response, local=local) + + assert dumped["sourcePath"] == "../../../../my-custom-app" + + +class 
TestAppIOGetExtraFiles: + def test_yields_zip_with_dist_contents(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + dist_dir = app_dir / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("") + (dist_dir / "bundle.js").write_text("console.log('hi')") + (app_dir / "package.json").write_text("{}") + (app_dir / "package-lock.json").write_text("{}") + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + extra = extras[0] + assert extra.suffix == ".zip" + assert extra.byte_content is not None + with zipfile.ZipFile(io.BytesIO(extra.byte_content)) as zf: + names = zf.namelist() + assert any("index.html" in n for n in names) + assert any("bundle.js" in n for n in names) + assert "package.json" in names + assert "package-lock.json" in names + + def test_falls_back_to_root_without_dist(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + app_dir.mkdir() + (app_dir / "index.html").write_text("") + (app_dir / "package.json").write_text("{}") + (app_dir / "package-lock.json").write_text("{}") + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert extras[0].suffix == ".zip" + + def test_fails_when_entrypoint_missing_from_root_and_dist(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + app_dir.mkdir() + # No index.html at root, no dist/, no src/+package.json + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, 
AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + assert "index.html" in extras[0].error + + def test_fails_with_build_hint_when_unbuilt_webapp(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + app_dir.mkdir() + (app_dir / "src").mkdir() + (app_dir / "package.json").write_text("{}") + (app_dir / "index.html").write_text("") # Vite template at root + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + assert "npm run build" in extras[0].error + + def test_fails_when_app_dir_missing(self, tmp_path: Path): + yaml_file = tmp_path / "missing-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "missing-app", "version": "1.0.0", "name": "Missing App"} + + extras = list( + AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="missing-app", version="1.0.0"), item) + ) + + assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + + def test_uses_source_path_field(self, tmp_path: Path): + external_dir = tmp_path / "my-custom-app" + dist_dir = external_dir / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("") + (external_dir / "package.json").write_text("{}") + (external_dir / "package-lock.json").write_text("{}") + + modules_dir = tmp_path / "modules" / "my_module" / "apps" + modules_dir.mkdir(parents=True) + yaml_file = modules_dir / "my-app.App.yaml" + yaml_file.write_text("") + item = { + "externalId": "my-app", + "version": "1.0.0", + "name": "My App", + "sourcePath": "../../../my-custom-app", + } + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert 
len(extras) == 1 + assert extras[0].suffix == ".zip" + + def test_fails_when_app_dir_missing_from_source_path(self, tmp_path: Path): + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App", "sourcePath": "does-not-exist"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + + def test_excludes_node_modules_and_git(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + app_dir.mkdir() + (app_dir / "index.html").write_text("") + (app_dir / "package.json").write_text("{}") + (app_dir / "package-lock.json").write_text("{}") + (app_dir / "node_modules").mkdir() + (app_dir / "node_modules" / "pkg.js").write_text("module") + (app_dir / ".git").mkdir() + (app_dir / ".git" / "config").write_text("[core]") + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + with zipfile.ZipFile(io.BytesIO(extras[0].byte_content)) as zf: # type: ignore[arg-type] + names = zf.namelist() + assert not any("node_modules" in n for n in names) + assert not any(".git" in n for n in names) + assert any("index.html" in n for n in names) + + def test_fails_when_package_json_missing(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + dist_dir = app_dir / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("") + (app_dir / "package-lock.json").write_text("{}") + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + 
assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + assert "package.json" in extras[0].error + + def test_fails_when_package_lock_missing(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + dist_dir = app_dir / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("") + (app_dir / "package.json").write_text("{}") + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + assert "package-lock.json" in extras[0].error + + def test_fails_when_manifest_json_invalid(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + dist_dir = app_dir / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("") + (app_dir / "package.json").write_text("{}") + (app_dir / "package-lock.json").write_text("{}") + (app_dir / "manifest.json").write_text("not valid json{") + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert isinstance(extras[0], FailedReadExtra) + assert "manifest.json" in extras[0].error + + def test_includes_valid_manifest_json_in_zip(self, tmp_path: Path): + app_dir = tmp_path / "my-app" + dist_dir = app_dir / "dist" + dist_dir.mkdir(parents=True) + (dist_dir / "index.html").write_text("") + (app_dir / "package.json").write_text("{}") + (app_dir / "package-lock.json").write_text("{}") + (app_dir / "manifest.json").write_text('{"name": "My App"}') + + yaml_file = tmp_path / "my-app.App.yaml" + yaml_file.write_text("") + item = {"externalId": "my-app", "version": "1.0.0", "name": "My 
App"} + + extras = list(AppIO.get_extra_files(yaml_file, AppVersionId(app_external_id="my-app", version="1.0.0"), item)) + + assert len(extras) == 1 + assert extras[0].byte_content is not None + with zipfile.ZipFile(io.BytesIO(extras[0].byte_content)) as zf: + names = zf.namelist() + assert "manifest.json" in names diff --git a/tests/test_unit/test_cdf_tk/test_cruds/test_base.py b/tests/test_unit/test_cdf_tk/test_cruds/test_base.py index 77805793a6..02834fff75 100644 --- a/tests/test_unit/test_cdf_tk/test_cruds/test_base.py +++ b/tests/test_unit/test_cdf_tk/test_cruds/test_base.py @@ -21,6 +21,7 @@ from pytest import MonkeyPatch from cognite_toolkit._cdf_tk.cdf_toml import CDFToml +from cognite_toolkit._cdf_tk.client.resource_classes.app_version import AppVersionResponse from cognite_toolkit._cdf_tk.client.resource_classes.cognite_file import CogniteFileResponse from cognite_toolkit._cdf_tk.client.resource_classes.filemetadata import FileMetadataResponse from cognite_toolkit._cdf_tk.client.resource_classes.graphql_data_model import GraphQLDataModelResponse @@ -126,6 +127,7 @@ def test_loader_takes_dict( StreamlitResponse, CogniteFileResponse, FileMetadataResponse, + AppVersionResponse, ]: pytest.skip("Skipped loaders that require secondary files") elif loader.resource_cls in [Edge, Node, Destination]: @@ -170,6 +172,7 @@ def test_loader_takes_list( StreamlitResponse, CogniteFileResponse, FileMetadataResponse, + AppVersionResponse, ]: pytest.skip("Skipped loaders that require secondary files") elif loader.resource_cls in [Edge, Node, Destination]: @@ -228,6 +231,8 @@ def test_resource_types_is_up_to_date() -> None: if not FeatureFlag.is_enabled(Flags.DATA_PRODUCTS): extra.discard("data_products") extra.discard("rulesets") + if not FeatureFlag.is_enabled(Flags.CUSTOM_APPS): + extra.discard("apps") if not FeatureFlag.is_enabled(Flags.SIGNALS): extra.discard("signals") assert not missing, f"Missing {missing=}"