diff --git a/.env.test b/.env.test
index 77f2c83..f02c006 100644
--- a/.env.test
+++ b/.env.test
@@ -7,4 +7,4 @@ PYTEST_ADMIN_PASSWORD=start123
PYTEST_DEFAULT_MASTER_IMAGE=python/base
PYTEST_ASYNC_MAX_RETRIES=5
PYTEST_ASYNC_RETRY_DELAY_MILLIS=500
-PYTEST_HUB_VERSION=0.8.21
+PYTEST_HUB_VERSION=0.8.25
diff --git a/README.md b/README.md
index cd7f567..94f589b 100644
--- a/README.md
+++ b/README.md
@@ -80,7 +80,8 @@ print(my_node.model_dump_json(indent=2))
"registry": null,
"registry_project_id": null,
"registry_project": null,
- "robot_id": "200aab68-a686-407c-a6c1-2dd367ff6031",
+ "robot_id": null,
+ "client_id": "2d3e19b4-6708-4279-b2a7-34ad42638e4b",
"created_at": "2025-05-19T15:43:57.859000Z",
"updated_at": "2025-05-19T15:43:57.859000Z"
}
diff --git a/docs/testing.rst b/docs/testing.rst
index 1bc9b9c..45053a7 100644
--- a/docs/testing.rst
+++ b/docs/testing.rst
@@ -9,7 +9,7 @@ Tests for the FLAME Hub Client are implemented with `pytest `_.
-Furthermore, tests require access to a FLAME Hub instance. There are two way of accomplishing this - either by using
+Furthermore, tests require access to a FLAME Hub instance. There are two ways of accomplishing this - either by using
`testcontainers `_ or by deploying your own instance.
@@ -25,13 +25,13 @@ development, it is highly recommended to set up your own Hub instance instead.
Deploying your own Hub instance
===============================
-Grab the `Docker compose file `_
-from the Hub repository and store it somewhere warm and comfy. For ``core``, ``messenger``, ``analysis-manager``,
-``storage`` and ``ui`` services, remove the ``build`` property and replace it with
-``image: ghcr.io/privateaim/hub:HUB_VERSION``. The latest version of the FLAME Hub Client that is tested with the Hub is
-|hub_version|. Now you can run :console:`docker compose up -d` and, after a few minutes, you will be able to access the
-UI at http://localhost:3000.
+Clone the Hub deployment repository :console:`git clone https://github.com/PrivateAIM/hub-deployment.git` and navigate
+to the ``docker-compose`` directory :console:`cd hub-deployment/docker-compose`. Copy the ``.env.example`` file with
+:console:`cp .env.example .env`. Edit the new ``.env`` file and change the ``HUB_IMAGE_TAG`` variable if you need a
+specific version of the Hub. The latest version of the FLAME Hub Client is tested with the Hub version |hub_version|.
+Now you can run :console:`docker compose up -d` and, after a few minutes, you will be able to access the UI at
+http://localhost:3000.
-In order for ``pytest`` to pick up on the locally deployed instance, run :console:`cp .env.test .env` and modify the
-:file:`.env` file such that ``PYTEST_USE_TESTCONTAINERS=0``. This will skip the creation of all test containers and make
-test setup much faster.
+In order for ``pytest`` to pick up on the locally deployed instance, run :console:`cp .env.test .env` inside the
+``hub-python-client`` directory and modify the :file:`.env` file such that ``PYTEST_USE_TESTCONTAINERS=0``. This will
+skip the creation of all test containers and make test setup much faster.
diff --git a/flame_hub/__init__.py b/flame_hub/__init__.py
index 0d5f3ab..c089e98 100644
--- a/flame_hub/__init__.py
+++ b/flame_hub/__init__.py
@@ -12,6 +12,8 @@
"__version_info__",
]
+import warnings
+
from . import auth, types, models
from ._auth_client import AuthClient
@@ -20,3 +22,7 @@
from ._core_client import CoreClient
from ._storage_client import StorageClient
from ._version import __version__, __version_info__
+
+
+# Show deprecation warnings per default.
+warnings.simplefilter("default", DeprecationWarning)
diff --git a/flame_hub/_auth_client.py b/flame_hub/_auth_client.py
index 25eb787..1b18a40 100644
--- a/flame_hub/_auth_client.py
+++ b/flame_hub/_auth_client.py
@@ -43,22 +43,15 @@ class Realm(CreateRealm):
class CreateUser(BaseModel):
name: str
display_name: str | None
- email: t.Annotated[str | None, IsOptionalField]
+ email: t.Annotated[EmailStr, IsOptionalField] = None
active: bool
name_locked: bool
first_name: str | None
last_name: str | None
-class User(BaseModel):
+class User(CreateUser):
id: uuid.UUID
- name: str
- active: bool
- name_locked: bool
- email: t.Annotated[EmailStr, IsOptionalField] = None
- display_name: str | None
- first_name: str | None
- last_name: str | None
avatar: str | None
cover: str | None
realm_id: uuid.UUID
@@ -237,6 +230,43 @@ class RobotRole(CreateRobotRole):
role_realm: t.Annotated[Realm | None, IsIncludable] = None
+class CreateClient(BaseModel):
+ name: str
+ secret: t.Annotated[str | None, IsOptionalField] = None
+ display_name: str | None
+ description: str | None
+ redirect_uri: str | None
+ active: bool
+ is_confidential: bool
+ secret_hashed: bool
+ grant_types: str | None
+ realm_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
+
+
+class Client(CreateClient):
+ id: uuid.UUID
+ built_in: bool
+ secret_encrypted: bool
+ scope: str | None
+ base_url: str | None
+ root_url: str | None
+ created_at: datetime
+ updated_at: datetime
+    realm: t.Annotated[Realm | None, IsIncludable] = None
+
+
+class UpdateClient(BaseModel):
+ name: str | UNSET_T = UNSET
+ secret: str | None | UNSET_T = UNSET
+ display_name: str | None | UNSET_T = UNSET
+ description: str | None | UNSET_T = UNSET
+ redirect_uri: str | None | UNSET_T = UNSET
+ active: bool | UNSET_T = UNSET
+ is_confidential: bool | UNSET_T = UNSET
+ secret_hashed: bool | UNSET_T = UNSET
+ grant_types: str | None | UNSET_T = UNSET
+
+
class AuthClient(BaseClient):
"""The client which implements all auth endpoints.
@@ -642,3 +672,75 @@ def get_robot_roles(self, **params: te.Unpack[GetKwargs]) -> list[RobotRole]:
def find_robot_roles(self, **params: te.Unpack[FindAllKwargs]) -> list[RobotRole]:
return self._find_all_resources(RobotRole, "robot-roles", include=get_includable_names(RobotRole), **params)
+
+ def create_client(
+ self,
+ name: str,
+ realm_id: Realm | str | uuid.UUID,
+ secret: str = None,
+ display_name: str = None,
+ description: str = None,
+ redirect_uri: str = None,
+ active: bool = True,
+ is_confidential: bool = True,
+ secret_hashed: bool = False,
+ grant_types: str = None,
+ ) -> Client:
+ return self._create_resource(
+ Client,
+ CreateClient(
+ name=name,
+ realm_id=realm_id,
+ secret=secret,
+ display_name=display_name,
+ description=description,
+ redirect_uri=redirect_uri,
+ active=active,
+ is_confidential=is_confidential,
+ secret_hashed=secret_hashed,
+ grant_types=grant_types,
+ ),
+ "clients",
+ )
+
+ def delete_client(self, client_id: Client | uuid.UUID | str):
+ self._delete_resource("clients", client_id)
+
+ def get_client(self, client_id: Client | uuid.UUID | str, **params: te.Unpack[GetKwargs]) -> Client | None:
+ return self._get_single_resource(Client, "clients", client_id, include=get_includable_names(Client), **params)
+
+ def get_clients(self, **params: te.Unpack[GetKwargs]) -> list[Client]:
+ return self._get_all_resources(Client, "clients", include=get_includable_names(Client), **params)
+
+ def find_clients(self, **params: te.Unpack[FindAllKwargs]) -> list[Client]:
+ return self._find_all_resources(Client, "clients", include=get_includable_names(Client), **params)
+
+ def update_client(
+ self,
+ client_id: Client | uuid.UUID | str,
+ name: str | UNSET_T = UNSET,
+ secret: str | None | UNSET_T = UNSET,
+ display_name: str | None | UNSET_T = UNSET,
+ description: str | None | UNSET_T = UNSET,
+ redirect_uri: str | None | UNSET_T = UNSET,
+ active: bool | UNSET_T = UNSET,
+ is_confidential: bool | UNSET_T = UNSET,
+ secret_hashed: bool | UNSET_T = UNSET,
+ grant_types: str | None | UNSET_T = UNSET,
+ ) -> Client:
+ return self._update_resource(
+ Client,
+ UpdateClient(
+ name=name,
+ secret=secret,
+ display_name=display_name,
+ description=description,
+ redirect_uri=redirect_uri,
+ active=active,
+ is_confidential=is_confidential,
+ secret_hashed=secret_hashed,
+ grant_types=grant_types,
+ ),
+ "clients",
+ client_id,
+ )
diff --git a/flame_hub/_auth_flows.py b/flame_hub/_auth_flows.py
index 9364b52..cc11fd4 100644
--- a/flame_hub/_auth_flows.py
+++ b/flame_hub/_auth_flows.py
@@ -1,5 +1,6 @@
import time
import typing as t
+import warnings
import httpx
from pydantic import BaseModel
@@ -62,6 +63,11 @@ def __init__(
self._current_token_expires_at_nanos = 0
self._client = client or httpx.Client(base_url=base_url)
+ warnings.warn(
+ "'RobotAuth' is deprecated and will be removed in a future version. Please use 'ClientAuth' instead.",
+            category=DeprecationWarning, stacklevel=2,
+ )
+
def auth_flow(self, request) -> t.Iterator[httpx.Request]:
"""Executes the robot authentication flow.
@@ -100,6 +106,83 @@ def auth_flow(self, request) -> t.Iterator[httpx.Request]:
yield request
+class ClientAuth(httpx.Auth):
+ """Client authentication for the FLAME Hub.
+
+ This class implements a client authentication flow which is one possible flow that is recognized by the FLAME Hub.
+ It is derived from the ``httpx`` base class for all authentication flows ``httpx.Auth``. For more information about
+ this base class, click
+ `here `_. Note that
+ ``base_url`` is ignored if you pass your own client via the ``client`` keyword argument. An instance of this class
+    can be used to authenticate as a client when accessing the Hub endpoints.
+
+ Parameters
+ ----------
+ client_id : :py:class:`str`
+ The ID of the client which is used to execute the authentication flow.
+ client_secret : :py:class:`str`
+ The secret which corresponds to the client with ID ``client_id``.
+ base_url : :py:class:`str`, default=\\ :py:const:`~flame_hub._defaults.DEFAULT_AUTH_BASE_URL`
+ The base URL for the authentication flow.
+ client : :py:class:`httpx.Client`
+ Pass your own client to avoid the instantiation of a client while initializing an instance of this class.
+
+ See Also
+ --------
+ :py:class:`.AuthClient`, :py:class:`.CoreClient`, :py:class:`.StorageClient`
+ """
+
+ def __init__(
+ self,
+ client_id: str,
+ client_secret: str,
+ base_url: str = DEFAULT_AUTH_BASE_URL,
+ client: httpx.Client = None,
+ ):
+ self._client_id = client_id
+ self._client_secret = client_secret
+ self._current_token = None
+ self._current_token_expires_at_nanos = 0
+ self._client = client or httpx.Client(base_url=base_url)
+
+ def auth_flow(self, request) -> t.Iterator[httpx.Request]:
+ """Executes the client authentication flow.
+
+ This method checks if the current access token is not set or expired and, if so, requests a new one from the Hub
+ instance. It then yields the authentication request. Click
+ `here `_ for further
+ information on this method.
+
+ See Also
+ --------
+ :py:class:`.AccessToken`
+ """
+
+ # Check if token is not set or current token is expired.
+ if self._current_token is None or time.monotonic_ns() > self._current_token_expires_at_nanos:
+ request_nanos = time.monotonic_ns()
+
+ r = self._client.post(
+ "token",
+ json={
+ "grant_type": "client_credentials",
+ "client_id": self._client_id,
+ "client_secret": self._client_secret,
+ },
+ )
+
+ if r.status_code != httpx.codes.OK.value:
+ raise new_hub_api_error_from_response(r)
+
+ at = AccessToken(**r.json())
+
+ self._current_token = at
+ self._current_token_expires_at_nanos = request_nanos + secs_to_nanos(at.expires_in)
+
+ request.headers["Authorization"] = f"Bearer {self._current_token.access_token}"
+ yield request
+
+
class PasswordAuth(httpx.Auth):
"""Password authentication for the FLAME Hub.
diff --git a/flame_hub/_base_client.py b/flame_hub/_base_client.py
index cd9ebd8..f9f9763 100644
--- a/flame_hub/_base_client.py
+++ b/flame_hub/_base_client.py
@@ -408,13 +408,15 @@ class BaseClient(object):
def __init__(self, base_url: str, auth: PasswordAuth | RobotAuth = None, **kwargs: te.Unpack[ClientKwargs]):
client = kwargs.get("client", None)
- self._client = client or httpx.Client(auth=auth, base_url=base_url)
+ # Set a read timeout of 20 seconds here because the endpoint for registry projects is slow.
+ self._client = client or httpx.Client(auth=auth, base_url=base_url, timeout=httpx.Timeout(5, read=20))
def _get_all_resources(
self,
resource_type: type[ResourceT],
*path: str,
include: IncludeParams = None,
+ expected_code: int = httpx.codes.OK.value,
**params: te.Unpack[GetKwargs],
) -> list[ResourceT] | tuple[list[ResourceT], ResourceListMeta]:
"""Retrieve all resources of a certain type at the specified path from the FLAME Hub.
@@ -431,13 +433,14 @@ def _get_all_resources(
Default pagination parameters are applied as explained in the return section of :py:meth:`_find_all_resources`.
"""
- return self._find_all_resources(resource_type, *path, include=include, **params)
+ return self._find_all_resources(resource_type, *path, include=include, expected_code=expected_code, **params)
def _find_all_resources(
self,
resource_type: type[ResourceT],
*path: str,
include: IncludeParams = None,
+ expected_code: int = httpx.codes.OK.value,
**params: te.Unpack[FindAllKwargs],
) -> list[ResourceT] | tuple[list[ResourceT], ResourceListMeta]:
"""Find all resources at the specified path on the FLAME Hub that match certain criteria.
@@ -451,7 +454,7 @@ def _find_all_resources(
resource_type : :py:class:`type`\\[:py:type:`~flame_hub._base_client.ResourceT`]
A Pydantic subclass used to validate the response from the FLAME Hub. This should be a model that
validates all attributes a resource can have. In other terms, do not pass one of the models that start with
- *Create* or *Update* since this method performs a ``GET`` request.
+ *Create* or *Update*.
*path : :py:class:`str`
A string or multiple strings that define the endpoint.
fields : :py:type:`~flame_hub.types.FieldParams`, optional
@@ -459,6 +462,8 @@ def _find_all_resources(
include : :py:type:`~flame_hub.types.IncludeParams`, optional
Extend the default resource fields by explicitly list resource names to nest in the response. See the
:doc:`model specifications ` which resources can be included in other resources.
+ expected_code : :py:class:`int`, optional
+ The expected status code of the response from the ``GET`` request. This defaults to ``200``.
**params : :py:obj:`~typing.Unpack` [:py:class:`.FindAllKwargs`]
Further keyword arguments to define filtering, sorting and pagination conditions, adding optional fields
to a response and returning meta information.
@@ -474,7 +479,7 @@ def _find_all_resources(
Raises
------
:py:exc:`.HubAPIError`
- If the status code of the response does not match 200.
+        If the status code of the response does not match ``expected_code``.
:py:exc:`~pydantic_core._pydantic_core.ValidationError`
If the resources returned by the Hub instance do not validate with the given ``resource_type``.
@@ -500,7 +505,7 @@ def _find_all_resources(
r = self._client.get("/".join(path), params=request_params)
- if r.status_code != httpx.codes.OK.value:
+ if r.status_code != expected_code:
raise new_hub_api_error_from_response(r)
resource_list = ResourceList[resource_type](**r.json())
@@ -510,7 +515,13 @@ def _find_all_resources(
else:
return resource_list.data
- def _create_resource(self, resource_type: type[ResourceT], resource: BaseModel, *path: str) -> ResourceT:
+ def _create_resource(
+ self,
+ resource_type: type[ResourceT],
+ resource: BaseModel,
+ *path: str,
+ expected_code: int = httpx.codes.CREATED.value,
+ ) -> ResourceT:
"""Create a resource of a certain type at the specified path.
The FLAME Hub responds with the created resource which is then validated with ``resource_type`` and returned by
@@ -521,13 +532,15 @@ def _create_resource(self, resource_type: type[ResourceT], resource: BaseModel,
resource_type : :py:class:`type`\\[:py:type:`~flame_hub._base_client.ResourceT`]
A Pydantic subclass used to validate the response from the FLAME Hub. This should be a model that
validates all attributes a resource can have. In other terms, do not pass one of the models that start with
- *Create* or *Update* since this method performs a ``GET`` request.
+ *Create* or *Update*.
resource : :py:class:`~pydantic.BaseModel`
This has to be the corresponding creation model for ``resource_type``. All creation models follow a naming
convention with a prefixed *Create*. See the :doc:`model specifications ` for a list of all
available models.
*path : :py:class:`str`
Path to the endpoint where the resource should be created.
+ expected_code : :py:class:`int`, optional
+ The expected status code of the response from the ``POST`` request. This defaults to ``201``.
Returns
-------
@@ -537,7 +550,7 @@ def _create_resource(self, resource_type: type[ResourceT], resource: BaseModel,
Raises
------
:py:exc:`.HubAPIError`
- If the status code of the response does not match 201.
+ If the status code of the response does not match ``expected_code``.
:py:exc:`~pydantic_core._pydantic_core.ValidationError`
If the resource returned by the Hub instance does not validate with the given ``resource_type``.
"""
@@ -546,7 +559,7 @@ def _create_resource(self, resource_type: type[ResourceT], resource: BaseModel,
json=resource.model_dump(mode="json"),
)
- if r.status_code != httpx.codes.CREATED.value:
+ if r.status_code != expected_code:
raise new_hub_api_error_from_response(r)
return resource_type(**r.json())
@@ -556,6 +569,7 @@ def _get_single_resource(
resource_type: type[ResourceT],
*path: str | UuidIdentifiable,
include: IncludeParams = None,
+ expected_code: int = httpx.codes.OK.value,
**params: te.Unpack[GetKwargs],
) -> ResourceT | None:
"""Get a single resource of a certain type at the specified path.
@@ -578,6 +592,10 @@ def _get_single_resource(
include : :py:type:`~flame_hub.types.IncludeParams`, optional
Extend the default resource fields by explicitly list resource names to nest in the response. See the
:doc:`model specifications ` which resources can be included in other resources.
+ expected_code : :py:class:`int`, optional
+ The expected status code of the response from the ``GET`` request. This defaults to ``200``.
+ **params : :py:obj:`~typing.Unpack` [:py:class:`.GetKwargs`]
+ Further keyword arguments for adding optional fields to a response and returning meta information.
Returns
-------
@@ -588,7 +606,7 @@ def _get_single_resource(
Raises
------
:py:exc:`.HubAPIError`
- If the status code of the response does not match 200 or 404.
+ If the status code of the response does not match ``expected_code`` or 404.
:py:exc:`~pydantic_core._pydantic_core.ValidationError`
If the resource returned by the Hub instance does not validate with the given ``resource_type``.
@@ -609,7 +627,7 @@ def _get_single_resource(
if r.status_code == httpx.codes.NOT_FOUND.value:
return None
- if r.status_code != httpx.codes.OK.value:
+ if r.status_code != expected_code:
raise new_hub_api_error_from_response(r)
return resource_type(**r.json())
@@ -619,6 +637,7 @@ def _update_resource(
resource_type: type[ResourceT],
resource: BaseModel,
*path: str | UuidIdentifiable,
+ expected_code: int = httpx.codes.ACCEPTED.value,
) -> ResourceT:
"""Update a resource of a certain type at the specified path.
@@ -630,7 +649,7 @@ def _update_resource(
resource_type : :py:class:`type`\\[:py:type:`~flame_hub._base_client.ResourceT`]
A Pydantic subclass used to validate the response from the FLAME Hub. This should be a model that validates
all attributes a resource can have. In other terms, do not pass one of the models that start with *Create*
- or *Update* since this method performs a ``GET`` request.
+ or *Update*.
resource : :py:class:`~pydantic.BaseModel`
This has to be the corresponding update model for ``resource_type``. All update models follow a naming
convention with a prefixed *Update*. See the :doc:`model specifications ` for a list of all
@@ -639,6 +658,8 @@ def _update_resource(
A string or multiple strings that define the endpoint. Since the last component of the path is a UUID of
a specific resource, it is also possible to pass in an :py:class:`~uuid.UUID` object or a model with an
``id`` attribute.
+ expected_code : :py:class:`int`, optional
+ The expected status code of the response from the ``POST`` request. This defaults to ``202``.
Returns
-------
@@ -648,7 +669,7 @@ def _update_resource(
Raises
------
:py:exc:`.HubAPIError`
- If the status code of the response does not match 202.
+ If the status code of the response does not match ``expected_code``.
:py:exc:`~pydantic_core._pydantic_core.ValidationError`
If the resource returned by the Hub instance does not validate with the given ``resource_type``.
"""
@@ -658,12 +679,12 @@ def _update_resource(
json=resource.model_dump(mode="json", exclude_defaults=True),
)
- if r.status_code != httpx.codes.ACCEPTED.value:
+ if r.status_code != expected_code:
raise new_hub_api_error_from_response(r)
return resource_type(**r.json())
- def _delete_resource(self, *path: str | UuidIdentifiable):
+ def _delete_resource(self, *path: str | UuidIdentifiable, expected_code: int = httpx.codes.ACCEPTED.value) -> None:
"""Delete a resource of a certain type at the specified path.
Parameters
@@ -672,13 +693,15 @@ def _delete_resource(self, *path: str | UuidIdentifiable):
A string or multiple strings that define the endpoint. Since the last component of the path is a UUID of
a specific resource, it is also possible to pass in an :py:class:`~uuid.UUID` object or a model with an
``id`` attribute.
+ expected_code : :py:class:`int`, optional
+ The expected status code of the response from the ``DELETE`` request. This defaults to ``202``.
Raises
------
:py:exc:`.HubAPIError`
- If the status code of the response does not match 202.
+ If the status code of the response does not match ``expected_code``.
"""
r = self._client.delete("/".join(convert_path(path)))
- if r.status_code != httpx.codes.ACCEPTED.value:
+ if r.status_code != expected_code:
raise new_hub_api_error_from_response(r)
diff --git a/flame_hub/_core_client.py b/flame_hub/_core_client.py
index 0142ae9..af8cc59 100644
--- a/flame_hub/_core_client.py
+++ b/flame_hub/_core_client.py
@@ -1,3 +1,4 @@
+from enum import Enum
import typing as t
import uuid
from datetime import datetime
@@ -24,7 +25,7 @@
from flame_hub._exceptions import new_hub_api_error_from_response
from flame_hub._defaults import DEFAULT_CORE_BASE_URL
from flame_hub._auth_flows import PasswordAuth, RobotAuth
-from flame_hub._storage_client import BucketFile
+from flame_hub._storage_client import Bucket, BucketFile
RegistryCommand = t.Literal["setup", "cleanup"]
@@ -100,7 +101,8 @@ class Node(CreateNode):
registry: t.Annotated[Registry | None, IsIncludable] = None
registry_project_id: uuid.UUID | None
registry_project: t.Annotated[RegistryProject | None, IsIncludable] = None
- robot_id: uuid.UUID
+ robot_id: uuid.UUID | None
+ client_id: uuid.UUID | None
created_at: datetime
updated_at: datetime
@@ -143,11 +145,16 @@ def ensure_position_none(value: t.Any) -> t.Any:
return value
+ProcessStatus = t.Literal["starting", "started", "stopping", "stopped", "finished", "failed"]
+
+
class MasterImage(BaseModel):
id: uuid.UUID
path: str | None
virtual_path: str
group_virtual_path: str
+ build_status: ProcessStatus | None
+ build_progress: int | None
name: str
command: str | None
command_arguments: t.Annotated[list[MasterImageCommandArgument] | None, BeforeValidator(ensure_position_none)]
@@ -205,19 +212,18 @@ class UpdateProjectNode(BaseModel):
LogLevel = t.Literal["emerg", "alert", "crit", "error", "warn", "notice", "info", "debug"]
+LogChannel = t.Literal["http", "websocket", "background", "system"]
class Log(BaseModel):
- time: str | int
- message: str | None
+ time: str
+ message: str
+ service: str
+ channel: LogChannel
level: LogLevel
labels: dict[str, str | None]
-AnalysisBuildStatus = t.Literal["starting", "started", "stopping", "stopped", "finished", "failed"]
-AnalysisRunStatus = t.Literal["starting", "started", "running", "stopping", "stopped", "finished", "failed"]
-
-
class CreateAnalysis(BaseModel):
description: str | None
name: str | None
@@ -234,14 +240,23 @@ class CreateAnalysis(BaseModel):
class Analysis(CreateAnalysis):
id: uuid.UUID
nodes: int
+ nodes_approved: int
configuration_locked: bool
configuration_entrypoint_valid: bool
configuration_image_valid: bool
configuration_node_aggregator_valid: bool
configuration_node_default_valid: bool
configuration_nodes_valid: bool
- build_status: AnalysisBuildStatus | None
- run_status: AnalysisRunStatus | None
+ build_status: ProcessStatus | None
+ build_nodes_valid: bool
+ build_progress: int | None
+ build_hash: str | None
+ build_os: str | None
+ build_size: int | None
+ distribution_status: ProcessStatus | None
+ distribution_progress: int | None
+ execution_status: ProcessStatus | None
+ execution_progress: int | None
created_at: datetime
updated_at: datetime
registry: t.Annotated[Registry | None, IsIncludable] = None
@@ -266,13 +281,13 @@ class UpdateAnalysis(BaseModel):
AnalysisCommand = t.Literal[
- "spinUp",
- "tearDown",
"buildStart",
- "buildStop",
+ "buildCheck",
"configurationLock",
"configurationUnlock",
- "buildStatus",
+ "distributionStart",
+ "distributionCheck",
+ "storageCheck",
]
@@ -282,13 +297,13 @@ class CreateAnalysisNode(BaseModel):
AnalysisNodeApprovalStatus = t.Literal["rejected", "approved"]
-AnalysisNodeRunStatus = t.Literal["starting", "started", "stopping", "stopped", "running", "finished", "failed"]
class AnalysisNode(CreateAnalysisNode):
id: uuid.UUID
approval_status: AnalysisNodeApprovalStatus | None
- run_status: AnalysisNodeRunStatus | None
+ execution_status: ProcessStatus | None
+ execution_progress: int | None
comment: str | None
artifact_tag: str | None
artifact_digest: str | None
@@ -303,7 +318,8 @@ class AnalysisNode(CreateAnalysisNode):
class UpdateAnalysisNode(BaseModel):
comment: str | None | UNSET_T = UNSET
approval_status: AnalysisNodeApprovalStatus | None | UNSET_T = UNSET
- run_status: AnalysisNodeRunStatus | None | UNSET_T = UNSET
+ execution_status: ProcessStatus | None | UNSET_T = UNSET
+ execution_progress: int | None | UNSET_T = UNSET
class CreateAnalysisNodeLog(BaseModel):
@@ -315,24 +331,31 @@ class CreateAnalysisNodeLog(BaseModel):
level: LogLevel
-AnalysisBucketType = t.Literal["CODE", "RESULT", "TEMP"]
+class AnalysisBucketType(str, Enum):
+ CODE = "CODE"
+ RESULT = "RESULT"
+ TEMP = "TEMP"
-class AnalysisBucket(BaseModel):
- id: uuid.UUID
+class CreateAnalysisBucket(BaseModel):
type: AnalysisBucketType
- external_id: uuid.UUID | None
+ bucket_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
+ analysis_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
+
+
+class AnalysisBucket(CreateAnalysisBucket):
+ id: uuid.UUID
created_at: datetime
updated_at: datetime
- analysis_id: uuid.UUID
analysis: t.Annotated[Analysis, IsIncludable] = None
realm_id: uuid.UUID
class CreateAnalysisBucketFile(BaseModel):
- name: str
- external_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
+ path: str
+ bucket_file_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
bucket_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
+ analysis_bucket_id: t.Annotated[uuid.UUID, Field(), WrapValidator(uuid_validator)]
root: bool
@@ -340,12 +363,13 @@ class AnalysisBucketFile(CreateAnalysisBucketFile):
id: uuid.UUID
created_at: datetime
updated_at: datetime
+ analysis_bucket: t.Annotated[AnalysisBucket, IsIncludable] = None
realm_id: uuid.UUID
user_id: uuid.UUID | None
robot_id: uuid.UUID | None
+ client_id: uuid.UUID | None
analysis_id: uuid.UUID
analysis: t.Annotated[Analysis, IsIncludable] = None
- bucket: t.Annotated[AnalysisBucket, IsIncludable] = None
class UpdateAnalysisBucketFile(BaseModel):
@@ -620,14 +644,16 @@ def update_analysis_node(
analysis_node_id: AnalysisNode | uuid.UUID | str,
comment: str | None | UNSET_T = UNSET,
approval_status: AnalysisNodeApprovalStatus | None | UNSET_T = UNSET,
- run_status: AnalysisNodeRunStatus | None | UNSET_T = UNSET,
+ execution_status: ProcessStatus | None | UNSET_T = UNSET,
+ execution_progress: int | None | UNSET_T = UNSET,
) -> AnalysisNode:
return self._update_resource(
AnalysisNode,
UpdateAnalysisNode(
comment=comment,
approval_status=approval_status,
- run_status=run_status,
+ execution_status=execution_status,
+ execution_progress=execution_progress,
),
"analysis-nodes",
analysis_node_id,
@@ -658,22 +684,20 @@ def create_analysis_node_log(
message: str,
status: str = None,
code: str = None,
- ) -> None:
- """Note that this method returns :any:`None` since the response does not contain the log resource."""
- # TODO: This method should also use _create_resource() from the base client. Therefore creating analysis node
- # TODO: logs have to return a status code of 201 and the response has to contain the log resource itself.
- resource = CreateAnalysisNodeLog(
- analysis_id=analysis_id,
- node_id=node_id,
- code=code,
- status=status,
- message=message,
- level=level,
- )
- r = self._client.post("analysis-node-logs", json=resource.model_dump(mode="json"))
- if r.status_code != httpx.codes.ACCEPTED.value:
- raise new_hub_api_error_from_response(r)
- return None
+ ) -> Log:
+ return self._create_resource(
+ Log,
+ CreateAnalysisNodeLog(
+ analysis_id=analysis_id,
+ node_id=node_id,
+ level=level,
+ message=message,
+ status=status,
+ code=code,
+ ),
+ "analysis-node-logs",
+ expected_code=httpx.codes.ACCEPTED.value,
+ )
def delete_analysis_node_logs(self, analysis_id: Analysis | uuid.UUID | str, node_id: Node | uuid.UUID | str):
r = self._client.delete(
@@ -689,6 +713,25 @@ def delete_analysis_node_logs(self, analysis_id: Analysis | uuid.UUID | str, nod
def find_analysis_node_logs(self, **params: te.Unpack[FindAllKwargs]) -> list[Log]:
return self._find_all_resources(Log, "analysis-node-logs", **params)
+ def create_analysis_bucket(
+ self,
+ bucket_type: AnalysisBucketType,
+ bucket_id: Bucket | uuid.UUID | str,
+ analysis_id: Analysis | uuid.UUID | str,
+ ) -> AnalysisBucket:
+ return self._create_resource(
+ AnalysisBucket,
+ CreateAnalysisBucket(
+ type=bucket_type,
+ bucket_id=bucket_id,
+ analysis_id=analysis_id,
+ ),
+ "analysis-buckets",
+ )
+
+ def delete_analysis_bucket(self, analysis_bucket_id: AnalysisBucket | uuid.UUID | str):
+ self._delete_resource("analysis-buckets", analysis_bucket_id)
+
def get_analysis_buckets(self, **params: te.Unpack[GetKwargs]) -> list[AnalysisBucket]:
return self._get_all_resources(
AnalysisBucket, "analysis-buckets", include=get_includable_names(AnalysisBucket), **params
@@ -731,24 +774,24 @@ def get_analysis_bucket_file(
**params,
)
- def delete_analysis_bucket_file(
- self, analysis_bucket_file_id: AnalysisBucketFile | uuid.UUID | str
- ) -> AnalysisBucketFile | None:
+ def delete_analysis_bucket_file(self, analysis_bucket_file_id: AnalysisBucketFile | uuid.UUID | str):
self._delete_resource("analysis-bucket-files", analysis_bucket_file_id)
def create_analysis_bucket_file(
self,
- name: str,
+ path: str,
bucket_file_id: BucketFile | uuid.UUID | str,
+ bucket_id: Bucket | uuid.UUID | str,
analysis_bucket_id: AnalysisBucket | uuid.UUID | str,
is_entrypoint: bool = False,
) -> AnalysisBucketFile:
return self._create_resource(
AnalysisBucketFile,
CreateAnalysisBucketFile(
- external_id=bucket_file_id,
- bucket_id=analysis_bucket_id,
- name=name,
+ bucket_file_id=bucket_file_id,
+ bucket_id=bucket_id,
+ analysis_bucket_id=analysis_bucket_id,
+ path=path,
root=is_entrypoint,
),
"analysis-bucket-files",
diff --git a/flame_hub/_storage_client.py b/flame_hub/_storage_client.py
index 31e17d9..3bda43e 100644
--- a/flame_hub/_storage_client.py
+++ b/flame_hub/_storage_client.py
@@ -30,8 +30,8 @@ class Bucket(CreateBucket):
id: uuid.UUID
created_at: datetime
updated_at: datetime
- actor_id: uuid.UUID
- actor_type: str
+ actor_id: uuid.UUID | None
+ actor_type: str | None
realm_id: uuid.UUID | None
@@ -46,6 +46,7 @@ class BucketFile(BaseModel):
updated_at: datetime
actor_type: str
actor_id: uuid.UUID
+ realm_id: uuid.UUID
bucket_id: uuid.UUID
bucket: t.Annotated[Bucket, IsIncludable] = None
diff --git a/flame_hub/auth.py b/flame_hub/auth.py
index 54a0e1f..ef49172 100644
--- a/flame_hub/auth.py
+++ b/flame_hub/auth.py
@@ -1,3 +1,3 @@
-__all__ = ["PasswordAuth", "RobotAuth"]
+__all__ = ["PasswordAuth", "RobotAuth", "ClientAuth"]
-from ._auth_flows import PasswordAuth, RobotAuth
+from ._auth_flows import PasswordAuth, RobotAuth, ClientAuth
diff --git a/flame_hub/models.py b/flame_hub/models.py
index 253f811..8df42b6 100644
--- a/flame_hub/models.py
+++ b/flame_hub/models.py
@@ -16,13 +16,12 @@
"UpdateAnalysis",
"AnalysisCommand",
"CreateAnalysisNode",
- "AnalysisNodeApprovalStatus",
- "AnalysisNodeRunStatus",
"AnalysisNode",
"UpdateAnalysisNode",
"CreateAnalysisNodeLog",
"Log",
"AnalysisBucketType",
+ "CreateAnalysisBucket",
"AnalysisBucket",
"CreateAnalysisBucketFile",
"AnalysisBucketFile",
@@ -68,6 +67,9 @@
"ResourceList",
"AccessToken",
"RefreshToken",
+ "CreateClient",
+ "UpdateClient",
+ "Client",
]
from ._core_client import (
@@ -88,13 +90,12 @@
UpdateAnalysis,
AnalysisCommand,
CreateAnalysisNode,
- AnalysisNodeApprovalStatus,
- AnalysisNodeRunStatus,
AnalysisNode,
UpdateAnalysisNode,
CreateAnalysisNodeLog,
Log,
AnalysisBucketType,
+ CreateAnalysisBucket,
AnalysisBucket,
CreateAnalysisBucketFile,
AnalysisBucketFile,
@@ -134,5 +135,8 @@
RobotPermission,
CreateRobotRole,
RobotRole,
+ CreateClient,
+ Client,
+ UpdateClient,
)
from ._auth_flows import AccessToken, RefreshToken
diff --git a/flame_hub/types.py b/flame_hub/types.py
index 752ea4c..a0ab999 100644
--- a/flame_hub/types.py
+++ b/flame_hub/types.py
@@ -12,17 +12,16 @@
"RegistryProjectType",
"MasterImageCommandArgument",
"ProjectNodeApprovalStatus",
- "AnalysisRunStatus",
- "AnalysisBuildStatus",
"AnalysisCommand",
"AnalysisNodeApprovalStatus",
- "AnalysisNodeRunStatus",
"AnalysisBucketType",
"LogLevel",
"UploadFile",
"UuidIdentifiable",
"ResourceT",
"UNSET_T",
+ "ProcessStatus",
+ "LogChannel",
]
from ._base_client import (
@@ -44,12 +43,11 @@
RegistryProjectType,
MasterImageCommandArgument,
ProjectNodeApprovalStatus,
- AnalysisRunStatus,
- AnalysisBuildStatus,
AnalysisCommand,
AnalysisNodeApprovalStatus,
- AnalysisNodeRunStatus,
AnalysisBucketType,
LogLevel,
+ ProcessStatus,
+ LogChannel,
)
from ._storage_client import UploadFile
diff --git a/tests/conftest.py b/tests/conftest.py
index 9d85b63..6ae5f0d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -299,13 +299,14 @@ def ui(storage, core, authup, network, use_testcontainers, hub_version):
@pytest.fixture(scope="session")
-def nginx(
- ui, core, authup, storage, messenger, network, analysis_manager, use_testcontainers, tmp_path_factory, hub_version
-):
+def nginx(ui, core, authup, storage, messenger, network, analysis_manager, use_testcontainers, tmp_path_factory):
if not use_testcontainers:
yield None
else:
- r = httpx.get(f"https://raw.githubusercontent.com/PrivateAIM/hub/refs/tags/v{hub_version}/nginx.conf")
+ r = httpx.get(
+ "https://raw.githubusercontent.com/PrivateAIM/hub-deployment/a3ef9dedbf857e09695b5f898979caadd3f1f1dc/"
+ "docker-compose/nginx.conf"
+ )
assert r.status_code == 200
nginx_conf_path = tmp_path_factory.mktemp("nginx-") / "nginx.conf"
diff --git a/tests/helpers.py b/tests/helpers.py
index 35971a0..71d3619 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -33,3 +33,12 @@ def next_random_string(charset=string.ascii_letters, length: int = 20):
def next_uuid():
return str(uuid.uuid4())
+
+
+def next_random_number(lower: int | float = 0, upper: int | float = 1, is_integer: bool = False):
+ if is_integer:
+ if not isinstance(lower, int) or not isinstance(upper, int):
+ raise TypeError("'lower' and 'upper' must be integers when is_integer=True.")
+ return random.randint(lower, upper)
+ else:
+ return random.uniform(lower, upper)
diff --git a/tests/test_auth.py b/tests/test_auth.py
index 3c08e78..27ca384 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -11,6 +11,7 @@
UserRole,
RobotPermission,
RobotRole,
+ Client,
)
from tests.helpers import next_random_string, next_uuid
@@ -142,6 +143,23 @@ def robot_role_includables():
return get_includable_names(RobotRole)
+@pytest.fixture()
+def client(auth_client, realm):
+ new_client = auth_client.create_client(name=next_random_string(), realm_id=realm)
+ yield new_client
+ auth_client.delete_client(client_id=new_client)
+
+
+@pytest.fixture(scope="session")
+def client_includables():
+ return get_includable_names(Client)
+
+
+@pytest.fixture(scope="session")
+def client_fields():
+ return get_field_names(Client)
+
+
def test_get_realm(auth_client, realm):
assert realm == auth_client.get_realm(realm.id)
@@ -461,3 +479,40 @@ def test_find_robot_roles(auth_client, robot_role, robot_role_includables):
assert [robot_role.id] == [rr.id for rr in robot_roles_find]
assert all(includable in rr.model_fields_set for rr in robot_roles_find for includable in robot_role_includables)
+
+
+def test_get_client(auth_client, client, client_includables, client_fields):
+ client_get = auth_client.get_client(client_id=client, fields=client_fields)
+
+ assert client_get.id == client.id
+ assert all(includable in client_get.model_fields_set for includable in client_includables)
+ assert all(field in client_get.model_fields_set for field in client_fields)
+
+
+def test_get_client_not_found(auth_client):
+ assert auth_client.get_client(client_id=next_uuid()) is None
+
+
+def test_get_clients(auth_client, client, client_includables, client_fields):
+ clients_get = auth_client.get_clients(fields=client_fields)
+
+ assert len(clients_get) > 0
+ assert all(includable in c.model_fields_set for c in clients_get for includable in client_includables)
+ assert all(field in c.model_fields_set for c in clients_get for field in client_fields)
+
+
+def test_find_clients(auth_client, client, client_includables, client_fields):
+ # Use "name" for filtering because there is no filter mechanism for attribute "id".
+ clients_find = auth_client.find_clients(filter={"name": client.name}, fields=client_fields)
+
+ assert [client.id] == [c.id for c in clients_find]
+ assert all(includable in c.model_fields_set for c in clients_find for includable in client_includables)
+ assert all(field in c.model_fields_set for c in clients_find for field in client_fields)
+
+
+def test_update_client(auth_client, client):
+ new_name = next_random_string()
+ new_client = auth_client.update_client(client_id=client, name=new_name)
+
+ assert client != new_client
+ assert new_client.name == new_name
diff --git a/tests/test_core.py b/tests/test_core.py
index 2b689e0..9309480 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -6,7 +6,7 @@
import pytest
from flame_hub import HubAPIError, get_field_names, get_includable_names
-from flame_hub.types import NodeType
+from flame_hub.types import NodeType, ProcessStatus, AnalysisBucketType, LogLevel
from flame_hub.models import (
Registry,
RegistryProject,
@@ -18,7 +18,7 @@
AnalysisBucket,
AnalysisBucketFile,
)
-from tests.helpers import next_random_string, next_uuid, assert_eventually
+from tests.helpers import next_random_string, next_uuid, assert_eventually, next_random_number
pytestmark = pytest.mark.integration
@@ -37,15 +37,15 @@ def sync_master_images(core_client):
def master_image(core_client):
default_master_image = os.getenv("PYTEST_DEFAULT_MASTER_IMAGE", "python/base")
- if len(core_client.find_master_images(filter={"path": default_master_image})) != 1:
+ if len(core_client.find_master_images(filter={"virtual_path": default_master_image})) != 1:
sync_master_images(core_client)
def _check_default_master_image_available():
- assert len(core_client.find_master_images(filter={"path": default_master_image})) == 1
+ assert len(core_client.find_master_images(filter={"virtual_path": default_master_image})) == 1
assert_eventually(_check_default_master_image_available, max_retries=10, delay_millis=1000)
- return core_client.find_master_images(filter={"path": default_master_image})[0]
+ return core_client.find_master_images(filter={"virtual_path": default_master_image})[0]
@pytest.fixture(scope="module")
@@ -101,14 +101,9 @@ def project_node_includables():
@pytest.fixture()
def analysis(core_client, project, master_image):
- args = [
- {"value": next_random_string()},
- {"value": next_random_string(), "position": random.choice(("before", "after"))},
- ]
new_analysis = core_client.create_analysis(
project,
master_image_id=master_image.id,
- image_command_arguments=args,
)
yield new_analysis
core_client.delete_analysis(new_analysis)
@@ -132,24 +127,20 @@ def analysis_node_includables():
@pytest.fixture()
-def analysis_buckets(core_client, analysis):
- def _check_analysis_buckets_present():
- all_analysis_bucket_types = {"CODE", "RESULT", "TEMP"}
-
- # Constrain to buckets created for this analysis.
- analysis_buckets = core_client.find_analysis_buckets(filter={"analysis_id": analysis.id})
- assert len(analysis_buckets) == len(all_analysis_bucket_types)
+def analysis_code_bucket(core_client, storage_client, analysis):
+ bucket = storage_client.create_bucket(name=next_random_string())
+ analysis_bucket = core_client.create_analysis_bucket(
+ bucket_type=AnalysisBucketType.CODE,
+ analysis_id=analysis,
+ bucket_id=bucket,
+ )
- # Check that a bucket for each type exists.
- analysis_bucket_types = set(a.type for a in analysis_buckets)
- assert all_analysis_bucket_types == analysis_bucket_types
+ assert analysis_bucket.type == AnalysisBucketType.CODE
- assert_eventually(_check_analysis_buckets_present)
+ yield analysis_bucket
- return {
- analysis_bucket.type: analysis_bucket
- for analysis_bucket in core_client.find_analysis_buckets(filter={"analysis_id": analysis.id})
- }
+ core_client.delete_analysis_bucket(analysis_bucket)
+ storage_client.delete_bucket(bucket)
@pytest.fixture(scope="session")
@@ -158,22 +149,28 @@ def analysis_bucket_includables():
@pytest.fixture()
-def analysis_bucket_file(core_client, storage_client, analysis_buckets, rng_bytes):
+def analysis_bucket_file(core_client, storage_client, analysis_code_bucket, rng_bytes):
# Use the analysis bucket for code files so that the created bucket file can be used as an entrypoint to be able
# to generate analysis logs.
- analysis_bucket = analysis_buckets["CODE"]
# Upload example file to referenced bucket.
+ file_name = next_random_string()
bucket_files = storage_client.upload_to_bucket(
- analysis_bucket.external_id, {"file_name": next_random_string(), "content": rng_bytes}
+ analysis_code_bucket.bucket_id, {"file_name": file_name, "content": rng_bytes}
)
# Link uploaded file to analysis bucket.
+ bucket_file = bucket_files.pop()
new_analysis_bucket_file = core_client.create_analysis_bucket_file(
- next_random_string(), bucket_files.pop(), analysis_bucket, is_entrypoint=True
+ path=file_name,
+ bucket_file_id=bucket_file,
+ analysis_bucket_id=analysis_code_bucket,
+ bucket_id=bucket_file.bucket_id,
+ is_entrypoint=True,
)
yield new_analysis_bucket_file
core_client.delete_analysis_bucket_file(new_analysis_bucket_file)
+ storage_client.delete_bucket_file(bucket_file.id)
@pytest.fixture(scope="session")
@@ -423,11 +420,16 @@ def test_update_analysis(core_client, analysis):
assert new_analysis.image_command_arguments == args # Note that args is modified during updating the analysis.
-def test_create_analysis_without_arguments(core_client, project):
- analysis = core_client.create_analysis(project_id=project.id, image_command_arguments=None)
-
- assert analysis.image_command_arguments == []
-
+def test_create_analysis_with_arguments(core_client, project, master_image):
+ args = [
+ {"value": next_random_string()},
+ {"value": next_random_string(), "position": random.choice(("before", "after"))},
+ ]
+ analysis = core_client.create_analysis(
+ project,
+ master_image_id=master_image.id,
+ image_command_arguments=args,
+ )
core_client.delete_analysis(analysis)
@@ -443,29 +445,40 @@ def test_unlock_analysis(core_client, configured_analysis):
def test_build_analysis(core_client, configured_analysis):
- assert core_client.send_analysis_command(configured_analysis.id, command="buildStart").build_status == "starting"
- assert core_client.send_analysis_command(configured_analysis.id, command="buildStop").build_status == "stopping"
-
-
-def test_build_status_analysis(core_client, configured_analysis):
- core_client.send_analysis_command(configured_analysis.id, command="buildStart")
- core_client.send_analysis_command(configured_analysis.id, command="buildStatus")
+ assert (
+ core_client.send_analysis_command(
+ analysis_id=configured_analysis.id,
+ command="buildStart",
+ ).build_status
+ == "starting"
+ )
- def _check_checking_event_in_logs():
- logs = core_client.find_analysis_logs(filter={"analysis_id": configured_analysis.id})
- assert "configured" in [log.labels.get("event", None) for log in logs]
+ def _wait_for_successful_build():
+ try:
+ analysis = core_client.send_analysis_command(analysis_id=configured_analysis.id, command="buildCheck")
+ except HubAPIError as e:
+ if "The analysis build process has already been successfully completed." in str(e):
+ analysis = core_client.get_analysis(analysis_id=configured_analysis.id)
+ else:
+ raise e
+ assert analysis.build_status == "finished"
+ assert analysis.build_progress == 100
- assert_eventually(_check_checking_event_in_logs)
+ assert_eventually(_wait_for_successful_build)
def test_update_analysis_node(core_client, analysis_node):
+ progress = next_random_number(upper=100, is_integer=True)
+ status = random.choice(t.get_args(ProcessStatus))
new_analysis_node = core_client.update_analysis_node(
analysis_node.id,
- run_status="starting",
+ execution_status=status,
+ execution_progress=progress,
)
assert analysis_node != new_analysis_node
- assert new_analysis_node.run_status == "starting"
+ assert new_analysis_node.execution_status == status
+ assert new_analysis_node.execution_progress == progress
def test_get_analysis_nodes(core_client, analysis_node, analysis_node_includables):
@@ -498,41 +511,53 @@ def test_get_analysis_node_not_found(core_client):
assert core_client.get_analysis_node(next_uuid()) is None
+@pytest.mark.xfail(reason="Deletion of analysis node logs does not work")
def test_analysis_node_logs(core_client, analysis_node):
- core_client.create_analysis_node_log(
- analysis_id=analysis_node.analysis_id, node_id=analysis_node.node_id, level="info", message="test"
+ log = core_client.create_analysis_node_log(
+ analysis_id=analysis_node.analysis_id,
+ node_id=analysis_node.node_id,
+ level=random.choice(t.get_args(LogLevel)),
+ message=next_random_string(),
)
def _check_analysis_node_logs_present():
- assert (
- len(
- core_client.find_analysis_node_logs(
- filter={"analysis_id": analysis_node.analysis_id, "node_id": analysis_node.node_id}
- )
- )
- == 1
+ found_logs = core_client.find_analysis_node_logs(
+ filter={"analysis_id": analysis_node.analysis_id, "node_id": analysis_node.node_id}
)
+ assert len(found_logs) == 1
assert_eventually(_check_analysis_node_logs_present)
- # TODO: Deleting analysis node logs raises am error in the hub.
- # core_client.delete_analysis_node_logs(
- # analysis_id=analysis_node.analysis_id, node_id=analysis_node.node_id
- # )
+ new_log = core_client.find_analysis_node_logs(
+ filter={"analysis_id": analysis_node.analysis_id, "node_id": analysis_node.node_id}
+ )[0]
- # assert len(core_client.find_analysis_node_logs(
- # filter={"analysis_id": analysis_node.analysis_id, "node_id": analysis_node.node_id}
- # )) == 0
+ assert log == new_log
+ core_client.delete_analysis_node_logs(analysis_id=analysis_node.analysis_id, node_id=analysis_node.node_id)
+
+ assert (
+ len(
+ core_client.find_analysis_node_logs(
+ filter={"analysis_id": analysis_node.analysis_id, "node_id": analysis_node.node_id}
+ )
+ )
+ == 0
+ )
-def test_get_analysis_bucket(core_client, analysis_buckets, analysis_bucket_includables):
- analysis_bucket_get = core_client.get_analysis_bucket(analysis_buckets["CODE"].id)
- assert analysis_bucket_get.id == analysis_buckets["CODE"].id
+def test_get_analysis_bucket(core_client, analysis_code_bucket, analysis_bucket_includables):
+ analysis_bucket_get = core_client.get_analysis_bucket(analysis_code_bucket)
+
+ assert analysis_bucket_get.id == analysis_code_bucket.id
assert all(includable in analysis_bucket_get.model_fields_set for includable in analysis_bucket_includables)
-def test_get_analysis_buckets(core_client, analysis_buckets, analysis_bucket_includables):
+def test_get_analysis_bucket_not_found(core_client):
+ assert core_client.get_analysis_bucket(next_uuid()) is None
+
+
+def test_get_analysis_buckets(core_client, analysis_code_bucket, analysis_bucket_includables):
analysis_buckets_get = core_client.get_analysis_buckets()
assert len(analysis_buckets_get) > 0
@@ -541,6 +566,18 @@ def test_get_analysis_buckets(core_client, analysis_buckets, analysis_bucket_inc
)
+def test_find_analysis_buckets(core_client, analysis_code_bucket, analysis_bucket_includables):
+ # Use "analysis_id" instead of "id" because filtering for ids does not work.
+ analysis_buckets_find = core_client.find_analysis_buckets(filter={"analysis_id": analysis_code_bucket.analysis_id})
+
+ assert [analysis_code_bucket.id] == [bucket.id for bucket in analysis_buckets_find]
+ assert all(
+ includable in bucket.model_fields_set
+ for bucket in analysis_buckets_find
+ for includable in analysis_bucket_includables
+ )
+
+
def test_get_analysis_bucket_file(core_client, analysis_bucket_file, analysis_bucket_file_includables):
analysis_bucket_file_get = core_client.get_analysis_bucket_file(analysis_bucket_file.id)
diff --git a/tests/test_flow.py b/tests/test_flow.py
index 4a459b0..0aa9e26 100644
--- a/tests/test_flow.py
+++ b/tests/test_flow.py
@@ -2,7 +2,7 @@
import pytest
from flame_hub import HubAPIError
-from flame_hub.auth import RobotAuth, PasswordAuth
+from flame_hub.auth import RobotAuth, PasswordAuth, ClientAuth
from tests.helpers import next_random_string
pytestmark = pytest.mark.integration
@@ -31,11 +31,15 @@ def test_robot_auth(auth_client, auth_base_url, master_realm):
robot = auth_client.create_robot(next_random_string(), master_realm, robot_secret)
robot_id = str(robot.id)
- robot_auth = RobotAuth(
- robot_id=robot_id,
- robot_secret=robot_secret,
- base_url=auth_base_url,
- )
+ with pytest.warns(
+ DeprecationWarning,
+ match="'RobotAuth' is deprecated and will be removed in a future version. Please use 'ClientAuth' instead.",
+ ):
+ robot_auth = RobotAuth(
+ robot_id=robot_id,
+ robot_secret=robot_secret,
+ base_url=auth_base_url,
+ )
client = httpx.Client(auth=robot_auth)
@@ -46,6 +50,39 @@ def test_robot_auth(auth_client, auth_base_url, master_realm):
auth_client.delete_robot(robot)
+def test_client_auth(auth_client, auth_base_url, master_realm):
+ client_secret = next_random_string(length=64)
+ client_resource = auth_client.create_client(name=next_random_string(), realm_id=master_realm, secret=client_secret)
+ client_id = str(client_resource.id)
+
+ client_auth = ClientAuth(
+ client_id=client_id,
+ client_secret=client_secret,
+ base_url=auth_base_url,
+ )
+
+ client = httpx.Client(auth=client_auth)
+
+ # Check that the auth flow works.
+ r = client.get(auth_base_url)
+ assert r.status_code == httpx.codes.OK.value
+
+ auth_client.delete_client(client_resource)
+
+
+def test_client_auth_raise_error(nginx, auth_base_url):
+ # Use random client_id and client_secret.
+ client_auth = ClientAuth(client_id=next_random_string(), client_secret=next_random_string(), base_url=auth_base_url)
+ client = httpx.Client(auth=client_auth)
+
+ # This call should fail.
+ with pytest.raises(HubAPIError) as e:
+ client.get(auth_base_url)
+
+ assert "The client credentials are invalid" in str(e.value)
+ assert e.value.error_response.status_code == httpx.codes.BAD_REQUEST.value
+
+
def test_password_auth_raise_error(nginx, auth_base_url):
# use random username and password
pw_auth = PasswordAuth(next_random_string(), next_random_string(), auth_base_url)
@@ -60,8 +97,12 @@ def test_password_auth_raise_error(nginx, auth_base_url):
def test_robot_auth_raise_error(nginx, auth_base_url):
- # use random id and secret
- robot_auth = RobotAuth(next_random_string(), next_random_string(), auth_base_url)
+ with pytest.warns(
+ DeprecationWarning,
+ match="'RobotAuth' is deprecated and will be removed in a future version. Please use 'ClientAuth' instead.",
+ ):
+ # use random id and secret
+ robot_auth = RobotAuth(next_random_string(), next_random_string(), auth_base_url)
client = httpx.Client(auth=robot_auth)
# this call should fail