diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6966298..bd96342 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -32,7 +32,7 @@ jobs:
java-version: 17
distribution: "zulu"
- name: Install the project
- run: uv sync --locked --extra dev_local
+ run: uv sync --locked --extra dev
- name: Run code checks
run: uv run ruff check
- name: Check code formatting
@@ -65,6 +65,10 @@ jobs:
version: 0.260.0
- name: Install the project
run: uv sync --locked --extra dev
+ - name: Install Databricks Connect
+ run: |
+ uv pip uninstall pyspark
+ uv pip install databricks-connect==16.3.5
- name: Check Databricks CLI
run: databricks current-user me
- name: Run tests
diff --git a/README.md b/README.md
index 9ec955a..352ad5e 100644
--- a/README.md
+++ b/README.md
@@ -2,20 +2,18 @@
This project is an example implementation of a [Databricks Asset Bundle](https://docs.databricks.com/aws/en/dev-tools/bundles/) using a [Databricks Free Edition](https://www.databricks.com/learn/free-edition) workspace.
-The project ist configured using `pyproject.toml` (Python specifics) and `databricks.yaml` (Databricks Bundle specifics) and uses [uv](https://docs.astral.sh/uv/) to manage the Python project and dependencies.
+The project is configured using `pyproject.toml` (Python specifics) and `databricks.yaml` (Databricks Bundle specifics) and uses [uv](https://docs.astral.sh/uv/) to manage the Python project and dependencies.
-## Repo Overview
+## Repository Structure
-* `.github/workflows`: CI/CD jobs to test and dpeloy bundle
-* `dab_project`: Python project (Used in Databricks Workflow as Python-Wheel-Task)
-* `dbt`: [dbt](https://github.com/dbt-labs/dbt-core) project (Used in Databricks Workflow as dbt-Task)
- * dbt-Models used from https://github.com/dbt-labs/jaffle_shop_duckdb
-* `resources`: Resources such as Databricks Workflows or Databricks Volumes/Schemas
- * Python-based workflow: https://docs.databricks.com/aws/en/dev-tools/bundles/python
- * YAML-based Workflow: https://docs.databricks.com/aws/en/dev-tools/bundles/resources#job
-* `scripts`: Python script to setup groups, service principals and catalogs used in a Databricks (Free Edition) workspace
-* `tests`: Unit-tests running on Databricks (via Connect) or locally
- * Used in [ci.yml](.github/workflows/ci.yml) jobs
+| Directory | Description |
+|-----------|-------------|
+| `.github/workflows` | CI/CD jobs to test and deploy bundle |
+| `dab_project` | Python project (Used in Databricks Workflow as Python-Wheel-Task) |
+| `dbt` | [dbt](https://github.com/dbt-labs/dbt-core) project<br>• Used in Databricks Workflow as dbt-Task<br>• dbt-Models used from https://github.com/dbt-labs/jaffle_shop_duckdb |
+| `resources` | Resources such as Databricks Workflows or Databricks Volumes/Schemas<br>• Python-based workflow: https://docs.databricks.com/aws/en/dev-tools/bundles/python<br>• YAML-based Workflow: https://docs.databricks.com/aws/en/dev-tools/bundles/resources#job |
+| `scripts` | Python script to set up groups, service principals and catalogs used in a Databricks (Free Edition) workspace |
+| `tests` | Unit-tests running on Databricks (via Connect) or locally<br>• Used in [ci.yml](.github/workflows/ci.yml) jobs |
+
+
+
+
+
## Databricks Workspace
@@ -52,7 +50,7 @@ Sync entire `uv` environment with dev dependencies:
uv sync --extra dev
```
-> **Note:** `dev` uses Databricks Connect, while `dev_local` uses local Spark
+> **Note:** We install Databricks Connect in a follow-up step
#### (Optional) Activate virtual environment
@@ -66,14 +64,26 @@ Windows:
.venv\Scripts\activate
```
+### Databricks Connect
+
+Install `databricks-connect` in the active environment. This requires authentication to be set up via the Databricks CLI.
+
+```bash
+uv pip uninstall pyspark
+uv pip install databricks-connect==16.3.5
+```
+> **Note:** For Databricks Runtime 16.3
+
+See https://docs.databricks.com/aws/en/dev-tools/vscode-ext/ for using Databricks Connect extension in VS Code.
+
### Unit-Tests
```bash
uv run pytest -v
```
-Based on whether Databricks Connect (the `dev` default) is enabled or not the Unit-Tests try to use a Databricks Cluster or start a local Spark session with Delta support.
-* On Databricks the unit-tests currently assume the catalog `unit_tests` exists (not ideal).
+Based on whether Databricks Connect is enabled or not, the Unit-Tests try to use a Databricks Cluster or start a local Spark session with Delta support.
+* On Databricks the unit-tests currently assume the catalog `lake_dev` exists.
> **Note:** For local Spark Java is required. On Windows Spark/Delta requires HADOOP libraries and generally does not run well, opt for `wsl` instead.
@@ -81,15 +91,11 @@ Based on whether Databricks Connect (the `dev` default) is enabled or not the Un
```bash
# Linting
-ruff check --fix
+uv run ruff check --fix
# Formatting
-ruff format
+uv run ruff format
```
-### Databricks Connect
-
-See https://docs.databricks.com/aws/en/dev-tools/vscode-ext/ for using Databricks Connect extension in VS Code.
-
### Setup Databricks Workspace
The following script sets up a Databricks (Free Edition) Workspace for this project with additional catalogs, groups and service principals. It uses both Databricks-SDK and Databricks Connect (Serverless).
@@ -150,7 +156,7 @@ uv run ./scripts/setup_workspace.py
The `dbt` project is based on https://github.com/dbt-labs/jaffle_shop_duckdb with following changes:
* Schema bronze, silver, gold
- * document materialization `use_materialization_v2`
+ * documented materialization `use_materialization_v2`
* Primary, Foreign Key Constraints
## TODO:
diff --git a/pyproject.toml b/pyproject.toml
index 1d23145..c8221b4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,21 +21,7 @@ dependencies = [
[project.optional-dependencies]
dev = [
- # Databricks Runtime (connect includes delta/pyspark)
- "databricks-connect~=16.3.0",
- "pydantic==2.8.2",
- # dbt
- "dbt-databricks~=1.10.0",
- # Tooling
- "databricks-bundles~=0.260.0", # For Python-based Workflows
- "mypy", # Type hints
- "pip", # Databricks extension needs it
- "pytest", # Unit testing
- "ruff", # Linting/Formatting
-]
-# Is this really needed?
-dev_local = [
- # Databricks Runtime (connect includes delta/pyspark)
+ # Runtime
"delta-spark>=3.3.0, <4.0.0",
"pydantic==2.8.2",
"pyspark>=3.5.0, <4.0.0",
diff --git a/resources/constants.py b/resources/constants.py
index bae7bf0..56c73c3 100644
--- a/resources/constants.py
+++ b/resources/constants.py
@@ -21,7 +21,8 @@ class Variables:
DEFAULT_ENVIRONMENT = JobEnvironment(
environment_key="default",
spec=Environment(
- environment_version=Variables.serverless_environment_version, dependencies=["./dist/*.whl"]
+ environment_version=Variables.serverless_environment_version,
+ dependencies=["./dist/dab_project*.whl"],
),
)
diff --git a/tests/conftest.py b/tests/conftest.py
index 77169f6..e44c38e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,6 @@
import shutil
import tempfile
+import uuid
from pathlib import Path
from typing import Generator, Optional
@@ -23,7 +24,7 @@ def spark() -> Generator[SparkSession, None, None]:
yield spark
else:
# If databricks-connect is not installed, we use use local Spark session
- warehouse_dir = tempfile.TemporaryDirectory().name
+ warehouse_dir = tempfile.mkdtemp()
_builder = (
SparkSession.builder.master("local[*]")
.config("spark.hive.metastore.warehouse.dir", Path(warehouse_dir).as_uri())
@@ -46,13 +47,38 @@ def spark() -> Generator[SparkSession, None, None]:
@pytest.fixture(scope="session")
-def catalog_name() -> Generator[Optional[str], None, None]:
+def catalog_name() -> Optional[str]:
"""Fixture to provide the catalog name for tests.
- In Databricks, we use the "unit_tests" catalog.
+ In Databricks, we use the "lake_dev" catalog.
Locally we run without a catalog, so we return None.
"""
if DATABRICKS_CONNECT_AVAILABLE:
- yield "unit_tests"
+ return "lake_dev"
else:
- yield None
+ return None
+
+
+@pytest.fixture(scope="module")
+def create_schema(spark, catalog_name, request) -> Generator[str, None, None]:
+ """Fixture to provide a schema for tests.
+
+ Creates a schema with a random name prefixed with the test module name and cleans it up after tests.
+ """
+ module_name = request.module.__name__.split(".")[-1] # Get just the module name without path
+ schema_name = f"pytest_{module_name}_{uuid.uuid4().hex[:8]}"
+
+ if catalog_name is not None:
+ full_schema_name = f"{catalog_name}.{schema_name}"
+ else:
+ full_schema_name = schema_name
+
+ spark.sql(f"CREATE SCHEMA IF NOT EXISTS {full_schema_name}")
+ yield schema_name
+ spark.sql(f"DROP SCHEMA IF EXISTS {full_schema_name} CASCADE")
+
+
+@pytest.fixture(scope="function")
+def table_name(request) -> str:
+ """Fixture to provide a table name based on the test function name."""
+ return request.node.name
diff --git a/tests/test_base_task.py b/tests/test_base_task.py
index 878ca6c..ea7e190 100644
--- a/tests/test_base_task.py
+++ b/tests/test_base_task.py
@@ -21,18 +21,18 @@ def _perform_task(self, catalog_name: str) -> None:
return Task.create_task_factory("TestTask")
-def test_etl_task_run(spark, catalog_name, request):
+def test_etl_task_run(spark, catalog_name, create_schema, table_name):
task = generate_test_task(
- schema_name=__name__,
- table_name=f"table_{request.node.name}",
+ schema_name=create_schema,
+ table_name=table_name,
)
task.run(catalog_name)
# Verify that the data was written to the Delta table
delta_table = DeltaWorker(
catalog_name=catalog_name,
- schema_name=__name__,
- table_name=f"table_{request.node.name}",
+ schema_name=create_schema,
+ table_name=table_name,
)
assert task.get_class_name() == "TestTask"
diff --git a/tests/test_delta.py b/tests/test_delta.py
index 186842c..1c7edca 100644
--- a/tests/test_delta.py
+++ b/tests/test_delta.py
@@ -13,7 +13,7 @@
# spark.sql(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE")
-def test_deltawriter_create_table_if_not_exists(spark, catalog_name, request):
+def test_deltawriter_create_table_if_not_exists(spark, catalog_name, create_schema, table_name):
schema = T.StructType(
[
T.StructField("key", T.IntegerType()),
@@ -22,8 +22,8 @@ def test_deltawriter_create_table_if_not_exists(spark, catalog_name, request):
)
delta_writer = DeltaWorker(
catalog_name=catalog_name,
- schema_name=__name__,
- table_name=f"table_{request.node.name}",
+ schema_name=create_schema,
+ table_name=table_name,
)
delta_writer.drop_table_if_exists()
diff --git a/uv.lock b/uv.lock
index 26b4fc3..0bb1450 100644
--- a/uv.lock
+++ b/uv.lock
@@ -189,16 +189,6 @@ dependencies = [
[package.optional-dependencies]
dev = [
- { name = "databricks-bundles" },
- { name = "databricks-connect" },
- { name = "dbt-databricks" },
- { name = "mypy" },
- { name = "pip" },
- { name = "pydantic" },
- { name = "pytest" },
- { name = "ruff" },
-]
-dev-local = [
{ name = "databricks-bundles" },
{ name = "dbt-databricks" },
{ name = "delta-spark" },
@@ -213,25 +203,17 @@ dev-local = [
[package.metadata]
requires-dist = [
{ name = "databricks-bundles", marker = "extra == 'dev'", specifier = "~=0.260.0" },
- { name = "databricks-bundles", marker = "extra == 'dev-local'", specifier = "~=0.260.0" },
- { name = "databricks-connect", marker = "extra == 'dev'", specifier = "~=16.3.0" },
{ name = "databricks-sdk", specifier = ">=0.41,<0.48.0" },
{ name = "dbt-databricks", marker = "extra == 'dev'", specifier = "~=1.10.0" },
- { name = "dbt-databricks", marker = "extra == 'dev-local'", specifier = "~=1.10.0" },
- { name = "delta-spark", marker = "extra == 'dev-local'", specifier = ">=3.3.0,<4.0.0" },
+ { name = "delta-spark", marker = "extra == 'dev'", specifier = ">=3.3.0,<4.0.0" },
{ name = "mypy", marker = "extra == 'dev'" },
- { name = "mypy", marker = "extra == 'dev-local'" },
{ name = "pip", marker = "extra == 'dev'" },
- { name = "pip", marker = "extra == 'dev-local'" },
{ name = "pydantic", marker = "extra == 'dev'", specifier = "==2.8.2" },
- { name = "pydantic", marker = "extra == 'dev-local'", specifier = "==2.8.2" },
- { name = "pyspark", marker = "extra == 'dev-local'", specifier = ">=3.5.0,<4.0.0" },
+ { name = "pyspark", marker = "extra == 'dev'", specifier = ">=3.5.0,<4.0.0" },
{ name = "pytest", marker = "extra == 'dev'" },
- { name = "pytest", marker = "extra == 'dev-local'" },
{ name = "ruff", marker = "extra == 'dev'" },
- { name = "ruff", marker = "extra == 'dev-local'" },
]
-provides-extras = ["dev", "dev-local"]
+provides-extras = ["dev"]
[[package]]
name = "daff"
@@ -251,27 +233,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6d/a5/abb889b97bc786f28c7b5377922a62e8e3b93cb61f119e2ecc8933fa47fa/databricks_bundles-0.260.0-py3-none-any.whl", hash = "sha256:b49eb277f7a52b227e2352970cd6e5f1e67654c33030120e525585e8ba110ff9", size = 132640, upload-time = "2025-07-16T16:00:28.047Z" },
]
-[[package]]
-name = "databricks-connect"
-version = "16.3.5"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "databricks-sdk" },
- { name = "googleapis-common-protos" },
- { name = "grpcio" },
- { name = "grpcio-status" },
- { name = "numpy" },
- { name = "packaging" },
- { name = "pandas" },
- { name = "py4j" },
- { name = "pyarrow" },
- { name = "setuptools" },
- { name = "six" },
-]
-wheels = [
- { url = "https://files.pythonhosted.org/packages/a2/ed/15e6982c7bed052c78b207eea59dda9ae618c5cbf677acc31a44a7338ae3/databricks_connect-16.3.5-py2.py3-none-any.whl", hash = "sha256:9a6794e38346d13807199660b9af58851f4d38b04aeba0d5843a1e1529b0aeea", size = 2425557, upload-time = "2025-07-11T11:37:01.302Z" },
-]
-
[[package]]
name = "databricks-sdk"
version = "0.47.0"
@@ -521,60 +482,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", size = 216137, upload-time = "2025-06-04T18:04:55.573Z" },
]
-[[package]]
-name = "googleapis-common-protos"
-version = "1.70.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "protobuf" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" },
-]
-
-[[package]]
-name = "grpcio"
-version = "1.73.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/79/e8/b43b851537da2e2f03fa8be1aef207e5cbfb1a2e014fbb6b40d24c177cd3/grpcio-1.73.1.tar.gz", hash = "sha256:7fce2cd1c0c1116cf3850564ebfc3264fba75d3c74a7414373f1238ea365ef87", size = 12730355, upload-time = "2025-06-26T01:53:24.622Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b8/41/456caf570c55d5ac26f4c1f2db1f2ac1467d5bf3bcd660cba3e0a25b195f/grpcio-1.73.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:921b25618b084e75d424a9f8e6403bfeb7abef074bb6c3174701e0f2542debcf", size = 5334621, upload-time = "2025-06-26T01:52:23.602Z" },
- { url = "https://files.pythonhosted.org/packages/2a/c2/9a15e179e49f235bb5e63b01590658c03747a43c9775e20c4e13ca04f4c4/grpcio-1.73.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:277b426a0ed341e8447fbf6c1d6b68c952adddf585ea4685aa563de0f03df887", size = 10601131, upload-time = "2025-06-26T01:52:25.691Z" },
- { url = "https://files.pythonhosted.org/packages/0c/1d/1d39e90ef6348a0964caa7c5c4d05f3bae2c51ab429eb7d2e21198ac9b6d/grpcio-1.73.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:96c112333309493c10e118d92f04594f9055774757f5d101b39f8150f8c25582", size = 5759268, upload-time = "2025-06-26T01:52:27.631Z" },
- { url = "https://files.pythonhosted.org/packages/8a/2b/2dfe9ae43de75616177bc576df4c36d6401e0959833b2e5b2d58d50c1f6b/grpcio-1.73.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f48e862aed925ae987eb7084409a80985de75243389dc9d9c271dd711e589918", size = 6409791, upload-time = "2025-06-26T01:52:29.711Z" },
- { url = "https://files.pythonhosted.org/packages/6e/66/e8fe779b23b5a26d1b6949e5c70bc0a5fd08f61a6ec5ac7760d589229511/grpcio-1.73.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a6c2cce218e28f5040429835fa34a29319071079e3169f9543c3fbeff166d2", size = 6003728, upload-time = "2025-06-26T01:52:31.352Z" },
- { url = "https://files.pythonhosted.org/packages/a9/39/57a18fcef567784108c4fc3f5441cb9938ae5a51378505aafe81e8e15ecc/grpcio-1.73.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:65b0458a10b100d815a8426b1442bd17001fdb77ea13665b2f7dc9e8587fdc6b", size = 6103364, upload-time = "2025-06-26T01:52:33.028Z" },
- { url = "https://files.pythonhosted.org/packages/c5/46/28919d2aa038712fc399d02fa83e998abd8c1f46c2680c5689deca06d1b2/grpcio-1.73.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0a9f3ea8dce9eae9d7cb36827200133a72b37a63896e0e61a9d5ec7d61a59ab1", size = 6749194, upload-time = "2025-06-26T01:52:34.734Z" },
- { url = "https://files.pythonhosted.org/packages/3d/56/3898526f1fad588c5d19a29ea0a3a4996fb4fa7d7c02dc1be0c9fd188b62/grpcio-1.73.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:de18769aea47f18e782bf6819a37c1c528914bfd5683b8782b9da356506190c8", size = 6283902, upload-time = "2025-06-26T01:52:36.503Z" },
- { url = "https://files.pythonhosted.org/packages/dc/64/18b77b89c5870d8ea91818feb0c3ffb5b31b48d1b0ee3e0f0d539730fea3/grpcio-1.73.1-cp312-cp312-win32.whl", hash = "sha256:24e06a5319e33041e322d32c62b1e728f18ab8c9dbc91729a3d9f9e3ed336642", size = 3668687, upload-time = "2025-06-26T01:52:38.678Z" },
- { url = "https://files.pythonhosted.org/packages/3c/52/302448ca6e52f2a77166b2e2ed75f5d08feca4f2145faf75cb768cccb25b/grpcio-1.73.1-cp312-cp312-win_amd64.whl", hash = "sha256:303c8135d8ab176f8038c14cc10d698ae1db9c480f2b2823f7a987aa2a4c5646", size = 4334887, upload-time = "2025-06-26T01:52:40.743Z" },
- { url = "https://files.pythonhosted.org/packages/37/bf/4ca20d1acbefabcaba633ab17f4244cbbe8eca877df01517207bd6655914/grpcio-1.73.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:b310824ab5092cf74750ebd8a8a8981c1810cb2b363210e70d06ef37ad80d4f9", size = 5335615, upload-time = "2025-06-26T01:52:42.896Z" },
- { url = "https://files.pythonhosted.org/packages/75/ed/45c345f284abec5d4f6d77cbca9c52c39b554397eb7de7d2fcf440bcd049/grpcio-1.73.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:8f5a6df3fba31a3485096ac85b2e34b9666ffb0590df0cd044f58694e6a1f6b5", size = 10595497, upload-time = "2025-06-26T01:52:44.695Z" },
- { url = "https://files.pythonhosted.org/packages/a4/75/bff2c2728018f546d812b755455014bc718f8cdcbf5c84f1f6e5494443a8/grpcio-1.73.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:052e28fe9c41357da42250a91926a3e2f74c046575c070b69659467ca5aa976b", size = 5765321, upload-time = "2025-06-26T01:52:46.871Z" },
- { url = "https://files.pythonhosted.org/packages/70/3b/14e43158d3b81a38251b1d231dfb45a9b492d872102a919fbf7ba4ac20cd/grpcio-1.73.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0bf15f629b1497436596b1cbddddfa3234273490229ca29561209778ebe182", size = 6415436, upload-time = "2025-06-26T01:52:49.134Z" },
- { url = "https://files.pythonhosted.org/packages/e5/3f/81d9650ca40b54338336fd360f36773be8cb6c07c036e751d8996eb96598/grpcio-1.73.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab860d5bfa788c5a021fba264802e2593688cd965d1374d31d2b1a34cacd854", size = 6007012, upload-time = "2025-06-26T01:52:51.076Z" },
- { url = "https://files.pythonhosted.org/packages/55/f4/59edf5af68d684d0f4f7ad9462a418ac517201c238551529098c9aa28cb0/grpcio-1.73.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ad1d958c31cc91ab050bd8a91355480b8e0683e21176522bacea225ce51163f2", size = 6105209, upload-time = "2025-06-26T01:52:52.773Z" },
- { url = "https://files.pythonhosted.org/packages/e4/a8/700d034d5d0786a5ba14bfa9ce974ed4c976936c2748c2bd87aa50f69b36/grpcio-1.73.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f43ffb3bd415c57224c7427bfb9e6c46a0b6e998754bfa0d00f408e1873dcbb5", size = 6753655, upload-time = "2025-06-26T01:52:55.064Z" },
- { url = "https://files.pythonhosted.org/packages/1f/29/efbd4ac837c23bc48e34bbaf32bd429f0dc9ad7f80721cdb4622144c118c/grpcio-1.73.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:686231cdd03a8a8055f798b2b54b19428cdf18fa1549bee92249b43607c42668", size = 6287288, upload-time = "2025-06-26T01:52:57.33Z" },
- { url = "https://files.pythonhosted.org/packages/d8/61/c6045d2ce16624bbe18b5d169c1a5ce4d6c3a47bc9d0e5c4fa6a50ed1239/grpcio-1.73.1-cp313-cp313-win32.whl", hash = "sha256:89018866a096e2ce21e05eabed1567479713ebe57b1db7cbb0f1e3b896793ba4", size = 3668151, upload-time = "2025-06-26T01:52:59.405Z" },
- { url = "https://files.pythonhosted.org/packages/c2/d7/77ac689216daee10de318db5aa1b88d159432dc76a130948a56b3aa671a2/grpcio-1.73.1-cp313-cp313-win_amd64.whl", hash = "sha256:4a68f8c9966b94dff693670a5cf2b54888a48a5011c5d9ce2295a1a1465ee84f", size = 4335747, upload-time = "2025-06-26T01:53:01.233Z" },
-]
-
-[[package]]
-name = "grpcio-status"
-version = "1.71.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "googleapis-common-protos" },
- { name = "grpcio" },
- { name = "protobuf" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/fd/d1/b6e9877fedae3add1afdeae1f89d1927d296da9cf977eca0eb08fb8a460e/grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50", size = 13677, upload-time = "2025-06-28T04:24:05.426Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/67/58/317b0134129b556a93a3b0afe00ee675b5657f0155509e22fcb853bafe2d/grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3", size = 14424, upload-time = "2025-06-28T04:23:42.136Z" },
-]
-
[[package]]
name = "idna"
version = "3.10"
@@ -1406,15 +1313,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221, upload-time = "2022-08-13T16:22:44.457Z" },
]
-[[package]]
-name = "setuptools"
-version = "80.9.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
-]
-
[[package]]
name = "six"
version = "1.17.0"