-
Notifications
You must be signed in to change notification settings - Fork 0
QDB-18913 - Add buildkite pipeline #120
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
ffe06f3
7b4d1a5
f52ee5c
3849355
114c83b
1991b8d
8b62202
cb8c484
517be40
8383179
398dd06
e3f9747
5a4f33d
0e08aa2
828c8de
9f704ae
139760a
80ed4c2
d09e8e4
2f0f90f
ee2bdca
de77703
9f0e00a
867a744
76f7f92
f5a8246
9052c28
6e70977
c803b59
2a8f102
ac1c819
851da3d
ae6755a
8cec77a
c26f28d
108dfc9
3377eaa
f89156b
0bf70ae
579cecc
130ee48
03a3b4a
af52089
e2d972f
6bd48c3
f731e75
191d94d
3371980
fbd5398
7d585af
ec0f164
7eebc4c
073d2f2
2cb3af6
c47b569
adb6b05
a0a3050
52c2842
ec5243e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,11 @@ | ||
# This hook ensures that we don't leave qdbd cluster running even if one of the steps in the pipeline fails.
# It will stop the cluster if it is running, and ignore any errors that may occur during the stopping process.

SCRIPT_PATH="scripts/tests/setup/stop-services.sh"

if [ -f "$SCRIPT_PATH" ]; then
    # Best-effort: suppress output and never fail the hook, but only claim
    # success when the stop script actually exited cleanly (the previous
    # version printed "Stopped services" even when the script failed).
    if bash "$SCRIPT_PATH" > /dev/null 2>&1; then
        echo "Stopped services"
    else
        echo "stop-services failed (ignored)"
    fi
fi

exit 0
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,219 @@ | ||
| #!/usr/bin/env python3 | ||
| """Buildkite dynamic pipeline generator for qdb-api-python. | ||
|
|
||
| Step templates in steps/*.yml define nearly-complete Buildkite steps with | ||
| {placeholder} variables. This script loads them, substitutes variables, and | ||
| overlays environment variables and the Docker plugin per platform. | ||
|
|
||
| Usage: | ||
| python3 pipeline.py # emit pipeline YAML to stdout | ||
| python3 pipeline.py check # validate without emitting | ||
| """ | ||
| from __future__ import annotations | ||
|
|
||
| import dataclasses | ||
| import sys | ||
| from pathlib import Path | ||
|
|
||
| from buildkite_sdk import Pipeline, GroupStep | ||
|
|
||
| sys.path.insert(0, str(Path(__file__).parent / "tools")) | ||
| from qdb_pipeline import ( | ||
| Platform, | ||
| apply_docker_compose, | ||
| load_template, | ||
| merge_env, | ||
| select_platforms, | ||
| validate_pipeline, | ||
| get_git_ref, | ||
| set_artifact_plugin_options, | ||
| ) # noqa: E402 | ||
|
|
||
# Directory containing the per-step YAML templates consumed by load_template().
STEPS_DIR = Path(__file__).parent / "steps"


# Quasardb-specific toolchain overlays on top of shared infrastructure platforms.
# Currently empty; kept per-OS so toolchain pins can be added later without
# touching the PLATFORMS construction below.
_LINUX = dict()
_WIN = dict()
_MACOS = dict()


_OS_OVERLAY = {"linux": _LINUX, "windows": _WIN, "macos": _MACOS}
# Concrete build platforms: shared definitions selected by slug, with any
# per-OS overlay fields replaced on top (dataclasses.replace keeps the rest).
PLATFORMS: list[Platform] = [
    dataclasses.replace(p, **_OS_OVERLAY.get(p.os, {}))
    for p in select_platforms(
        "linux-amd64-core2",
        "linux-aarch64",
        "windows-amd64-core2",
        "macos-aarch64",
    )
]


# CMake build configurations each platform/python combination is built with.
BUILD_TYPES = ["Release", "Debug"]


# CPython versions the wheel is built and tested against.
PYTHON_VERSIONS = [
    "3.9",
    "3.10",
    "3.11",
    "3.12",
    "3.13",
    "3.14",
]


# Environment variable layering: global → step → os → os+step → platform compilers.
GLOBAL_ENV: dict[str, str] = {
    "AWS_DEFAULT_REGION": "eu-west-1",
    "JUNIT_XML_FILE": "build/test/pytest.xml",
    "QDB_ENCRYPT_TRAFFIC": "1",
}


# Per-step-name overrides (none needed yet; see _env for lookup).
STEP_ENV: dict[str, dict[str, str]] = {}


# Per-OS defaults. Windows/macOS are left empty here because their Python
# interpreter is resolved per-version at step build time in
# _get_agent_python_env().
OS_ENV: dict[str, dict[str, str]] = {
    "linux": {
        "PYTHON_EXECUTABLE": "/usr/bin/python3",
        "PYTHON_CMD": "python3",
    },
    "freebsd": {
        "PYTHON_EXECUTABLE": "/usr/bin/python3",
        "PYTHON_CMD": "python3",
    },
    "macos": {},
    "windows": {},
}


# Overrides keyed by "<os>/<step>" (none needed yet; see _env for lookup).
OS_STEP_ENV: dict[str, dict[str, str]] = {}


# Per-CPU additions; ARCH is consumed by docker/docker-compose.yml
# (build arg `ARCH=${ARCH-x86_64}`).
CPU_ENV: dict[str, dict[str, str]] = {
    "aarch64": {"ARCH": "aarch64"},
}
|
|
||
|
|
||
def _env(p: Platform, step_name: str, build_type: str) -> dict[str, str]:
    """Compose the full environment dict for one step.

    Layers are passed to merge_env lowest-precedence first: global,
    per-step, per-OS, per-OS/step, per-CPU, then the CMake build type.
    """
    layers = (
        GLOBAL_ENV,
        STEP_ENV.get(step_name, {}),
        OS_ENV.get(p.os, {}),
        OS_STEP_ENV.get(f"{p.os}/{step_name}", {}),
        CPU_ENV.get(p.cpu, {}),
        {"CMAKE_BUILD_TYPE": build_type},
    )
    return merge_env(*layers, platform=p)
|
|
||
|
|
||
| def _get_agent_python_env(platform: Platform, python_version: str) -> dict[str, str]: | ||
| """ | ||
| Returns environment variables to set for Python executable on the agent, based on platform and python version. | ||
| Applies to Windows and macOS where we have multiple Python versions installed in different locations. | ||
| """ | ||
| python_version_slug = python_version.replace(".", "") | ||
| if platform.os == "windows": | ||
| return { | ||
| "PYTHON_EXECUTABLE": f"$$QDB_CICD_AGENT_PYTHON_{python_version_slug}_64_EXE", | ||
| "PYTHON_CMD": f"$$QDB_CICD_AGENT_PYTHON_{python_version_slug}_64_EXE", | ||
| } | ||
| elif platform.os == "macos": | ||
| return { | ||
| "PYTHON_EXECUTABLE": f"$$QDB_CICD_AGENT_PYTHON_{python_version_slug}_PATH", | ||
| } | ||
| return {} | ||
|
|
||
|
|
||
| def _apply_doc_command(step: dict, platform: Platform) -> None: | ||
| """ | ||
| Adds a command to the step to generate documentation using pdoc to linux-amd64-core2 platform builds. | ||
| """ | ||
| if platform.os == "linux" and platform.arch == "amd64" and platform.cpu == "core2": | ||
| doc_commands = [ | ||
| 'echo "+++ Build documentation"', | ||
| "bash scripts/teamcity/30.doc.sh", | ||
| ] | ||
| existing_commands = step.get("commands", []) | ||
| existing_commands += doc_commands | ||
|
|
||
|
|
||
def generate_pipeline() -> Pipeline:
    """Load templates, expand across platforms × build_types, overlay env and docker."""
    pipeline = Pipeline()
    git_ref = get_git_ref()
    # Steps grouped by "<platform> <build type>" label; one GroupStep each.
    group_steps = {}

    # Cartesian expansion: one build step per platform × build type × python.
    for p in PLATFORMS:
        for bt in BUILD_TYPES:
            for py in PYTHON_VERSIONS:
                slug = p.slug(bt.lower(), f"py{py.replace('.', '')}")

                # We want to use Release QuasarDB binaries when building Python API (debug and release)
                dependency_slug = p.slug("release")

                # Template placeholder values substituted into steps/_build.yml.
                tvars = {
                    "slug": slug,
                    "queue": f"{p.queue_os}-{p.arch}",
                    "name": slug.replace("-", " ").title(),
                }

                # Artifact plugin configuration: upload/promote under this
                # step's own variant; download the Release server binaries.
                artifact_vars_per_step = {
                    "upload": {"variant": slug, "git-ref": git_ref},
                    "promote": {"variant": slug, "git-ref": git_ref},
                    "download": {
                        "variant": dependency_slug,
                        "git-ref": git_ref,
                    },
                }

                compose_config = {
                    "run": "pypa",
                    "config": "docker/docker-compose.yml",
                    "propagate-uid-gid": True,
                }

                step = load_template(STEPS_DIR / "_build.yml", **tvars)
                # Env precedence (later update() wins): layered env, then the
                # template's own env, then the python version and the agent's
                # per-version interpreter selection.
                env = _env(p, "test", bt)
                env.update(step.get("env") or {})
                env.update({"PYTHON_VERSION": py})
                env.update(_get_agent_python_env(p, py))
                step["env"] = env
                # Only linux builds run inside the docker-compose pypa service.
                if p.os == "linux":
                    apply_docker_compose(step, config=compose_config)
                set_artifact_plugin_options(step, artifact_vars_per_step)
                _apply_doc_command(step, p)

                # add step to group
                group_name = p.slug(bt.lower()).replace("-", " ").title()
                if group_name not in group_steps:
                    group_steps[group_name] = []
                group_steps[group_name].append(step)

    # create groups and add to pipeline
    for group, steps in group_steps.items():
        group_step = GroupStep(group=group, steps=steps)
        pipeline.add_step(group_step)

    return pipeline
|
|
||
|
|
||
def main() -> None:
    """CLI entry point: ``generate`` emits pipeline YAML, ``check`` validates only."""
    if len(sys.argv) > 1:
        command = sys.argv[1]
    else:
        command = "generate"

    try:
        pipeline = generate_pipeline()
    except Exception as e:
        print(f"[FAIL] Pipeline generation failed: {e}", file=sys.stderr)
        sys.exit(1)

    if command == "generate":
        print(pipeline.to_yaml())
        return

    if command == "check":
        errors = validate_pipeline(pipeline)
        if errors:
            for e in errors:
                print(f"[FAIL] {e}", file=sys.stderr)
            sys.exit(1)
        print(f"[OK] Pipeline valid: {len(pipeline.steps)} steps")
        return

    # Unrecognized sub-command: report usage and fail.
    print(f"Unknown command: {command}", file=sys.stderr)
    print("Usage: pipeline.py [generate|check]", file=sys.stderr)
    sys.exit(1)


if __name__ == "__main__":
    main()
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,2 @@ | ||
| buildkite-sdk==0.8.0 | ||
| -r tools/requirements.txt |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,36 @@ | ||
# Step template for building and testing the Python API on one platform.
# {placeholder} variables are substituted by pipeline.py (load_template).
agents:
  queue: "default-{queue}"

retry:
  automatic:
    limit: 3

label: "{name} ({slug})"
key: "build-{slug}"

commands:
  # Build first, then start the qdbd services and run the tests against the
  # freshly built wheel — the script prefixes (10.build / 20.test) encode
  # this order. The echo markers must start with a bare `+++` for Buildkite
  # log grouping, so they are not wrapped in escaped quotes.
  - echo "+++ Run build"
  - bash scripts/cicd/10.build.sh
  - echo "+++ Start Services"
  - bash scripts/tests/setup/start-services.sh
  - echo "+++ Run Tests"
  - bash scripts/cicd/20.test.sh
  - echo "+++ Stop Services"
  - bash scripts/tests/setup/stop-services.sh

plugins:
  - bureau14/qdb-artifacts#master:
      # Pull the Release quasardb binaries this build links/tests against.
      download:
        project_id: "quasardb"
        output-dir: "qdb"
        extract: true
        clean: true
        files:
          - "*-c-api.tar.zst!*"
          - "*-server.tar.zst!*"
          - "*-utils.tar.zst!*"
      # Publish the built wheel and the documentation tarball.
      upload:
        files:
          - "dist/quasardb-*.whl"
          - "dist/doc.tar.gz"
      promote: {}
| Original file line number | Diff line number | Diff line change | ||||||
|---|---|---|---|---|---|---|---|---|
|
|
@@ -6,6 +6,22 @@ FROM quay.io/pypa/$PLATFORM:${TAG} | |||||||
|
|
||||||||
ARG PYTHON_VERSION

# Select the requested CPython inside the pypa base image, then remove the
# helper script so it is not left in the final image.
ADD set-python-version.sh /set-python-version.sh
RUN bash /set-python-version.sh ${PYTHON_VERSION} \
    && rm /set-python-version.sh

# We need to ensure that the container user has the same UID and GID as the host
# buildkite agent to use the `propagate-uid-gid` feature of the docker plugin.
# The default UID/GID matches the buildkite agent on the host; override if needed.
ARG USER_ID=929
ARG GROUP_ID=929
ARG USERNAME=builder
ARG HOME=/home/${USERNAME}
ARG COMMENT=builder

# Install packages required for running start-services inside the container.
# Clean the yum cache in the same layer so it is not baked into the image.
RUN yum install -y lsof \
    && yum clean all

# Create the build group and user in a single layer.
RUN groupadd --gid $GROUP_ID $USERNAME \
    && useradd --comment "$COMMENT" --home-dir $HOME --create-home --system --uid $USER_ID --gid $GROUP_ID $USERNAME

USER $USERNAME

# Make non-interactive bash invocations pick up the user's environment.
ENV BASH_ENV=/home/builder/.bashrc
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,11 @@ | ||
# Compose service used by the Buildkite docker-compose plugin (see pipeline.py).
services:
  pypa:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        # Forwarded from the step environment set by pipeline.py.
        - PYTHON_VERSION=${PYTHON_VERSION}
        # Defaults to x86_64; set to aarch64 via CPU_ENV in pipeline.py.
        - ARCH=${ARCH-x86_64}
    # Mount the repository (one directory up) as the build workspace.
    volumes:
      - ../:/workdir
    working_dir: /workdir
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,6 @@ | ||
#!/usr/bin/env bash

# Source the user's .bashrc (wired up as BASH_ENV in the Docker image) so
# non-interactive shells pick up the configured environment.
# "$HOME" is quoted so the test does not break on paths containing spaces.
if [ -f "$HOME/.bashrc" ]
then
    source "$HOME/.bashrc"
fi
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,56 @@ | ||
#!/bin/bash

# Build the quasardb Python API wheel inside a clean virtualenv and, on
# manylinux, relabel platform wheels with auditwheel.

SCRIPT_DIR="$(cd "$(dirname -- "${BASH_SOURCE[0]}")" >/dev/null && pwd)"

# Quoted so a workspace path containing spaces does not split the argument.
source "${SCRIPT_DIR}/00.common.sh"

# Allow git to operate on the bind-mounted workdir owned by a different UID.
git config --global --add safe.directory '*'

# No more errors should occur after here
set -e -u -x

PYTHON="${PYTHON_CMD:-python3}"

# Now use a virtualenv to run the tests. If the virtualenv already exists, we
# remove it to ensure a clean install (--clear).
VENV_DIR="${SCRIPT_DIR}/../../.env"
${PYTHON} -m venv --clear "${VENV_DIR}"
if [[ "$(uname)" == MINGW* ]]
then
    VENV_PYTHON="${VENV_DIR}/Scripts/python.exe"
else
    VENV_PYTHON="${VENV_DIR}/bin/python"
fi

${VENV_PYTHON} --version

# Repair a platform-specific wheel in place: rewrite its platform tag and
# vendor shared libraries into it. Non-platform wheels are left untouched.
function relabel_wheel {
    wheel="$1"

    if ! ${VENV_PYTHON} -m auditwheel show "$wheel"
    then
        echo "Skipping non-platform specific wheel $wheel"
    else
        # ${AUDITWHEEL_PLAT} is defined in manylinux base docker image
        ${VENV_PYTHON} -m auditwheel repair "$wheel" --plat "$AUDITWHEEL_PLAT" -w dist/
        rm "$wheel"
    fi
}

rm -r -f build/ dist/

# NOTE(review): this matches the *command name* in PYTHON_CMD, not the actual
# interpreter version — on macOS pipeline.py selects the interpreter via
# PYTHON_EXECUTABLE, so confirm this 3.9 special case still triggers there.
# "$PYTHON" is quoted so set -u/word-splitting cannot break the comparison.
if [[ "$OSTYPE" == "darwin"* && "$PYTHON" == "python3.9"* ]]; then
    ${VENV_PYTHON} -m pip install --upgrade setuptools==63.0.0b1 wheel
else
    ${VENV_PYTHON} -m pip install --upgrade setuptools wheel auditwheel
fi

${VENV_PYTHON} -m pip install -r dev-requirements.txt

export DISTUTILS_DEBUG=1
export QDB_TESTS_ENABLED=OFF

${VENV_PYTHON} -m build -w

for whl in dist/*.whl; do
    relabel_wheel "$whl"
done
Uh oh!
There was an error while loading. Please reload this page.