diff --git a/.aspect/axl.axl b/.aspect/axl.axl index 6e7ee1641..a0c377c89 100644 --- a/.aspect/axl.axl +++ b/.aspect/axl.axl @@ -377,6 +377,23 @@ def test_http(ctx: TaskContext, tc: int, temp_dir: str) -> int: return tc +def test_http_get_post(ctx: TaskContext, tc: int, temp_dir: str) -> int: + # Test HTTP get and post methods including unix_socket support + + # Test 1: Basic HTTP GET request (reuse same URL as download test) + url = "https://raw.githubusercontent.com/aspect-build/aspect-cli/refs/heads/main/LICENSE" + resp1 = ctx.http().get(url = url).block() + tc = test_case(tc, resp1.status == 200, "http.get should return status 200") + tc = test_case(tc, "Apache License" in resp1.body, "http.get body should contain Apache License text") + tc = test_case(tc, type(resp1.headers) == "list", "http.get headers should be a list") + tc = test_case(tc, len(resp1.headers) > 0, "http.get headers should not be empty") + + # Test 2: HTTP GET with custom headers + resp2 = ctx.http().get(url = url, headers = {"User-Agent": "AXL-Test"}).block() + tc = test_case(tc, resp2.status == 200, "http.get with custom headers should return status 200") + + return tc + def impl(ctx: TaskContext) -> int: tc = 0 @@ -422,6 +439,7 @@ def impl(ctx: TaskContext) -> int: tc = test_build_events(ctx, tc, temp_dir) tc = test_large_bes(ctx, tc, temp_dir) tc = test_http(ctx, tc, temp_dir) + tc = test_http_get_post(ctx, tc, temp_dir) print(tc, "tests passed") return 0 diff --git a/.aspect/user-task.axl b/.aspect/user-task.axl index d85135450..b0fd7ce33 100644 --- a/.aspect/user-task.axl +++ b/.aspect/user-task.axl @@ -26,5 +26,5 @@ user_task = task( group = ["user"], implementation = _impl, args = {}, - config = UserTaskConfig(), + config = UserTaskConfig, ) diff --git a/.bazelignore b/.bazelignore index f41c42fd7..b2b72506d 100644 --- a/.bazelignore +++ b/.bazelignore @@ -1,3 +1,4 @@ # Cargo output directory target/ -bazel/proto/ \ No newline at end of file +bazel/proto/ +examples/ diff --git 
a/.bazelrc b/.bazelrc index c64522bab..d53f6d766 100644 --- a/.bazelrc +++ b/.bazelrc @@ -6,6 +6,8 @@ common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1 # Check local dev setup common --workspace_status_command=tools/githooks/check-config.sh +common --toolchain_resolution_debug='\Q@@rules_rs++rules_rust+rules_rust//rust:toolchain_type\E' + common:macos --build_tag_filters=-no-macos common:linux --host_platform=//bazel/platforms:linux_host_platform diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command new file mode 100755 index 000000000..8433bcbe5 --- /dev/null +++ b/.buildkite/hooks/pre-command @@ -0,0 +1,51 @@ +#!/bin/sh +set -eu + +ASPECT_BIN_DIR="/home/aspect-runner/.aspect/bin" + +# Bazel startup options for CI +BAZEL_STARTUP_OPTS="--nohome_rc --output_user_root=/mnt/ephemeral/bazel/aspect-cli/__main__ --output_base=/mnt/ephemeral/output/aspect-cli/__main__" +BAZEL_BUILD_OPTS="--config=workflows --config=ci --show_progress_rate_limit=1" + +# Skip if bazel is not installed +if ! command -v bazel >/dev/null 2>&1; then + echo "DEBUG: bazel not found, skipping" + exit 0 +fi + +# Generate workflows bazelrc +rosetta bazelrc > /etc/bazel.bazelrc + +# Create target directory +mkdir -p "$ASPECT_BIN_DIR" + +# Get current commit +CURRENT_COMMIT="$(git rev-parse HEAD)" + +# Check if crates/ changed between two commits +crates_changed() { + local old_commit="$1" + local new_commit="$2" + git diff --name-only "$old_commit" "$new_commit" -- crates/ | grep -q . +} + +# Build and install aspect-cli if not present or crates/ changed +ASPECT_COMMIT_FILE="$ASPECT_BIN_DIR/aspect.commit" +CACHED_COMMIT="" +if [ -f "$ASPECT_COMMIT_FILE" ]; then + CACHED_COMMIT="$(cat "$ASPECT_COMMIT_FILE")" +fi + +if [ -x "$ASPECT_BIN_DIR/aspect" ] && [ -n "$CACHED_COMMIT" ] && ! 
crates_changed "$CACHED_COMMIT" "$CURRENT_COMMIT"; then + echo "DEBUG: Found existing aspect binary (no crates/ changes since $CACHED_COMMIT)" +else + echo "--- Building aspect-cli" + bazel $BAZEL_STARTUP_OPTS build $BAZEL_BUILD_OPTS -c dbg --remote_download_toplevel //:cli + + CLI_PATH="$(bazel $BAZEL_STARTUP_OPTS cquery $BAZEL_BUILD_OPTS -c dbg --output=files //:cli 2>/dev/null)" + ls -la "$CLI_PATH" 2>&1 || echo "DEBUG: File does not exist!" + cp -f "$CLI_PATH" "$ASPECT_BIN_DIR/aspect" + chmod 0755 "$ASPECT_BIN_DIR/aspect" + echo "$CURRENT_COMMIT" > "$ASPECT_COMMIT_FILE" + echo "aspect-cli installed to $ASPECT_BIN_DIR/aspect (commit: $CURRENT_COMMIT)" +fi diff --git a/.buildkite/pipeline.yaml b/.buildkite/pipeline.yaml new file mode 100644 index 000000000..6e953495f --- /dev/null +++ b/.buildkite/pipeline.yaml @@ -0,0 +1,51 @@ +# Generated by: aspect workflows migrate --host=buildkite +# Source: .aspect/workflows/config_aws.yaml +# DO NOT EDIT - regenerate with 'aspect workflows migrate' + +env: + PATH: "/home/aspect-runner/.aspect/bin:$PATH" + ASPECT_DEBUG: "1" +steps: + - key: __main__::debug + label: ":bazel: Debug" + agents: + queue: aspect-default + timeout_in_minutes: 20 + command: + - cat /etc/aspect/workflows/platform/rosetta_api_tokens + - echo "" + - cat /etc/aspect/workflows/platform/api_client_id + - echo "" + - cat /etc/aspect/workflows/platform/api_key + - echo "" + - cat /etc/aspect/workflows/platform/brs_api_endpoint + - echo "" + - cat /etc/aspect/workflows/platform/az + - echo "" + - ls /etc/aspect/workflows/bin + - key: __main__::build + label: ":bazel: Build" + agents: + queue: aspect-default + timeout_in_minutes: 20 + command: + - | + echo "--- :aspect-build: Workflows environment" + /etc/aspect/workflows/bin/configure_workflows_env + echo "--- :stethoscope: Agent health check" + /etc/aspect/workflows/bin/agent_health_check + - ls /etc/aspect/workflows/platform + - aspect build ... 
+ - key: __main__::delivery + label: ":ship: :package: :bazel: Delivery" + agents: + queue: aspect-default + timeout_in_minutes: 20 + command: + - | + echo "--- :aspect-build: Workflows environment" + /etc/aspect/workflows/bin/configure_workflows_env + echo "--- :stethoscope: Agent health check" + /etc/aspect/workflows/bin/agent_health_check + - ls /tmp + - aspect delivery --build_url $BUILDKITE_BUILD_URL --commit_sha $BUILDKITE_COMMIT --force_target //:hello -- //:hello //:hello2 diff --git a/.gitignore b/.gitignore index f6e04c951..2eb9433bf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +*.swp .jj .envrc.user workflow-samples @@ -13,3 +14,6 @@ site # macOS desktop services files .DS_Store + +.workflows + diff --git a/BUILD.bazel b/BUILD.bazel index d38de46e3..277f77db9 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -3,6 +3,13 @@ alias( actual = "//crates/aspect-launcher", ) +genrule( + name = "hello", + srcs = [], + outs = ["hello.txt"], + cmd = "echo 'Hello, World!' > $@; exit 1", +) + alias( name = "cli", actual = "//crates/aspect-cli", diff --git a/Cargo.lock b/Cargo.lock index 1d7ee3788..47cfdc704 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -301,7 +301,6 @@ version = "0.0.0-dev" dependencies = [ "allocative", "anyhow", - "derive_more 2.0.1", "display_container", "prost", "prost-build", @@ -310,6 +309,7 @@ dependencies = [ "starbuf-types", "starlark", "starlark_derive", + "tokio", "tonic", "tonic-build", "tonic-prost", @@ -340,6 +340,10 @@ dependencies = [ "galvanize", "getargs", "handlebars", + "http-body-util", + "hyper", + "hyper-util", + "hyperlocal", "liquid", "liquid-core", "minijinja", @@ -347,6 +351,7 @@ dependencies = [ "prost", "rand 0.8.5", "reqwest", + "semver", "serde_json", "sha256", "ssri", @@ -359,6 +364,7 @@ dependencies = [ "tokio-stream", "tonic", "tracing", + "url", "uuid", "wasmi", "wasmi_wasi", @@ -1767,6 +1773,21 @@ dependencies = [ "windows-registry", ] +[[package]] +name = "hyperlocal" +version = "0.9.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7" +dependencies = [ + "hex", + "http-body-util", + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "iana-time-zone" version = "0.1.63" @@ -3683,6 +3704,12 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + [[package]] name = "serde" version = "1.0.227" diff --git a/Cargo.toml b/Cargo.toml index 5ce3ab877..73aeb0629 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = ["crates/*"] -exclude = ["crates/example-tui"] +exclude = ["crates/example-tui", "examples/lint"] default-members = ["crates/aspect-cli"] resolver = "2" diff --git a/MODULE.aspect b/MODULE.aspect index d204f69bd..d292b70c5 100644 --- a/MODULE.aspect +++ b/MODULE.aspect @@ -10,13 +10,4 @@ axl_local_dep( auto_use_tasks = True, ) -axl_archive_dep( - name = "aspect_rules_lint", - urls = ["https://github.com/aspect-build/rules_lint/archive/65525d871f677071877d3ea1ec096499ff7dd147.tar.gz"], - integrity = "sha512-TGcxutWr8FwxrK3G+uthbEpuYM2oOVpHPOvaVPzLLuHkfPY0jn/GWFp9myQeFzDFsRZ4ilT0jAWfGZhTk/nesQ==", - strip_prefix = "rules_lint-65525d871f677071877d3ea1ec096499ff7dd147", - auto_use_tasks = True, - dev = True, -) - -use_task(".aspect/user/user-task-manual.axl", "user_task_manual") +use_task(".aspect/user/user-task-manual.axl", "user_task_manual") \ No newline at end of file diff --git a/MODULE.bazel b/MODULE.bazel index dd094a17f..8c5a0f52e 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -8,14 +8,12 @@ bazel_dep(name = "rules_nodejs", version = "6.7.3") # Transitive dep; bump for bazel_dep(name = "bazel_skylib", version = "1.8.2") bazel_dep(name = "bazelrc-preset.bzl", version = "1.4.0") bazel_dep(name = 
"buildifier_prebuilt", version = "8.2.0.2") -bazel_dep(name = "gazelle", version = "0.45.0") bazel_dep(name = "platforms", version = "1.0.0") bazel_dep(name = "rules_cc", version = "0.2.16") -bazel_dep(name = "rules_go", version = "0.59.0") bazel_dep(name = "rules_pkg", version = "1.1.0") bazel_dep(name = "rules_shell", version = "0.6.1") bazel_dep(name = "with_cfg.bzl", version = "0.12.0") -bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.5.2") +bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.5.5") register_toolchains("@toolchains_llvm_bootstrapped//toolchain:all") diff --git a/MODULE.bazel.lock b/MODULE.bazel.lock index 11c2fc884..bce4ae46f 100644 --- a/MODULE.bazel.lock +++ b/MODULE.bazel.lock @@ -70,7 +70,8 @@ "https://bcr.bazel.build/modules/bazel_features/1.9.1/MODULE.bazel": "8f679097876a9b609ad1f60249c49d68bfab783dd9be012faf9d82547b14815a", "https://bcr.bazel.build/modules/bazel_lib/3.0.0-beta.1/MODULE.bazel": "407729e232f611c3270005b016b437005daa7b1505826798ea584169a476e878", "https://bcr.bazel.build/modules/bazel_lib/3.0.0/MODULE.bazel": "22b70b80ac89ad3f3772526cd9feee2fa412c2b01933fea7ed13238a448d370d", - "https://bcr.bazel.build/modules/bazel_lib/3.0.0/source.json": "895f21909c6fba01d7c17914bb6c8e135982275a1b18cdaa4e62272217ef1751", + "https://bcr.bazel.build/modules/bazel_lib/3.2.0/MODULE.bazel": "39b50d94b9be6bda507862254e20c263f9b950e3160112348d10a938be9ce2c2", + "https://bcr.bazel.build/modules/bazel_lib/3.2.0/source.json": "a6f45a903134bebbf33a6166dd42b4c7ab45169de094b37a85f348ca41170a84", "https://bcr.bazel.build/modules/bazel_skylib/1.0.3/MODULE.bazel": "bcb0fd896384802d1ad283b4e4eb4d718eebd8cb820b0a2c3a347fb971afd9d8", "https://bcr.bazel.build/modules/bazel_skylib/1.1.1/MODULE.bazel": "1add3e7d93ff2e6998f9e118022c84d163917d912f5afafb3058e3d2f1545b5e", "https://bcr.bazel.build/modules/bazel_skylib/1.2.0/MODULE.bazel": "44fe84260e454ed94ad326352a698422dbe372b21a1ac9f3eab76eb531223686", @@ -102,9 +103,7 @@ 
"https://bcr.bazel.build/modules/gazelle/0.32.0/MODULE.bazel": "b499f58a5d0d3537f3cf5b76d8ada18242f64ec474d8391247438bf04f58c7b8", "https://bcr.bazel.build/modules/gazelle/0.33.0/MODULE.bazel": "a13a0f279b462b784fb8dd52a4074526c4a2afe70e114c7d09066097a46b3350", "https://bcr.bazel.build/modules/gazelle/0.34.0/MODULE.bazel": "abdd8ce4d70978933209db92e436deb3a8b737859e9354fb5fd11fb5c2004c8a", - "https://bcr.bazel.build/modules/gazelle/0.36.0/MODULE.bazel": "e375d5d6e9a6ca59b0cb38b0540bc9a05b6aa926d322f2de268ad267a2ee74c0", - "https://bcr.bazel.build/modules/gazelle/0.45.0/MODULE.bazel": "ecd19ebe9f8e024e1ccffb6d997cc893a974bcc581f1ae08f386bdd448b10687", - "https://bcr.bazel.build/modules/gazelle/0.45.0/source.json": "111d182facc5f5e80f0b823d5f077b74128f40c3fd2eccc89a06f34191bd3392", + "https://bcr.bazel.build/modules/gazelle/0.34.0/source.json": "cdf0182297e3adabbdea2da88d5b930b2ee5e56511c3e7d6512069db6315a1f7", "https://bcr.bazel.build/modules/google_benchmark/1.8.2/MODULE.bazel": "a70cf1bba851000ba93b58ae2f6d76490a9feb74192e57ab8e8ff13c34ec50cb", "https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4", "https://bcr.bazel.build/modules/googletest/1.14.0.bcr.1/MODULE.bazel": "22c31a561553727960057361aa33bf20fb2e98584bc4fec007906e27053f80c6", @@ -138,7 +137,6 @@ "https://bcr.bazel.build/modules/protobuf/21.7/MODULE.bazel": "a5a29bb89544f9b97edce05642fac225a808b5b7be74038ea3640fae2f8e66a7", "https://bcr.bazel.build/modules/protobuf/27.0/MODULE.bazel": "7873b60be88844a0a1d8f80b9d5d20cfbd8495a689b8763e76c6372998d3f64c", "https://bcr.bazel.build/modules/protobuf/27.1/MODULE.bazel": "703a7b614728bb06647f965264967a8ef1c39e09e8f167b3ca0bb1fd80449c0d", - "https://bcr.bazel.build/modules/protobuf/29.0-rc2.bcr.1/MODULE.bazel": "52f4126f63a2f0bbf36b99c2a87648f08467a4eaf92ba726bc7d6a500bbf770c", "https://bcr.bazel.build/modules/protobuf/29.0-rc2/MODULE.bazel": 
"6241d35983510143049943fc0d57937937122baf1b287862f9dc8590fc4c37df", "https://bcr.bazel.build/modules/protobuf/29.0-rc3/MODULE.bazel": "33c2dfa286578573afc55a7acaea3cada4122b9631007c594bf0729f41c8de92", "https://bcr.bazel.build/modules/protobuf/29.1/MODULE.bazel": "557c3457560ff49e122ed76c0bc3397a64af9574691cb8201b4e46d4ab2ecb95", @@ -191,10 +189,7 @@ "https://bcr.bazel.build/modules/rules_go/0.39.1/MODULE.bazel": "d34fb2a249403a5f4339c754f1e63dc9e5ad70b47c5e97faee1441fc6636cd61", "https://bcr.bazel.build/modules/rules_go/0.41.0/MODULE.bazel": "55861d8e8bb0e62cbd2896f60ff303f62ffcb0eddb74ecb0e5c0cbe36fc292c8", "https://bcr.bazel.build/modules/rules_go/0.42.0/MODULE.bazel": "8cfa875b9aa8c6fce2b2e5925e73c1388173ea3c32a0db4d2b4804b453c14270", - "https://bcr.bazel.build/modules/rules_go/0.46.0/MODULE.bazel": "3477df8bdcc49e698b9d25f734c4f3a9f5931ff34ee48a2c662be168f5f2d3fd", - "https://bcr.bazel.build/modules/rules_go/0.53.0/MODULE.bazel": "a4ed760d3ac0dbc0d7b967631a9a3fd9100d28f7d9fcf214b4df87d4bfff5f9a", - "https://bcr.bazel.build/modules/rules_go/0.59.0/MODULE.bazel": "b7e43e7414a3139a7547d1b4909b29085fbe5182b6c58cbe1ed4c6272815aeae", - "https://bcr.bazel.build/modules/rules_go/0.59.0/source.json": "1df17bb7865cfc029492c30163cee891d0dd8658ea0d5bfdf252c4b6db5c1ef6", + "https://bcr.bazel.build/modules/rules_go/0.42.0/source.json": "33cd3d725806ad432753c4263ffd0459692010fdc940cce60b2c0e32282b45c5", "https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74", "https://bcr.bazel.build/modules/rules_java/5.3.5/MODULE.bazel": "a4ec4f2db570171e3e5eb753276ee4b389bae16b96207e9d3230895c99644b86", "https://bcr.bazel.build/modules/rules_java/6.0.0/MODULE.bazel": "8a43b7df601a7ec1af61d79345c17b31ea1fedc6711fd4abfd013ea612978e39", @@ -295,7 +290,8 @@ "https://bcr.bazel.build/modules/toolchain_utils/1.0.2/MODULE.bazel": "9b8be503a4fcfd3b8b952525bff0869177a5234d5c35dc3e566b9f5ca2f755a1", 
"https://bcr.bazel.build/modules/toolchain_utils/1.0.2/source.json": "88769ec576dddacafd8cca4631812cf8eead89f10a29d9405d9f7a553de6bf87", "https://bcr.bazel.build/modules/toolchains_llvm_bootstrapped/0.5.2/MODULE.bazel": "f7c822cea99caef928d7cbe695498096e53c4b2c0ea45997e9a64bf6b77b43b0", - "https://bcr.bazel.build/modules/toolchains_llvm_bootstrapped/0.5.2/source.json": "13d260b3a10804b3b2ab822c49e329c36ef5cd325fa01d0f9a1616c5364b7fff", + "https://bcr.bazel.build/modules/toolchains_llvm_bootstrapped/0.5.5/MODULE.bazel": "a2f1469b2920fcc9c043c87301b7a207d320231615abecd7771d89e516fc3d0b", + "https://bcr.bazel.build/modules/toolchains_llvm_bootstrapped/0.5.5/source.json": "8498c094703da13a859d19dd2a200152f75870fb7f22a9770df99d230a4f50d1", "https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43", "https://bcr.bazel.build/modules/with_cfg.bzl/0.12.0/MODULE.bazel": "b573395fe63aef4299ba095173e2f62ccfee5ad9bbf7acaa95dba73af9fc2b38", "https://bcr.bazel.build/modules/with_cfg.bzl/0.12.0/source.json": "3f3fbaeafecaf629877ad152a2c9def21f8d330d91aa94c5dc75bbb98c10b8b8", @@ -488,6 +484,65 @@ } } }, + "@@rules_go+//go:extensions.bzl%go_sdk": { + "os:osx,arch:aarch64": { + "bzlTransitiveDigest": "u6ipp4B4trSrM0FoJr/iOAbAzHnadJmFUMourQbBdA0=", + "usagesDigest": "igIBXyqNg9Be63Cuu6kZxOeoDRDMqxSv8BcoWiqSh3w=", + "recordedInputs": [ + "REPO_MAPPING:bazel_features+,bazel_features_globals bazel_features++version_extension+bazel_features_globals", + "REPO_MAPPING:bazel_features+,bazel_features_version bazel_features++version_extension+bazel_features_version", + "REPO_MAPPING:rules_go+,bazel_features bazel_features+", + "REPO_MAPPING:rules_go+,bazel_tools bazel_tools" + ], + "generatedRepoSpecs": { + "go_default_sdk": { + "repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule", + "attributes": { + "goos": "", + "goarch": "", + "sdks": {}, + "experiments": [], + "patches": [], + "patch_strip": 
0, + "urls": [ + "https://dl.google.com/go/{}" + ], + "version": "1.21.1", + "strip_prefix": "go" + } + }, + "go_host_compatible_sdk_label": { + "repoRuleId": "@@rules_go+//go/private:extensions.bzl%host_compatible_toolchain", + "attributes": { + "toolchain": "@go_default_sdk//:ROOT" + } + }, + "go_toolchains": { + "repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_multiple_toolchains", + "attributes": { + "prefixes": [ + "_0000_go_default_sdk_" + ], + "geese": [ + "" + ], + "goarchs": [ + "" + ], + "sdk_repos": [ + "go_default_sdk" + ], + "sdk_types": [ + "remote" + ], + "sdk_versions": [ + "1.21.1" + ] + } + } + } + } + }, "@@rules_kotlin+//src/main/starlark/core/repositories:bzlmod_setup.bzl%rules_kotlin_extensions": { "general": { "bzlTransitiveDigest": "ABI1D/sbS1ovwaW/kHDoj8nnXjQ0oKU9fzmzEG4iT8o=", @@ -876,170 +931,6 @@ } }, "facts": { - "@@rules_go+//go:extensions.bzl%go_sdk": { - "1.25.0": { - "aix_ppc64": [ - "go1.25.0.aix-ppc64.tar.gz", - "e5234a7dac67bc86c528fe9752fc9d63557918627707a733ab4cac1a6faed2d4" - ], - "darwin_amd64": [ - "go1.25.0.darwin-amd64.tar.gz", - "5bd60e823037062c2307c71e8111809865116714d6f6b410597cf5075dfd80ef" - ], - "darwin_arm64": [ - "go1.25.0.darwin-arm64.tar.gz", - "544932844156d8172f7a28f77f2ac9c15a23046698b6243f633b0a0b00c0749c" - ], - "dragonfly_amd64": [ - "go1.25.0.dragonfly-amd64.tar.gz", - "5ed3cf9a810a1483822538674f1336c06b51aa1b94d6d545a1a0319a48177120" - ], - "freebsd_386": [ - "go1.25.0.freebsd-386.tar.gz", - "abea5d5c6697e6b5c224731f2158fe87c602996a2a233ac0c4730cd57bf8374e" - ], - "freebsd_amd64": [ - "go1.25.0.freebsd-amd64.tar.gz", - "86e6fe0a29698d7601c4442052dac48bd58d532c51cccb8f1917df648138730b" - ], - "freebsd_arm": [ - "go1.25.0.freebsd-arm.tar.gz", - "d90b78e41921f72f30e8bbc81d9dec2cff7ff384a33d8d8debb24053e4336bfe" - ], - "freebsd_arm64": [ - "go1.25.0.freebsd-arm64.tar.gz", - "451d0da1affd886bfb291b7c63a6018527b269505db21ce6e14724f22ab0662e" - ], - "freebsd_riscv64": [ - "go1.25.0.freebsd-riscv64.tar.gz", - 
"7b565f76bd8bda46549eeaaefe0e53b251e644c230577290c0f66b1ecdb3cdbe" - ], - "illumos_amd64": [ - "go1.25.0.illumos-amd64.tar.gz", - "b1e1fdaab1ad25aa1c08d7a36c97d45d74b98b89c3f78c6d2145f77face54a2c" - ], - "linux_386": [ - "go1.25.0.linux-386.tar.gz", - "8c602dd9d99bc9453b3995d20ce4baf382cc50855900a0ece5de9929df4a993a" - ], - "linux_amd64": [ - "go1.25.0.linux-amd64.tar.gz", - "2852af0cb20a13139b3448992e69b868e50ed0f8a1e5940ee1de9e19a123b613" - ], - "linux_arm64": [ - "go1.25.0.linux-arm64.tar.gz", - "05de75d6994a2783699815ee553bd5a9327d8b79991de36e38b66862782f54ae" - ], - "linux_armv6l": [ - "go1.25.0.linux-armv6l.tar.gz", - "a5a8f8198fcf00e1e485b8ecef9ee020778bf32a408a4e8873371bfce458cd09" - ], - "linux_loong64": [ - "go1.25.0.linux-loong64.tar.gz", - "cab86b1cf761b1cb3bac86a8877cfc92e7b036fc0d3084123d77013d61432afc" - ], - "linux_mips": [ - "go1.25.0.linux-mips.tar.gz", - "d66b6fb74c3d91b9829dc95ec10ca1f047ef5e89332152f92e136cf0e2da5be1" - ], - "linux_mips64": [ - "go1.25.0.linux-mips64.tar.gz", - "4082e4381a8661bc2a839ff94ba3daf4f6cde20f8fb771b5b3d4762dc84198a2" - ], - "linux_mips64le": [ - "go1.25.0.linux-mips64le.tar.gz", - "70002c299ec7f7175ac2ef673b1b347eecfa54ae11f34416a6053c17f855afcc" - ], - "linux_mipsle": [ - "go1.25.0.linux-mipsle.tar.gz", - "b00a3a39eff099f6df9f1c7355bf28e4589d0586f42d7d4a394efb763d145a73" - ], - "linux_ppc64": [ - "go1.25.0.linux-ppc64.tar.gz", - "df166f33bd98160662560a72ff0b4ba731f969a80f088922bddcf566a88c1ec1" - ], - "linux_ppc64le": [ - "go1.25.0.linux-ppc64le.tar.gz", - "0f18a89e7576cf2c5fa0b487a1635d9bcbf843df5f110e9982c64df52a983ad0" - ], - "linux_riscv64": [ - "go1.25.0.linux-riscv64.tar.gz", - "c018ff74a2c48d55c8ca9b07c8e24163558ffec8bea08b326d6336905d956b67" - ], - "linux_s390x": [ - "go1.25.0.linux-s390x.tar.gz", - "34e5a2e19f2292fbaf8783e3a241e6e49689276aef6510a8060ea5ef54eee408" - ], - "netbsd_386": [ - "go1.25.0.netbsd-386.tar.gz", - "f8586cdb7aa855657609a5c5f6dbf523efa00c2bbd7c76d3936bec80aa6c0aba" - ], - "netbsd_amd64": 
[ - "go1.25.0.netbsd-amd64.tar.gz", - "ae8dc1469385b86a157a423bb56304ba45730de8a897615874f57dd096db2c2a" - ], - "netbsd_arm": [ - "go1.25.0.netbsd-arm.tar.gz", - "1ff7e4cc764425fc9dd6825eaee79d02b3c7cafffbb3691687c8d672ade76cb7" - ], - "netbsd_arm64": [ - "go1.25.0.netbsd-arm64.tar.gz", - "e1b310739f26724216aa6d7d7208c4031f9ff54c9b5b9a796ddc8bebcb4a5f16" - ], - "openbsd_386": [ - "go1.25.0.openbsd-386.tar.gz", - "4802a9b20e533da91adb84aab42e94aa56cfe3e5475d0550bed3385b182e69d8" - ], - "openbsd_amd64": [ - "go1.25.0.openbsd-amd64.tar.gz", - "c016cd984bebe317b19a4f297c4f50def120dc9788490540c89f28e42f1dabe1" - ], - "openbsd_arm": [ - "go1.25.0.openbsd-arm.tar.gz", - "a1e31d0bf22172ddde42edf5ec811ef81be43433df0948ece52fecb247ccfd8d" - ], - "openbsd_arm64": [ - "go1.25.0.openbsd-arm64.tar.gz", - "343ea8edd8c218196e15a859c6072d0dd3246fbbb168481ab665eb4c4140458d" - ], - "openbsd_ppc64": [ - "go1.25.0.openbsd-ppc64.tar.gz", - "694c14da1bcaeb5e3332d49bdc2b6d155067648f8fe1540c5de8f3cf8e157154" - ], - "openbsd_riscv64": [ - "go1.25.0.openbsd-riscv64.tar.gz", - "aa510ad25cf54c06cd9c70b6d80ded69cb20188ac6e1735655eef29ff7e7885f" - ], - "plan9_386": [ - "go1.25.0.plan9-386.tar.gz", - "46f8cef02086cf04bf186c5912776b56535178d4cb319cd19c9fdbdd29231986" - ], - "plan9_amd64": [ - "go1.25.0.plan9-amd64.tar.gz", - "29b34391d84095e44608a228f63f2f88113a37b74a79781353ec043dfbcb427b" - ], - "plan9_arm": [ - "go1.25.0.plan9-arm.tar.gz", - "0a047107d13ebe7943aaa6d54b1d7bbd2e45e68ce449b52915a818da715799c2" - ], - "solaris_amd64": [ - "go1.25.0.solaris-amd64.tar.gz", - "9977f9e4351984364a3b2b78f8b88bfd1d339812356d5237678514594b7d3611" - ], - "windows_386": [ - "go1.25.0.windows-386.zip", - "df9f39db82a803af0db639e3613a36681ab7a42866b1384b3f3a1045663961a7" - ], - "windows_amd64": [ - "go1.25.0.windows-amd64.zip", - "89efb4f9b30812eee083cc1770fdd2913c14d301064f6454851428f9707d190b" - ], - "windows_arm64": [ - "go1.25.0.windows-arm64.zip", - 
"27bab004c72b3d7bd05a69b6ec0fc54a309b4b78cc569dd963d8b3ec28bfdb8c" - ] - } - }, "@@rules_rs+//rs:extensions.bzl%crate": { "Inflector_0.11.4": "{\"dependencies\":[{\"name\":\"lazy_static\",\"optional\":true,\"req\":\"^1.2.0\"},{\"name\":\"regex\",\"optional\":true,\"req\":\"^1.1\"}],\"features\":{\"default\":[\"heavyweight\"],\"heavyweight\":[\"regex\",\"lazy_static\"],\"unstable\":[]}}", "addr2line_0.24.2": "{\"dependencies\":[{\"name\":\"alloc\",\"optional\":true,\"package\":\"rustc-std-workspace-alloc\",\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"backtrace\",\"req\":\"^0.3.13\"},{\"features\":[\"wrap_help\"],\"name\":\"clap\",\"optional\":true,\"req\":\"^4.3.21\"},{\"name\":\"compiler_builtins\",\"optional\":true,\"req\":\"^0.1.2\"},{\"name\":\"core\",\"optional\":true,\"package\":\"rustc-std-workspace-core\",\"req\":\"^1.0.0\"},{\"default_features\":false,\"features\":[\"alloc\"],\"name\":\"cpp_demangle\",\"optional\":true,\"req\":\"^0.4\"},{\"default_features\":false,\"name\":\"fallible-iterator\",\"optional\":true,\"req\":\"^0.3.0\"},{\"kind\":\"dev\",\"name\":\"findshlibs\",\"req\":\"^0.10\"},{\"default_features\":false,\"features\":[\"read\"],\"name\":\"gimli\",\"req\":\"^0.31.1\"},{\"kind\":\"dev\",\"name\":\"libtest-mimic\",\"req\":\"^0.7.2\"},{\"name\":\"memmap2\",\"optional\":true,\"req\":\"^0.9.4\"},{\"default_features\":false,\"features\":[\"read\",\"compression\"],\"name\":\"object\",\"optional\":true,\"req\":\"^0.36.0\"},{\"name\":\"rustc-demangle\",\"optional\":true,\"req\":\"^0.1\"},{\"default_features\":false,\"name\":\"smallvec\",\"optional\":true,\"req\":\"^1\"},{\"name\":\"typed-arena\",\"optional\":true,\"req\":\"^2\"}],\"features\":{\"all\":[\"bin\"],\"bin\":[\"loader\",\"rustc-demangle\",\"cpp_demangle\",\"fallible-iterator\",\"smallvec\",\"dep:clap\"],\"cargo-all\":[],\"default\":[\"rustc-demangle\",\"cpp_demangle\",\"loader\",\"fallible-iterator\",\"smallvec\"],\"loader\":[\"std\",\"dep:object\",\"dep:memmap2\",\"dep:typed-arena\"],\"r
ustc-dep-of-std\":[\"core\",\"alloc\",\"compiler_builtins\",\"gimli/rustc-dep-of-std\"],\"std\":[\"gimli/std\"]}}", @@ -1207,6 +1098,7 @@ "hyper-tls_0.6.0": "{\"dependencies\":[{\"name\":\"bytes\",\"req\":\"^1\"},{\"name\":\"http-body-util\",\"req\":\"^0.1.0\"},{\"name\":\"hyper\",\"req\":\"^1\"},{\"features\":[\"client-legacy\",\"tokio\"],\"name\":\"hyper-util\",\"req\":\"^0.1.0\"},{\"features\":[\"http1\"],\"kind\":\"dev\",\"name\":\"hyper-util\",\"req\":\"^0.1.0\"},{\"name\":\"native-tls\",\"req\":\"^0.2.1\"},{\"name\":\"tokio\",\"req\":\"^1\"},{\"features\":[\"io-std\",\"macros\",\"io-util\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.0.0\"},{\"name\":\"tokio-native-tls\",\"req\":\"^0.3\"},{\"name\":\"tower-service\",\"req\":\"^0.3\"}],\"features\":{\"alpn\":[\"native-tls/alpn\"],\"vendored\":[\"native-tls/vendored\"]}}", "hyper-util_0.1.16": "{\"dependencies\":[{\"name\":\"base64\",\"optional\":true,\"req\":\"^0.22\"},{\"name\":\"bytes\",\"req\":\"^1.7.1\"},{\"kind\":\"dev\",\"name\":\"bytes\",\"req\":\"^1\"},{\"name\":\"futures-channel\",\"optional\":true,\"req\":\"^0.3\"},{\"name\":\"futures-core\",\"req\":\"^0.3\"},{\"default_features\":false,\"name\":\"futures-util\",\"optional\":true,\"req\":\"^0.3.16\"},{\"default_features\":false,\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3.16\"},{\"name\":\"http\",\"req\":\"^1.0\"},{\"name\":\"http-body\",\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1.0\"},{\"name\":\"hyper\",\"req\":\"^1.6.0\"},{\"features\":[\"full\"],\"kind\":\"dev\",\"name\":\"hyper\",\"req\":\"^1.4.0\"},{\"name\":\"ipnet\",\"optional\":true,\"req\":\"^2.9\"},{\"name\":\"libc\",\"optional\":true,\"req\":\"^0.2\"},{\"name\":\"percent-encoding\",\"optional\":true,\"req\":\"^2.3\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.4\"},{\"kind\":\"dev\",\"name\":\"pnet_datalink\",\"req\":\"^0.35.0\",\"target\":\"cfg(any(target_os = \\\"linux\\\", target_os = 
\\\"macos\\\"))\"},{\"kind\":\"dev\",\"name\":\"pretty_env_logger\",\"req\":\"^0.5\"},{\"features\":[\"all\"],\"name\":\"socket2\",\"optional\":true,\"req\":\">=0.5.9, <0.7\"},{\"name\":\"system-configuration\",\"optional\":true,\"req\":\"^0.6.1\",\"target\":\"cfg(target_os = \\\"macos\\\")\"},{\"default_features\":false,\"name\":\"tokio\",\"optional\":true,\"req\":\"^1\"},{\"features\":[\"macros\",\"test-util\",\"signal\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4\"},{\"name\":\"tower-service\",\"optional\":true,\"req\":\"^0.3\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"windows-registry\",\"optional\":true,\"req\":\"^0.5\",\"target\":\"cfg(windows)\"}],\"features\":{\"__internal_happy_eyeballs_tests\":[],\"client\":[\"hyper/client\",\"tokio/net\",\"dep:tracing\",\"dep:futures-channel\",\"dep:tower-service\"],\"client-legacy\":[\"client\",\"dep:socket2\",\"tokio/sync\",\"dep:libc\",\"dep:futures-util\"],\"client-proxy\":[\"client\",\"dep:base64\",\"dep:ipnet\",\"dep:percent-encoding\"],\"client-proxy-system\":[\"dep:system-configuration\",\"dep:windows-registry\"],\"default\":[],\"full\":[\"client\",\"client-legacy\",\"client-proxy\",\"client-proxy-system\",\"server\",\"server-auto\",\"server-graceful\",\"service\",\"http1\",\"http2\",\"tokio\",\"tracing\"],\"http1\":[\"hyper/http1\"],\"http2\":[\"hyper/http2\"],\"server\":[\"hyper/server\"],\"server-auto\":[\"server\",\"http1\",\"http2\"],\"server-graceful\":[\"server\",\"tokio/sync\"],\"service\":[\"dep:tower-service\"],\"tokio\":[\"dep:tokio\",\"tokio/rt\",\"tokio/time\"],\"tracing\":[\"dep:tracing\"]}}", "hyper_1.7.0": 
"{\"dependencies\":[{\"name\":\"atomic-waker\",\"optional\":true,\"req\":\"^1.1.2\"},{\"name\":\"bytes\",\"req\":\"^1.2\"},{\"kind\":\"dev\",\"name\":\"form_urlencoded\",\"req\":\"^1\"},{\"name\":\"futures-channel\",\"optional\":true,\"req\":\"^0.3\"},{\"features\":[\"sink\"],\"kind\":\"dev\",\"name\":\"futures-channel\",\"req\":\"^0.3\"},{\"name\":\"futures-core\",\"optional\":true,\"req\":\"^0.3.31\"},{\"default_features\":false,\"features\":[\"alloc\"],\"name\":\"futures-util\",\"optional\":true,\"req\":\"^0.3\"},{\"default_features\":false,\"features\":[\"alloc\",\"sink\"],\"kind\":\"dev\",\"name\":\"futures-util\",\"req\":\"^0.3\"},{\"name\":\"h2\",\"optional\":true,\"req\":\"^0.4.2\"},{\"name\":\"http\",\"req\":\"^1\"},{\"name\":\"http-body\",\"req\":\"^1\"},{\"name\":\"http-body-util\",\"optional\":true,\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1\"},{\"name\":\"httparse\",\"optional\":true,\"req\":\"^1.9\"},{\"name\":\"httpdate\",\"optional\":true,\"req\":\"^1.0\"},{\"name\":\"itoa\",\"optional\":true,\"req\":\"^1\"},{\"name\":\"pin-project-lite\",\"optional\":true,\"req\":\"^0.2.4\"},{\"kind\":\"dev\",\"name\":\"pin-project-lite\",\"req\":\"^0.2.4\"},{\"name\":\"pin-utils\",\"optional\":true,\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"pretty_env_logger\",\"req\":\"^0.5\"},{\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0\"},{\"features\":[\"const_generics\",\"const_new\"],\"name\":\"smallvec\",\"optional\":true,\"req\":\"^1.12\"},{\"kind\":\"dev\",\"name\":\"spmc\",\"req\":\"^0.3\"},{\"features\":[\"sync\"],\"name\":\"tokio\",\"req\":\"^1\"},{\"features\":[\"fs\",\"macros\",\"net\",\"io-std\",\"io-util\",\"rt\",\"rt-multi-thread\",\"sync\",\"time\",\"test-util\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4\"},{\"kind\":\"dev\",\"name\":\"tokio-util\",\"req\":\"^0.7.10\"}
,{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"want\",\"optional\":true,\"req\":\"^0.3\"}],\"features\":{\"capi\":[],\"client\":[\"dep:want\",\"dep:pin-project-lite\",\"dep:smallvec\"],\"default\":[],\"ffi\":[\"dep:http-body-util\",\"dep:futures-util\"],\"full\":[\"client\",\"http1\",\"http2\",\"server\"],\"http1\":[\"dep:atomic-waker\",\"dep:futures-channel\",\"dep:futures-core\",\"dep:httparse\",\"dep:itoa\",\"dep:pin-utils\"],\"http2\":[\"dep:futures-channel\",\"dep:futures-core\",\"dep:h2\"],\"nightly\":[],\"server\":[\"dep:httpdate\",\"dep:pin-project-lite\",\"dep:smallvec\"],\"tracing\":[\"dep:tracing\"]}}", + "hyperlocal_0.9.1": "{\"dependencies\":[{\"name\":\"hex\",\"req\":\"^0.4\"},{\"name\":\"http-body-util\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"hyper\",\"req\":\"^1.3\"},{\"name\":\"hyper-util\",\"optional\":true,\"req\":\"^0.1.2\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2\"},{\"kind\":\"dev\",\"name\":\"thiserror\",\"req\":\"^1.0\"},{\"default_features\":false,\"features\":[\"net\"],\"name\":\"tokio\",\"req\":\"^1.35\"},{\"features\":[\"io-std\",\"io-util\",\"macros\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.35\"},{\"name\":\"tower-service\",\"optional\":true,\"req\":\"^0.3\"}],\"features\":{\"client\":[\"http-body-util\",\"hyper/client\",\"hyper/http1\",\"hyper-util/client-legacy\",\"hyper-util/http1\",\"hyper-util/tokio\",\"tower-service\"],\"default\":[\"client\",\"server\"],\"server\":[\"hyper/http1\",\"hyper/server\",\"hyper-util/tokio\"]}}", "iana-time-zone-haiku_0.1.2": "{\"dependencies\":[{\"kind\":\"build\",\"name\":\"cc\",\"req\":\"^1.0.79\"}],\"features\":{}}", "iana-time-zone_0.1.63": "{\"dependencies\":[{\"name\":\"android_system_properties\",\"req\":\"^0.1.5\",\"target\":\"cfg(target_os = 
\\\"android\\\")\"},{\"kind\":\"dev\",\"name\":\"chrono-tz\",\"req\":\"^0.10.1\"},{\"name\":\"core-foundation-sys\",\"req\":\"^0.8.6\",\"target\":\"cfg(target_vendor = \\\"apple\\\")\"},{\"kind\":\"dev\",\"name\":\"getrandom\",\"req\":\"^0.2.1\"},{\"features\":[\"js\"],\"kind\":\"dev\",\"name\":\"getrandom\",\"req\":\"^0.2.1\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", target_os = \\\"unknown\\\"))\"},{\"name\":\"iana-time-zone-haiku\",\"req\":\"^0.1.1\",\"target\":\"cfg(target_os = \\\"haiku\\\")\"},{\"name\":\"js-sys\",\"req\":\"^0.3.66\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", target_os = \\\"unknown\\\"))\"},{\"name\":\"log\",\"req\":\"^0.4.14\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", target_os = \\\"unknown\\\"))\"},{\"name\":\"wasm-bindgen\",\"req\":\"^0.2.89\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", target_os = \\\"unknown\\\"))\"},{\"kind\":\"dev\",\"name\":\"wasm-bindgen-test\",\"req\":\"^0.3.46\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", target_os = \\\"unknown\\\"))\"},{\"name\":\"windows-core\",\"req\":\">=0.56, <=0.61\",\"target\":\"cfg(target_os = \\\"windows\\\")\"}],\"features\":{\"fallback\":[]}}", "icu_collections_2.0.0": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"criterion\",\"req\":\"^0.5.0\",\"target\":\"cfg(not(target_arch = 
\\\"wasm32\\\"))\"},{\"default_features\":false,\"features\":[\"derive\"],\"name\":\"databake\",\"optional\":true,\"req\":\"^0.2.0\"},{\"default_features\":false,\"name\":\"displaydoc\",\"req\":\"^0.2.3\"},{\"kind\":\"dev\",\"name\":\"iai\",\"req\":\"^0.1.1\"},{\"default_features\":false,\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"postcard\",\"req\":\"^1.0.3\"},{\"default_features\":false,\"features\":[\"zerovec\"],\"name\":\"potential_utf\",\"req\":\"^0.1.1\"},{\"default_features\":false,\"features\":[\"derive\",\"alloc\"],\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.110\"},{\"default_features\":false,\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0.110\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.45\"},{\"default_features\":false,\"features\":[\"parse\"],\"kind\":\"dev\",\"name\":\"toml\",\"req\":\"^0.8.0\"},{\"default_features\":false,\"features\":[\"derive\"],\"name\":\"yoke\",\"req\":\"^0.8.0\"},{\"default_features\":false,\"features\":[\"derive\"],\"name\":\"zerofrom\",\"req\":\"^0.1.3\"},{\"default_features\":false,\"features\":[\"derive\",\"yoke\"],\"name\":\"zerovec\",\"req\":\"^0.11.1\"}],\"features\":{\"alloc\":[\"zerovec/alloc\"],\"databake\":[\"dep:databake\",\"zerovec/databake\"],\"serde\":[\"dep:serde\",\"zerovec/serde\",\"potential_utf/serde\",\"alloc\"]}}", @@ -1391,12 +1283,12 @@ "security-framework_2.11.1": 
"{\"dependencies\":[{\"name\":\"bitflags\",\"req\":\"^2.6\"},{\"name\":\"core-foundation\",\"req\":\"^0.9.4\"},{\"name\":\"core-foundation-sys\",\"req\":\"^0.8.6\"},{\"kind\":\"dev\",\"name\":\"env_logger\",\"req\":\"^0.10\"},{\"kind\":\"dev\",\"name\":\"hex\",\"req\":\"^0.4.3\"},{\"name\":\"libc\",\"req\":\"^0.2.139\"},{\"name\":\"log\",\"optional\":true,\"req\":\"^0.4.20\"},{\"name\":\"num-bigint\",\"optional\":true,\"req\":\"^0.4.6\"},{\"default_features\":false,\"name\":\"security-framework-sys\",\"req\":\"^2.11.1\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3.3.0\"},{\"kind\":\"dev\",\"name\":\"time\",\"req\":\"^0.3.17\"},{\"kind\":\"dev\",\"name\":\"x509-parser\",\"req\":\"^0.16\"}],\"features\":{\"OSX_10_10\":[\"OSX_10_9\",\"security-framework-sys/OSX_10_10\"],\"OSX_10_11\":[\"OSX_10_10\",\"security-framework-sys/OSX_10_11\"],\"OSX_10_12\":[\"OSX_10_11\",\"security-framework-sys/OSX_10_12\"],\"OSX_10_13\":[\"OSX_10_12\",\"security-framework-sys/OSX_10_13\",\"alpn\",\"session-tickets\",\"serial-number-bigint\"],\"OSX_10_14\":[\"OSX_10_13\",\"security-framework-sys/OSX_10_14\"],\"OSX_10_15\":[\"OSX_10_14\",\"security-framework-sys/OSX_10_15\"],\"OSX_10_9\":[\"security-framework-sys/OSX_10_9\"],\"alpn\":[],\"default\":[\"OSX_10_12\"],\"job-bless\":[],\"nightly\":[],\"serial-number-bigint\":[\"dep:num-bigint\"],\"session-tickets\":[]}}", "security-framework_3.3.0": 
"{\"dependencies\":[{\"name\":\"bitflags\",\"req\":\"^2.6\"},{\"name\":\"core-foundation\",\"req\":\"^0.10\"},{\"name\":\"core-foundation-sys\",\"req\":\"^0.8.6\"},{\"kind\":\"dev\",\"name\":\"env_logger\",\"req\":\"^0.10\"},{\"kind\":\"dev\",\"name\":\"hex\",\"req\":\"^0.4.3\"},{\"name\":\"libc\",\"req\":\"^0.2.139\"},{\"name\":\"log\",\"optional\":true,\"req\":\"^0.4.20\"},{\"default_features\":false,\"name\":\"security-framework-sys\",\"req\":\"^2.14\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3.12.0\"},{\"kind\":\"dev\",\"name\":\"time\",\"req\":\"^0.3.23\"},{\"kind\":\"dev\",\"name\":\"x509-parser\",\"req\":\"^0.16\"}],\"features\":{\"OSX_10_12\":[\"security-framework-sys/OSX_10_12\"],\"OSX_10_13\":[\"OSX_10_12\",\"security-framework-sys/OSX_10_13\",\"alpn\",\"session-tickets\"],\"OSX_10_14\":[\"OSX_10_13\",\"security-framework-sys/OSX_10_14\"],\"OSX_10_15\":[\"OSX_10_14\",\"security-framework-sys/OSX_10_15\"],\"alpn\":[],\"default\":[\"OSX_10_12\"],\"job-bless\":[],\"nightly\":[],\"session-tickets\":[],\"sync-keychain\":[\"OSX_10_13\"]}}", "seize_0.5.1": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"criterion\",\"req\":\"^0.7.0\"},{\"kind\":\"dev\",\"name\":\"crossbeam-epoch\",\"req\":\"^0.9.8\"},{\"kind\":\"dev\",\"name\":\"haphazard\",\"req\":\"^0.1.8\"},{\"name\":\"libc\",\"optional\":true,\"req\":\"^0.2\"},{\"features\":[\"Win32_System_Threading\"],\"name\":\"windows-sys\",\"optional\":true,\"req\":\">=0.52, <=0.61\",\"target\":\"cfg(windows)\"}],\"features\":{\"default\":[\"fast-barrier\"],\"fast-barrier\":[\"windows-sys\",\"libc\"]}}", + "semver_1.0.27": "{\"dependencies\":[{\"default_features\":false,\"name\":\"serde\",\"optional\":true,\"package\":\"serde_core\",\"req\":\"^1.0.220\"},{\"default_features\":false,\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.220\",\"target\":\"cfg(any())\"}],\"features\":{\"default\":[\"std\"],\"serde\":[\"dep:serde\"],\"std\":[]}}", "serde_1.0.227": 
"{\"dependencies\":[{\"default_features\":false,\"features\":[\"result\"],\"name\":\"serde_core\",\"req\":\"=1.0.227\"},{\"name\":\"serde_derive\",\"optional\":true,\"req\":\"^1\"}],\"features\":{\"alloc\":[\"serde_core/alloc\"],\"default\":[\"std\"],\"derive\":[\"serde_derive\"],\"rc\":[\"serde_core/rc\"],\"std\":[\"serde_core/std\"],\"unstable\":[\"serde_core/unstable\"]}}", "serde_core_1.0.227": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1\"},{\"name\":\"serde_derive\",\"req\":\"=1.0.227\",\"target\":\"cfg(any())\"},{\"kind\":\"dev\",\"name\":\"serde_derive\",\"req\":\"^1\"}],\"features\":{\"alloc\":[],\"default\":[\"std\",\"result\"],\"rc\":[],\"result\":[],\"std\":[],\"unstable\":[]}}", "serde_derive_1.0.227": "{\"dependencies\":[{\"default_features\":false,\"features\":[\"proc-macro\"],\"name\":\"proc-macro2\",\"req\":\"^1.0.74\"},{\"default_features\":false,\"features\":[\"proc-macro\"],\"name\":\"quote\",\"req\":\"^1.0.35\"},{\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1\"},{\"default_features\":false,\"features\":[\"clone-impls\",\"derive\",\"parsing\",\"printing\",\"proc-macro\"],\"name\":\"syn\",\"req\":\"^2.0.81\"}],\"features\":{\"default\":[],\"deserialize_in_place\":[]}}", "serde_json_1.0.145": 
"{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"automod\",\"req\":\"^1.0.11\"},{\"name\":\"indexmap\",\"optional\":true,\"req\":\"^2.2.3\"},{\"kind\":\"dev\",\"name\":\"indoc\",\"req\":\"^2.0.2\"},{\"name\":\"itoa\",\"req\":\"^1.0\"},{\"default_features\":false,\"name\":\"memchr\",\"req\":\"^2\"},{\"kind\":\"dev\",\"name\":\"ref-cast\",\"req\":\"^1.0.18\"},{\"kind\":\"dev\",\"name\":\"rustversion\",\"req\":\"^1.0.13\"},{\"name\":\"ryu\",\"req\":\"^1.0\"},{\"default_features\":false,\"name\":\"serde\",\"req\":\"^1.0.220\",\"target\":\"cfg(any())\"},{\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0.194\"},{\"kind\":\"dev\",\"name\":\"serde_bytes\",\"req\":\"^0.11.10\"},{\"default_features\":false,\"name\":\"serde_core\",\"req\":\"^1.0.220\"},{\"kind\":\"dev\",\"name\":\"serde_derive\",\"req\":\"^1.0.166\"},{\"kind\":\"dev\",\"name\":\"serde_stacker\",\"req\":\"^0.1.8\"},{\"features\":[\"diff\"],\"kind\":\"dev\",\"name\":\"trybuild\",\"req\":\"^1.0.108\"}],\"features\":{\"alloc\":[\"serde_core/alloc\"],\"arbitrary_precision\":[],\"default\":[\"std\"],\"float_roundtrip\":[],\"preserve_order\":[\"indexmap\",\"std\"],\"raw_value\":[],\"std\":[\"memchr/std\",\"serde_core/std\"],\"unbounded_depth\":[]}}", "serde_repr_0.1.20": "{\"dependencies\":[{\"name\":\"proc-macro2\",\"req\":\"^1.0.74\"},{\"name\":\"quote\",\"req\":\"^1.0.35\"},{\"kind\":\"dev\",\"name\":\"rustversion\",\"req\":\"^1.0.13\"},{\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0.166\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.100\"},{\"name\":\"syn\",\"req\":\"^2.0.46\"},{\"features\":[\"diff\"],\"kind\":\"dev\",\"name\":\"trybuild\",\"req\":\"^1.0.81\"}],\"features\":{}}", - "serde_spanned_0.6.9": 
"{\"dependencies\":[{\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.145\"},{\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"serde-untagged\",\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"serde_derive\",\"req\":\"^1\"}],\"features\":{}}", "serde_urlencoded_0.7.1": "{\"dependencies\":[{\"name\":\"form_urlencoded\",\"req\":\"^1\"},{\"name\":\"itoa\",\"req\":\"^1\"},{\"name\":\"ryu\",\"req\":\"^1\"},{\"name\":\"serde\",\"req\":\"^1.0.69\"},{\"kind\":\"dev\",\"name\":\"serde_derive\",\"req\":\"^1\"}],\"features\":{}}", "serial2_0.2.33": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"assert2\",\"req\":\"^0.3.11\"},{\"name\":\"cfg-if\",\"req\":\"^1.0.0\",\"target\":\"cfg(unix)\"},{\"name\":\"libc\",\"req\":\"^0.2.109\",\"target\":\"cfg(unix)\"},{\"features\":[\"derive\"],\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.108\"},{\"features\":[\"commapi\",\"fileapi\",\"handleapi\",\"ioapiset\",\"std\",\"synchapi\",\"winbase\",\"winerror\",\"winreg\"],\"name\":\"winapi\",\"req\":\"^0.3.9\",\"target\":\"cfg(windows)\"}],\"features\":{\"doc\":[],\"doc-cfg\":[],\"rs4xx\":[],\"serde\":[\"dep:serde\"],\"unix\":[],\"windows\":[]}}", "sha-1_0.10.1": "{\"dependencies\":[{\"name\":\"cfg-if\",\"req\":\"^1.0\"},{\"name\":\"cpufeatures\",\"req\":\"^0.2\",\"target\":\"cfg(any(target_arch = \\\"aarch64\\\", target_arch = \\\"x86\\\", target_arch = \\\"x86_64\\\"))\"},{\"name\":\"digest\",\"req\":\"^0.10.4\"},{\"features\":[\"dev\"],\"kind\":\"dev\",\"name\":\"digest\",\"req\":\"^0.10.4\"},{\"kind\":\"dev\",\"name\":\"hex-literal\",\"req\":\"^0.2.2\"},{\"name\":\"sha1-asm\",\"optional\":true,\"req\":\"^0.5\",\"target\":\"cfg(any(target_arch = \\\"aarch64\\\", target_arch = \\\"x86\\\", target_arch = \\\"x86_64\\\"))\"}],\"features\":{\"asm\":[\"sha1-asm\"],\"compress\":[],\"default\":[\"std\"],\"force-soft\":[],\"oid\":[\"digest/oid\"],\"std\":[\"digest/std\"]}}", @@ -1466,10 +1358,6 @@ 
"tokio-stream_0.1.17": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"async-stream\",\"req\":\"^0.3\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"futures\",\"req\":\"^0.3\"},{\"name\":\"futures-core\",\"req\":\"^0.3.0\"},{\"kind\":\"dev\",\"name\":\"parking_lot\",\"req\":\"^0.12.0\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.11\"},{\"features\":[\"sync\"],\"name\":\"tokio\",\"req\":\"^1.15.0\"},{\"features\":[\"full\",\"test-util\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.2.0\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4\"},{\"name\":\"tokio-util\",\"optional\":true,\"req\":\"^0.7.0\"}],\"features\":{\"default\":[\"time\"],\"fs\":[\"tokio/fs\"],\"full\":[\"time\",\"net\",\"io-util\",\"fs\",\"sync\",\"signal\"],\"io-util\":[\"tokio/io-util\"],\"net\":[\"tokio/net\"],\"signal\":[\"tokio/signal\"],\"sync\":[\"tokio/sync\",\"tokio-util\"],\"time\":[\"tokio/time\"]}}", "tokio-util_0.7.16": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"async-stream\",\"req\":\"^0.3.0\"},{\"name\":\"bytes\",\"req\":\"^1.5.0\"},{\"kind\":\"dev\",\"name\":\"futures\",\"req\":\"^0.3.0\"},{\"name\":\"futures-core\",\"req\":\"^0.3.0\"},{\"name\":\"futures-io\",\"optional\":true,\"req\":\"^0.3.0\"},{\"name\":\"futures-sink\",\"req\":\"^0.3.0\"},{\"kind\":\"dev\",\"name\":\"futures-test\",\"req\":\"^0.3.5\"},{\"name\":\"futures-util\",\"optional\":true,\"req\":\"^0.3.0\"},{\"default_features\":false,\"name\":\"hashbrown\",\"optional\":true,\"req\":\"^0.15.0\"},{\"kind\":\"dev\",\"name\":\"parking_lot\",\"req\":\"^0.12.0\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.11\"},{\"name\":\"slab\",\"optional\":true,\"req\":\"^0.4.4\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3.1.0\"},{\"features\":[\"sync\"],\"name\":\"tokio\",\"req\":\"^1.28.0\"},{\"features\":[\"full\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"tokio-stream\",\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4.0\"
},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1.29\"}],\"features\":{\"__docs_rs\":[\"futures-util\"],\"codec\":[],\"compat\":[\"futures-io\"],\"default\":[],\"full\":[\"codec\",\"compat\",\"io-util\",\"time\",\"net\",\"rt\",\"join-map\"],\"io\":[],\"io-util\":[\"io\",\"tokio/rt\",\"tokio/io-util\"],\"join-map\":[\"rt\",\"hashbrown\"],\"net\":[\"tokio/net\"],\"rt\":[\"tokio/rt\",\"tokio/sync\",\"futures-util\"],\"time\":[\"tokio/time\",\"slab\"]}}", "tokio_1.47.1": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"async-stream\",\"req\":\"^0.3\"},{\"name\":\"backtrace\",\"req\":\"^0.3.58\",\"target\":\"cfg(tokio_taskdump)\"},{\"name\":\"bytes\",\"optional\":true,\"req\":\"^1.2.1\"},{\"features\":[\"async-await\"],\"kind\":\"dev\",\"name\":\"futures\",\"req\":\"^0.3.0\"},{\"kind\":\"dev\",\"name\":\"futures-concurrency\",\"req\":\"^7.6.3\"},{\"default_features\":false,\"name\":\"io-uring\",\"req\":\"^0.7.6\",\"target\":\"cfg(all(tokio_uring, target_os = \\\"linux\\\"))\"},{\"name\":\"libc\",\"req\":\"^0.2.168\",\"target\":\"cfg(all(tokio_uring, target_os = \\\"linux\\\"))\"},{\"name\":\"libc\",\"optional\":true,\"req\":\"^0.2.168\",\"target\":\"cfg(unix)\"},{\"kind\":\"dev\",\"name\":\"libc\",\"req\":\"^0.2.168\",\"target\":\"cfg(unix)\"},{\"features\":[\"futures\",\"checkpoint\"],\"kind\":\"dev\",\"name\":\"loom\",\"req\":\"^0.7\",\"target\":\"cfg(loom)\"},{\"default_features\":false,\"features\":[\"os-poll\",\"os-ext\"],\"name\":\"mio\",\"req\":\"^1.0.1\",\"target\":\"cfg(all(tokio_uring, target_os = \\\"linux\\\"))\"},{\"default_features\":false,\"name\":\"mio\",\"optional\":true,\"req\":\"^1.0.1\"},{\"features\":[\"tokio\"],\"kind\":\"dev\",\"name\":\"mio-aio\",\"req\":\"^1\",\"target\":\"cfg(target_os = 
\\\"freebsd\\\")\"},{\"kind\":\"dev\",\"name\":\"mockall\",\"req\":\"^0.13.0\"},{\"default_features\":false,\"features\":[\"aio\",\"fs\",\"socket\"],\"kind\":\"dev\",\"name\":\"nix\",\"req\":\"^0.29.0\",\"target\":\"cfg(unix)\"},{\"name\":\"parking_lot\",\"optional\":true,\"req\":\"^0.12.0\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2.11\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1\",\"target\":\"cfg(not(target_family = \\\"wasm\\\"))\"},{\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.9\",\"target\":\"cfg(not(all(target_family = \\\"wasm\\\", target_os = \\\"unknown\\\")))\"},{\"name\":\"signal-hook-registry\",\"optional\":true,\"req\":\"^1.1.1\",\"target\":\"cfg(unix)\"},{\"name\":\"slab\",\"req\":\"^0.4.9\",\"target\":\"cfg(all(tokio_uring, target_os = \\\"linux\\\"))\"},{\"features\":[\"all\"],\"name\":\"socket2\",\"optional\":true,\"req\":\"^0.6.0\",\"target\":\"cfg(not(target_family = \\\"wasm\\\"))\"},{\"kind\":\"dev\",\"name\":\"socket2\",\"req\":\"^0.6.0\",\"target\":\"cfg(not(target_family = \\\"wasm\\\"))\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"^3.1.0\",\"target\":\"cfg(not(target_family = \\\"wasm\\\"))\"},{\"name\":\"tokio-macros\",\"optional\":true,\"req\":\"~2.5.0\"},{\"kind\":\"dev\",\"name\":\"tokio-stream\",\"req\":\"^0.1\"},{\"kind\":\"dev\",\"name\":\"tokio-test\",\"req\":\"^0.4.0\"},{\"default_features\":false,\"features\":[\"std\"],\"name\":\"tracing\",\"optional\":true,\"req\":\"^0.1.29\",\"target\":\"cfg(tokio_unstable)\"},{\"kind\":\"dev\",\"name\":\"tracing-mock\",\"req\":\"=0.1.0-beta.1\",\"target\":\"cfg(all(tokio_unstable, target_has_atomic = \\\"64\\\"))\"},{\"kind\":\"dev\",\"name\":\"wasm-bindgen-test\",\"req\":\"^0.3.0\",\"target\":\"cfg(all(target_family = \\\"wasm\\\", not(target_os = 
\\\"wasi\\\")))\"},{\"name\":\"windows-sys\",\"optional\":true,\"req\":\"^0.59\",\"target\":\"cfg(windows)\"},{\"features\":[\"Win32_Foundation\",\"Win32_Security_Authorization\"],\"kind\":\"dev\",\"name\":\"windows-sys\",\"req\":\"^0.59\",\"target\":\"cfg(windows)\"}],\"features\":{\"default\":[],\"fs\":[],\"full\":[\"fs\",\"io-util\",\"io-std\",\"macros\",\"net\",\"parking_lot\",\"process\",\"rt\",\"rt-multi-thread\",\"signal\",\"sync\",\"time\"],\"io-std\":[],\"io-util\":[\"bytes\"],\"macros\":[\"tokio-macros\"],\"net\":[\"libc\",\"mio/os-poll\",\"mio/os-ext\",\"mio/net\",\"socket2\",\"windows-sys/Win32_Foundation\",\"windows-sys/Win32_Security\",\"windows-sys/Win32_Storage_FileSystem\",\"windows-sys/Win32_System_Pipes\",\"windows-sys/Win32_System_SystemServices\"],\"process\":[\"bytes\",\"libc\",\"mio/os-poll\",\"mio/os-ext\",\"mio/net\",\"signal-hook-registry\",\"windows-sys/Win32_Foundation\",\"windows-sys/Win32_System_Threading\",\"windows-sys/Win32_System_WindowsProgramming\"],\"rt\":[],\"rt-multi-thread\":[\"rt\"],\"signal\":[\"libc\",\"mio/os-poll\",\"mio/net\",\"mio/os-ext\",\"signal-hook-registry\",\"windows-sys/Win32_Foundation\",\"windows-sys/Win32_System_Console\"],\"sync\":[],\"test-util\":[\"rt\",\"sync\",\"time\"],\"time\":[]}}", - "toml_0.8.23": 
"{\"dependencies\":[{\"name\":\"indexmap\",\"optional\":true,\"req\":\"^2.0.0\"},{\"name\":\"serde\",\"req\":\"^1.0.145\"},{\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0.199\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.116\"},{\"features\":[\"serde\"],\"name\":\"serde_spanned\",\"req\":\"^0.6.9\"},{\"kind\":\"dev\",\"name\":\"snapbox\",\"req\":\"^0.6.0\"},{\"kind\":\"dev\",\"name\":\"toml-test-data\",\"req\":\"^2.3.0\"},{\"features\":[\"snapshot\"],\"kind\":\"dev\",\"name\":\"toml-test-harness\",\"req\":\"^1.3.2\"},{\"features\":[\"serde\"],\"name\":\"toml_datetime\",\"req\":\"^0.6.11\"},{\"default_features\":false,\"features\":[\"serde\"],\"name\":\"toml_edit\",\"optional\":true,\"req\":\"^0.22.27\"},{\"kind\":\"dev\",\"name\":\"walkdir\",\"req\":\"^2.5.0\"}],\"features\":{\"default\":[\"parse\",\"display\"],\"display\":[\"dep:toml_edit\",\"toml_edit?/display\"],\"parse\":[\"dep:toml_edit\",\"toml_edit?/parse\"],\"preserve_order\":[\"indexmap\"],\"unbounded\":[\"toml_edit?/unbounded\"]}}", - "toml_datetime_0.6.11": "{\"dependencies\":[{\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.145\"},{\"kind\":\"dev\",\"name\":\"snapbox\",\"req\":\"^0.6.21\"}],\"features\":{}}", - "toml_edit_0.22.27": 
"{\"dependencies\":[{\"features\":[\"std\"],\"name\":\"indexmap\",\"req\":\"^2.3.0\"},{\"features\":[\"max_inline\"],\"name\":\"kstring\",\"optional\":true,\"req\":\"^2.0.0\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1.5.0\"},{\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.145\"},{\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"serde\",\"req\":\"^1.0.199\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.116\"},{\"features\":[\"serde\"],\"name\":\"serde_spanned\",\"optional\":true,\"req\":\"^0.6.9\"},{\"kind\":\"dev\",\"name\":\"snapbox\",\"req\":\"^0.6.0\"},{\"kind\":\"dev\",\"name\":\"toml-test-data\",\"req\":\"^2.3.0\"},{\"features\":[\"snapshot\"],\"kind\":\"dev\",\"name\":\"toml-test-harness\",\"req\":\"^1.3.2\"},{\"name\":\"toml_datetime\",\"req\":\"^0.6.11\"},{\"name\":\"toml_write\",\"optional\":true,\"req\":\"^0.1.2\"},{\"kind\":\"dev\",\"name\":\"walkdir\",\"req\":\"^2.5.0\"},{\"name\":\"winnow\",\"optional\":true,\"req\":\"^0.7.10\"}],\"features\":{\"default\":[\"parse\",\"display\"],\"display\":[\"dep:toml_write\"],\"parse\":[\"dep:winnow\"],\"perf\":[\"dep:kstring\"],\"serde\":[\"dep:serde\",\"toml_datetime/serde\",\"dep:serde_spanned\"],\"unbounded\":[],\"unstable-debug\":[\"winnow?/debug\"]}}", - "toml_write_0.1.2": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1.6.0\"},{\"kind\":\"dev\",\"name\":\"snapbox\",\"req\":\"^0.6.0\"},{\"kind\":\"dev\",\"name\":\"toml_old\",\"package\":\"toml\",\"req\":\"^0.5.10\"}],\"features\":{\"alloc\":[],\"default\":[\"std\"],\"std\":[\"alloc\"]}}", "tonic-build_0.14.2": "{\"dependencies\":[{\"name\":\"prettyplease\",\"req\":\"^0.2\"},{\"name\":\"proc-macro2\",\"req\":\"^1.0\"},{\"name\":\"quote\",\"req\":\"^1.0\"},{\"name\":\"syn\",\"req\":\"^2.0\"}],\"features\":{\"default\":[\"transport\"],\"transport\":[]}}", "tonic-prost-build_0.14.2": 
"{\"dependencies\":[{\"name\":\"prettyplease\",\"req\":\"^0.2\"},{\"name\":\"proc-macro2\",\"req\":\"^1.0\"},{\"name\":\"prost-build\",\"req\":\"^0.14\"},{\"name\":\"prost-types\",\"req\":\"^0.14\"},{\"name\":\"quote\",\"req\":\"^1.0\"},{\"name\":\"syn\",\"req\":\"^2.0\"},{\"name\":\"tempfile\",\"req\":\"^3.0\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"tonic\",\"req\":\"^0.14.0\"},{\"default_features\":false,\"name\":\"tonic-build\",\"req\":\"^0.14.0\"}],\"features\":{\"cleanup-markdown\":[\"prost-build/cleanup-markdown\"],\"default\":[\"transport\",\"cleanup-markdown\"],\"transport\":[\"tonic-build/transport\"]}}", "tonic-prost_0.14.2": "{\"dependencies\":[{\"name\":\"bytes\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"http-body\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"http-body-util\",\"req\":\"^0.1\"},{\"name\":\"prost\",\"req\":\"^0.14\"},{\"features\":[\"macros\",\"rt-multi-thread\"],\"kind\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tokio-stream\",\"req\":\"^0.1\"},{\"default_features\":false,\"name\":\"tonic\",\"req\":\"^0.14.0\"}],\"features\":{}}", @@ -1568,7 +1456,6 @@ "windows_x86_64_gnullvm_0.53.0": "{\"dependencies\":[],\"features\":{}}", "windows_x86_64_msvc_0.52.6": "{\"dependencies\":[],\"features\":{}}", "windows_x86_64_msvc_0.53.0": "{\"dependencies\":[],\"features\":{}}", - "winnow_0.7.13": 
"{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"annotate-snippets\",\"req\":\"^0.11.3\"},{\"name\":\"anstream\",\"optional\":true,\"req\":\"^0.3.2\"},{\"name\":\"anstyle\",\"optional\":true,\"req\":\"^1.0.1\"},{\"kind\":\"dev\",\"name\":\"anyhow\",\"req\":\"^1.0.86\"},{\"kind\":\"dev\",\"name\":\"automod\",\"req\":\"^1.0.14\"},{\"kind\":\"dev\",\"name\":\"circular\",\"req\":\"^0.3.0\"},{\"kind\":\"dev\",\"name\":\"criterion\",\"req\":\"^0.5.1\"},{\"name\":\"is_terminal_polyfill\",\"optional\":true,\"req\":\"^1.48.0\"},{\"kind\":\"dev\",\"name\":\"lexopt\",\"req\":\"^0.3.0\"},{\"default_features\":false,\"name\":\"memchr\",\"optional\":true,\"req\":\"^2.5\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1.2.0\"},{\"kind\":\"dev\",\"name\":\"rustc-hash\",\"req\":\"^1.1.0\"},{\"features\":[\"examples\"],\"kind\":\"dev\",\"name\":\"snapbox\",\"req\":\"^0.6.21\"},{\"kind\":\"dev\",\"name\":\"term-transcript\",\"req\":\"^0.2.0\"},{\"name\":\"terminal_size\",\"optional\":true,\"req\":\"^0.4.0\"}],\"features\":{\"alloc\":[],\"debug\":[\"std\",\"dep:anstream\",\"dep:anstyle\",\"dep:is_terminal_polyfill\",\"dep:terminal_size\"],\"default\":[\"std\"],\"simd\":[\"dep:memchr\"],\"std\":[\"alloc\",\"memchr?/std\"],\"unstable-doc\":[\"alloc\",\"std\",\"simd\",\"unstable-recover\"],\"unstable-recover\":[]}}", "winreg_0.10.1": "{\"dependencies\":[{\"name\":\"chrono\",\"optional\":true,\"req\":\"^0.4.6\"},{\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.3\"},{\"name\":\"serde\",\"optional\":true,\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"serde_derive\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tempfile\",\"req\":\"~3.0\"},{\"features\":[\"impl-default\",\"impl-debug\",\"minwindef\",\"minwinbase\",\"timezoneapi\",\"winerror\",\"winnt\",\"winreg\",\"handleapi\"],\"name\":\"winapi\",\"req\":\"^0.3.9\"}],\"features\":{\"serialization-serde\":[\"transactions\",\"serde\"],\"transactions\":[\"winapi/ktmw32\"]}}", "winx_0.36.4": 
"{\"dependencies\":[{\"name\":\"bitflags\",\"req\":\"^2.4\"},{\"features\":[\"Win32_Foundation\",\"Win32_Storage_FileSystem\",\"Win32_System_IO\",\"Win32_System_Ioctl\",\"Win32_System_LibraryLoader\",\"Win32_System_Performance\",\"Win32_System_SystemServices\"],\"name\":\"windows-sys\",\"req\":\">=0.52, <=0.59\",\"target\":\"cfg(windows)\"}],\"features\":{}}", "wit-bindgen-rt_0.39.0": "{\"dependencies\":[{\"name\":\"bitflags\",\"optional\":true,\"req\":\"^2.3.3\"},{\"name\":\"futures\",\"optional\":true,\"req\":\"^0.3.30\"},{\"name\":\"once_cell\",\"optional\":true,\"req\":\"^1.19.0\"}],\"features\":{\"async\":[\"dep:futures\",\"dep:once_cell\"]}}", diff --git a/bazel/patches/go_compiler_flags.patch b/bazel/patches/go_compiler_flags.patch new file mode 100644 index 000000000..1e21552da --- /dev/null +++ b/bazel/patches/go_compiler_flags.patch @@ -0,0 +1,84 @@ +From 99d7121934a9cfa7963d3a9bfd840779fd2869f6 Mon Sep 17 00:00:00 2001 +From: Corentin Kerisit +Date: Tue, 16 Dec 2025 23:53:01 +0000 +Subject: [PATCH] cmd/link: add more clang driver flags when testing flag + +This changes does 2 things: + +- Move `-L` to `prefixesToKeep` since it allows providing a custom +default libs search path. + +- Allow various flags that impact the behaviour of the clang driver. + +The latter allows for LLVM only toolchains to be compatible with +linkerFlagSupported checks. + +The end goal of this PR is to allow fully hermetic toolchains, +especially pure LLVM ones, to be used to cross-compile CGO. 
+ +Fixes #76825 + +Change-Id: I2311c9566ce9c7e8f6b325258af58eb333663cf0 +GitHub-Last-Rev: 74342aae35124cf174a3f8b888999ffd4cea191f +GitHub-Pull-Request: golang/go#76858 +Reviewed-on: https://go-review.googlesource.com/c/go/+/730561 +Auto-Submit: Ian Lance Taylor +Reviewed-by: Dmitri Shuralyov +LUCI-TryBot-Result: Go LUCI +Reviewed-by: Michael Pratt +Reviewed-by: Ian Lance Taylor +--- + src/cmd/link/internal/ld/lib.go | 20 +++++++++++++++++++- + 1 file changed, 19 insertions(+), 1 deletion(-) + +diff --git a/src/cmd/link/internal/ld/lib.go b/src/cmd/link/internal/ld/lib.go +index bcad5add4abe19..3799aafac769fa 100644 +--- a/src/cmd/link/internal/ld/lib.go ++++ b/src/cmd/link/internal/ld/lib.go +@@ -2208,20 +2208,30 @@ func trimLinkerArgv(argv []string) []string { + flagsWithNextArgSkip := []string{ + "-F", + "-l", +- "-L", + "-framework", + "-Wl,-framework", + "-Wl,-rpath", + "-Wl,-undefined", + } + flagsWithNextArgKeep := []string{ ++ "-B", ++ "-L", + "-arch", + "-isysroot", + "--sysroot", + "-target", + "--target", ++ "-resource-dir", ++ "-rtlib", ++ "--rtlib", ++ "-stdlib", ++ "--stdlib", ++ "-unwindlib", ++ "--unwindlib", + } + prefixesToKeep := []string{ ++ "-B", ++ "-L", + "-f", + "-m", + "-p", +@@ -2231,6 +2241,14 @@ func trimLinkerArgv(argv []string) []string { + "--sysroot", + "-target", + "--target", ++ "-resource-dir", ++ "-rtlib", ++ "--rtlib", ++ "-stdlib", ++ "--stdlib", ++ "-unwindlib", ++ "--unwindlib", ++ "-nostdlib++", + } + + var flags []string diff --git a/bazel/proto/BUILD.bazel b/bazel/proto/BUILD.bazel index 89feba0ef..d99bbc8ed 100644 --- a/bazel/proto/BUILD.bazel +++ b/bazel/proto/BUILD.bazel @@ -15,6 +15,8 @@ descriptor_set( "@bazel//src/main/protobuf:failure_details_proto", "@bazel//src/main/protobuf:invocation_policy_proto", "@bazel//src/main/protobuf:build_proto", - "@googleapis//google/devtools/build/v1:build_proto" + "@bazel//src/main/protobuf:remote_execution_log_proto", + "@googleapis//google/devtools/build/v1:build_proto", + 
"@remoteapis//:build_bazel_remote_execution_v2_remote_execution_proto" ], ) diff --git a/bazel/proto/MODULE.bazel.lock b/bazel/proto/MODULE.bazel.lock new file mode 100644 index 000000000..7519bf87e --- /dev/null +++ b/bazel/proto/MODULE.bazel.lock @@ -0,0 +1,731 @@ +{ + "lockFileVersion": 16, + "registryFileHashes": { + "https://bcr.bazel.build/bazel_registry.json": "8a28e4aff06ee60aed2a8c281907fb8bcbf3b753c91fb5a5c57da3215d5b3497", + "https://bcr.bazel.build/modules/abseil-cpp/20210324.2/MODULE.bazel": "7cd0312e064fde87c8d1cd79ba06c876bd23630c83466e9500321be55c96ace2", + "https://bcr.bazel.build/modules/abseil-cpp/20211102.0/MODULE.bazel": "70390338f7a5106231d20620712f7cccb659cd0e9d073d1991c038eb9fc57589", + "https://bcr.bazel.build/modules/abseil-cpp/20220623.1/MODULE.bazel": "73ae41b6818d423a11fd79d95aedef1258f304448193d4db4ff90e5e7a0f076c", + "https://bcr.bazel.build/modules/abseil-cpp/20230125.1/MODULE.bazel": "89047429cb0207707b2dface14ba7f8df85273d484c2572755be4bab7ce9c3a0", + "https://bcr.bazel.build/modules/abseil-cpp/20230802.0.bcr.1/MODULE.bazel": "1c8cec495288dccd14fdae6e3f95f772c1c91857047a098fad772034264cc8cb", + "https://bcr.bazel.build/modules/abseil-cpp/20230802.0/MODULE.bazel": "d253ae36a8bd9ee3c5955384096ccb6baf16a1b1e93e858370da0a3b94f77c16", + "https://bcr.bazel.build/modules/abseil-cpp/20230802.1/MODULE.bazel": "fa92e2eb41a04df73cdabeec37107316f7e5272650f81d6cc096418fe647b915", + "https://bcr.bazel.build/modules/abseil-cpp/20240116.0/MODULE.bazel": "98dc378d64c12a4e4741ad3362f87fb737ee6a0886b2d90c3cdbb4d93ea3e0bf", + "https://bcr.bazel.build/modules/abseil-cpp/20240116.1/MODULE.bazel": "37bcdb4440fbb61df6a1c296ae01b327f19e9bb521f9b8e26ec854b6f97309ed", + "https://bcr.bazel.build/modules/abseil-cpp/20240116.2/MODULE.bazel": "73939767a4686cd9a520d16af5ab440071ed75cec1a876bf2fcfaf1f71987a16", + "https://bcr.bazel.build/modules/abseil-cpp/20240722.0/MODULE.bazel": "88668a07647adbdc14cb3a7cd116fb23c9dda37a90a1681590b6c9d8339a5b84", + 
"https://bcr.bazel.build/modules/abseil-cpp/20240722.0/source.json": "59af9f8a8a4817092624e21263fe1fb7d7951a3b06f0570c610c7e5a9caf5f29", + "https://bcr.bazel.build/modules/apple_support/1.15.1/MODULE.bazel": "a0556fefca0b1bb2de8567b8827518f94db6a6e7e7d632b4c48dc5f865bc7c85", + "https://bcr.bazel.build/modules/apple_support/1.15.1/source.json": "517f2b77430084c541bc9be2db63fdcbb7102938c5f64c17ee60ffda2e5cf07b", + "https://bcr.bazel.build/modules/apple_support/1.8.1/MODULE.bazel": "500f7aa32c008222e360dc9a158c248c2dbaeb3b6246c19e7269981dbd61e29b", + "https://bcr.bazel.build/modules/bazel_ci_rules/1.0.0/MODULE.bazel": "0f92c944b9c466066ed484cfc899cf43fca765df78caca18984c62479f7925eb", + "https://bcr.bazel.build/modules/bazel_ci_rules/1.0.0/source.json": "3405a2a7f9f827a44934b01470faeac1b56fb1304955c98ee9fcd03ad2ca5dcc", + "https://bcr.bazel.build/modules/bazel_features/1.0.0/MODULE.bazel": "d7f022dc887efb96e1ee51cec7b2e48d41e36ff59a6e4f216c40e4029e1585bf", + "https://bcr.bazel.build/modules/bazel_features/1.1.0/MODULE.bazel": "cfd42ff3b815a5f39554d97182657f8c4b9719568eb7fded2b9135f084bf760b", + "https://bcr.bazel.build/modules/bazel_features/1.1.1/MODULE.bazel": "27b8c79ef57efe08efccbd9dd6ef70d61b4798320b8d3c134fd571f78963dbcd", + "https://bcr.bazel.build/modules/bazel_features/1.11.0/MODULE.bazel": "f9382337dd5a474c3b7d334c2f83e50b6eaedc284253334cf823044a26de03e8", + "https://bcr.bazel.build/modules/bazel_features/1.15.0/MODULE.bazel": "d38ff6e517149dc509406aca0db3ad1efdd890a85e049585b7234d04238e2a4d", + "https://bcr.bazel.build/modules/bazel_features/1.17.0/MODULE.bazel": "039de32d21b816b47bd42c778e0454217e9c9caac4a3cf8e15c7231ee3ddee4d", + "https://bcr.bazel.build/modules/bazel_features/1.18.0/MODULE.bazel": "1be0ae2557ab3a72a57aeb31b29be347bcdc5d2b1eb1e70f39e3851a7e97041a", + "https://bcr.bazel.build/modules/bazel_features/1.19.0/MODULE.bazel": "59adcdf28230d220f0067b1f435b8537dd033bfff8db21335ef9217919c7fb58", + 
"https://bcr.bazel.build/modules/bazel_features/1.21.0/MODULE.bazel": "675642261665d8eea09989aa3b8afb5c37627f1be178382c320d1b46afba5e3b", + "https://bcr.bazel.build/modules/bazel_features/1.28.0/MODULE.bazel": "4b4200e6cbf8fa335b2c3f43e1d6ef3e240319c33d43d60cc0fbd4b87ece299d", + "https://bcr.bazel.build/modules/bazel_features/1.28.0/source.json": "16a3fc5b4483cb307643791f5a4b7365fa98d2e70da7c378cdbde55f0c0b32cf", + "https://bcr.bazel.build/modules/bazel_features/1.3.0/MODULE.bazel": "cdcafe83ec318cda34e02948e81d790aab8df7a929cec6f6969f13a489ccecd9", + "https://bcr.bazel.build/modules/bazel_features/1.4.1/MODULE.bazel": "e45b6bb2350aff3e442ae1111c555e27eac1d915e77775f6fdc4b351b758b5d7", + "https://bcr.bazel.build/modules/bazel_features/1.9.1/MODULE.bazel": "8f679097876a9b609ad1f60249c49d68bfab783dd9be012faf9d82547b14815a", + "https://bcr.bazel.build/modules/bazel_skylib/1.0.3/MODULE.bazel": "bcb0fd896384802d1ad283b4e4eb4d718eebd8cb820b0a2c3a347fb971afd9d8", + "https://bcr.bazel.build/modules/bazel_skylib/1.1.1/MODULE.bazel": "1add3e7d93ff2e6998f9e118022c84d163917d912f5afafb3058e3d2f1545b5e", + "https://bcr.bazel.build/modules/bazel_skylib/1.2.0/MODULE.bazel": "44fe84260e454ed94ad326352a698422dbe372b21a1ac9f3eab76eb531223686", + "https://bcr.bazel.build/modules/bazel_skylib/1.2.1/MODULE.bazel": "f35baf9da0efe45fa3da1696ae906eea3d615ad41e2e3def4aeb4e8bc0ef9a7a", + "https://bcr.bazel.build/modules/bazel_skylib/1.3.0/MODULE.bazel": "20228b92868bf5cfc41bda7afc8a8ba2a543201851de39d990ec957b513579c5", + "https://bcr.bazel.build/modules/bazel_skylib/1.4.1/MODULE.bazel": "a0dcb779424be33100dcae821e9e27e4f2901d9dfd5333efe5ac6a8d7ab75e1d", + "https://bcr.bazel.build/modules/bazel_skylib/1.4.2/MODULE.bazel": "3bd40978e7a1fac911d5989e6b09d8f64921865a45822d8b09e815eaa726a651", + "https://bcr.bazel.build/modules/bazel_skylib/1.5.0/MODULE.bazel": "32880f5e2945ce6a03d1fbd588e9198c0a959bb42297b2cfaf1685b7bc32e138", + "https://bcr.bazel.build/modules/bazel_skylib/1.6.1/MODULE.bazel": 
"8fdee2dbaace6c252131c00e1de4b165dc65af02ea278476187765e1a617b917", + "https://bcr.bazel.build/modules/bazel_skylib/1.7.0/MODULE.bazel": "0db596f4563de7938de764cc8deeabec291f55e8ec15299718b93c4423e9796d", + "https://bcr.bazel.build/modules/bazel_skylib/1.7.1/MODULE.bazel": "3120d80c5861aa616222ec015332e5f8d3171e062e3e804a2a0253e1be26e59b", + "https://bcr.bazel.build/modules/bazel_skylib/1.7.1/source.json": "f121b43eeefc7c29efbd51b83d08631e2347297c95aac9764a701f2a6a2bb953", + "https://bcr.bazel.build/modules/blake3/1.5.1.bcr.1/MODULE.bazel": "6f22a783790d834c8e2c91ab85848e781e65078a96304e99e4595763622b171a", + "https://bcr.bazel.build/modules/blake3/1.5.1.bcr.1/source.json": "0e27e27f359ae8fdc140f8ae0891bb719664c3d6a0ab1e7cdb9b8ae372c72f17", + "https://bcr.bazel.build/modules/boringssl/0.0.0-20211025-d4f1ab9/MODULE.bazel": "6ee6353f8b1a701fe2178e1d925034294971350b6d3ac37e67e5a7d463267834", + "https://bcr.bazel.build/modules/boringssl/0.0.0-20230215-5c22014/MODULE.bazel": "4b03dc0d04375fa0271174badcd202ed249870c8e895b26664fd7298abea7282", + "https://bcr.bazel.build/modules/boringssl/0.0.0-20230215-5c22014/source.json": "f90873cd3d891bb63ece55a527d97366da650f84c79c2109bea29c17629bee20", + "https://bcr.bazel.build/modules/buildozer/7.1.2/MODULE.bazel": "2e8dd40ede9c454042645fd8d8d0cd1527966aa5c919de86661e62953cd73d84", + "https://bcr.bazel.build/modules/buildozer/7.1.2/source.json": "c9028a501d2db85793a6996205c8de120944f50a0d570438fcae0457a5f9d1f8", + "https://bcr.bazel.build/modules/c-ares/1.15.0/MODULE.bazel": "ba0a78360fdc83f02f437a9e7df0532ad1fbaa59b722f6e715c11effebaa0166", + "https://bcr.bazel.build/modules/c-ares/1.15.0/source.json": "5e3ed991616c5ec4cc09b0893b29a19232de4a1830eb78c567121bfea87453f7", + "https://bcr.bazel.build/modules/curl/8.4.0/MODULE.bazel": "0bc250aa1cb69590049383df7a9537c809591fcf876c620f5f097c58fdc9bc10", + "https://bcr.bazel.build/modules/curl/8.4.0/source.json": "8b9532397af6a24be4ec118d8637b1f4e3e5a0d4be672c94b2275d675c7f7d6b", + 
"https://bcr.bazel.build/modules/gazelle/0.27.0/MODULE.bazel": "3446abd608295de6d90b4a8a118ed64a9ce11dcb3dda2dc3290a22056bd20996", + "https://bcr.bazel.build/modules/gazelle/0.30.0/MODULE.bazel": "f888a1effe338491f35f0e0e85003b47bb9d8295ccba73c37e07702d8d31c65b", + "https://bcr.bazel.build/modules/gazelle/0.32.0/MODULE.bazel": "b499f58a5d0d3537f3cf5b76d8ada18242f64ec474d8391247438bf04f58c7b8", + "https://bcr.bazel.build/modules/gazelle/0.33.0/MODULE.bazel": "a13a0f279b462b784fb8dd52a4074526c4a2afe70e114c7d09066097a46b3350", + "https://bcr.bazel.build/modules/gazelle/0.34.0/MODULE.bazel": "abdd8ce4d70978933209db92e436deb3a8b737859e9354fb5fd11fb5c2004c8a", + "https://bcr.bazel.build/modules/gazelle/0.36.0/MODULE.bazel": "e375d5d6e9a6ca59b0cb38b0540bc9a05b6aa926d322f2de268ad267a2ee74c0", + "https://bcr.bazel.build/modules/gazelle/0.36.0/source.json": "0823f097b127e0201ae55d85647c94095edfe27db0431a7ae880dcab08dfaa04", + "https://bcr.bazel.build/modules/google_benchmark/1.8.2/MODULE.bazel": "a70cf1bba851000ba93b58ae2f6d76490a9feb74192e57ab8e8ff13c34ec50cb", + "https://bcr.bazel.build/modules/google_benchmark/1.8.4/MODULE.bazel": "c6d54a11dcf64ee63545f42561eda3fd94c1b5f5ebe1357011de63ae33739d5e", + "https://bcr.bazel.build/modules/google_benchmark/1.8.4/source.json": "84590f7bc5a1fd99e1ef274ee16bb41c214f705e62847b42e705010dfa81fe53", + "https://bcr.bazel.build/modules/googleapis-rules-registry/1.0.0/MODULE.bazel": "97c6a4d413b373d4cc97065da3de1b2166e22cbbb5f4cc9f05760bfa83619e24", + "https://bcr.bazel.build/modules/googleapis-rules-registry/1.0.0/source.json": "cf611c836a60e98e2e2ab2de8004f119e9f06878dcf4ea2d95a437b1b7a89fe9", + "https://bcr.bazel.build/modules/googleapis/0.0.0-20240326-1c8d509c5/MODULE.bazel": "a4b7e46393c1cdcc5a00e6f85524467c48c565256b22b5fae20f84ab4a999a68", + "https://bcr.bazel.build/modules/googleapis/0.0.0-20240819-fe8ba054a/MODULE.bazel": "117b7c7be7327ed5d6c482274533f2dbd78631313f607094d4625c28203cacdf", + 
"https://bcr.bazel.build/modules/googleapis/0.0.0-20250604-de157ca3/MODULE.bazel": "de6044bf0edf78f1f51b800a0633dc7dd305aaf6ee878116757f98cd1a05477d", + "https://bcr.bazel.build/modules/googleapis/0.0.0-20250604-de157ca3/source.json": "44b8b8174f416116daaa9345857b87b87351baf8b19e8f838a32f1ce08345be6", + "https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4", + "https://bcr.bazel.build/modules/googletest/1.14.0.bcr.1/MODULE.bazel": "22c31a561553727960057361aa33bf20fb2e98584bc4fec007906e27053f80c6", + "https://bcr.bazel.build/modules/googletest/1.14.0/MODULE.bazel": "cfbcbf3e6eac06ef9d85900f64424708cc08687d1b527f0ef65aa7517af8118f", + "https://bcr.bazel.build/modules/googletest/1.15.2/MODULE.bazel": "6de1edc1d26cafb0ea1a6ab3f4d4192d91a312fd2d360b63adaa213cd00b2108", + "https://bcr.bazel.build/modules/googletest/1.15.2/source.json": "dbdda654dcb3a0d7a8bc5d0ac5fc7e150b58c2a986025ae5bc634bb2cb61f470", + "https://bcr.bazel.build/modules/grpc-java/1.62.2/MODULE.bazel": "99b8771e8c7cacb130170fed2a10c9e8fed26334a93e73b42d2953250885a158", + "https://bcr.bazel.build/modules/grpc-java/1.66.0/MODULE.bazel": "86ff26209fac846adb89db11f3714b3dc0090fb2fb81575673cc74880cda4e7e", + "https://bcr.bazel.build/modules/grpc-java/1.66.0/source.json": "f841b339ff8516c86c3a5272cd053194dd0cb2fdd63157123835e1157a28328d", + "https://bcr.bazel.build/modules/grpc-proto/0.0.0-20240627-ec30f58/MODULE.bazel": "88de79051e668a04726e9ea94a481ec6f1692086735fd6f488ab908b3b909238", + "https://bcr.bazel.build/modules/grpc-proto/0.0.0-20240627-ec30f58/source.json": "5035d379c61042930244ab59e750106d893ec440add92ec0df6a0098ca7f131d", + "https://bcr.bazel.build/modules/grpc/1.41.0/MODULE.bazel": "5bcbfc2b274dabea628f0649dc50c90cf36543b1cfc31624832538644ad1aae8", + "https://bcr.bazel.build/modules/grpc/1.56.3.bcr.1/MODULE.bazel": "cd5b1eb276b806ec5ab85032921f24acc51735a69ace781be586880af20ab33f", + 
"https://bcr.bazel.build/modules/grpc/1.66.0.bcr.2/MODULE.bazel": "0fa2b0fd028ce354febf0fe90f1ed8fecfbfc33118cddd95ac0418cc283333a0", + "https://bcr.bazel.build/modules/grpc/1.66.0.bcr.2/source.json": "d2b273a925507d47b5e2d6852f194e70d2991627d71b13793cc2498400d4f99e", + "https://bcr.bazel.build/modules/jsoncpp/1.9.5/MODULE.bazel": "31271aedc59e815656f5736f282bb7509a97c7ecb43e927ac1a37966e0578075", + "https://bcr.bazel.build/modules/jsoncpp/1.9.5/source.json": "4108ee5085dd2885a341c7fab149429db457b3169b86eb081fa245eadf69169d", + "https://bcr.bazel.build/modules/libpfm/4.11.0/MODULE.bazel": "45061ff025b301940f1e30d2c16bea596c25b176c8b6b3087e92615adbd52902", + "https://bcr.bazel.build/modules/libpfm/4.11.0/source.json": "caaffb3ac2b59b8aac456917a4ecf3167d40478ee79f15ab7a877ec9273937c9", + "https://bcr.bazel.build/modules/nlohmann_json/3.11.3/MODULE.bazel": "87023db2f55fc3a9949c7b08dc711fae4d4be339a80a99d04453c4bb3998eefc", + "https://bcr.bazel.build/modules/nlohmann_json/3.11.3/source.json": "296c63a90c6813e53b3812d24245711981fc7e563d98fe15625f55181494488a", + "https://bcr.bazel.build/modules/nlohmann_json/3.6.1/MODULE.bazel": "6f7b417dcc794d9add9e556673ad25cb3ba835224290f4f848f8e2db1e1fca74", + "https://bcr.bazel.build/modules/opentelemetry-cpp/1.14.2/MODULE.bazel": "089a5613c2a159c7dfde098dabfc61e966889c7d6a81a98422a84c51535ed17d", + "https://bcr.bazel.build/modules/opentelemetry-cpp/1.14.2/source.json": "0c5f85ab9e5894c6f1382cf58ba03a6cd024f0592bee2229f99db216ef0c6764", + "https://bcr.bazel.build/modules/opentelemetry-proto/1.1.0/MODULE.bazel": "a49f406e99bf05ab43ed4f5b3322fbd33adfd484b6546948929d1316299b68bf", + "https://bcr.bazel.build/modules/opentelemetry-proto/1.1.0/source.json": "39ffadc4b7d9ccc0c0f45422510cbaeb8eca7b26e68d4142fc3ff18b4c2711b6", + "https://bcr.bazel.build/modules/opentracing-cpp/1.6.0/MODULE.bazel": "b3925269f63561b8b880ae7cf62ccf81f6ece55b62cd791eda9925147ae116ec", + "https://bcr.bazel.build/modules/opentracing-cpp/1.6.0/source.json": 
"da1cb1add160f5e5074b7272e9db6fd8f1b3336c15032cd0a653af9d2f484aed", + "https://bcr.bazel.build/modules/platforms/0.0.10/MODULE.bazel": "8cb8efaf200bdeb2150d93e162c40f388529a25852b332cec879373771e48ed5", + "https://bcr.bazel.build/modules/platforms/0.0.10/source.json": "f22828ff4cf021a6b577f1bf6341cb9dcd7965092a439f64fc1bb3b7a5ae4bd5", + "https://bcr.bazel.build/modules/platforms/0.0.4/MODULE.bazel": "9b328e31ee156f53f3c416a64f8491f7eb731742655a47c9eec4703a71644aee", + "https://bcr.bazel.build/modules/platforms/0.0.5/MODULE.bazel": "5733b54ea419d5eaf7997054bb55f6a1d0b5ff8aedf0176fef9eea44f3acda37", + "https://bcr.bazel.build/modules/platforms/0.0.6/MODULE.bazel": "ad6eeef431dc52aefd2d77ed20a4b353f8ebf0f4ecdd26a807d2da5aa8cd0615", + "https://bcr.bazel.build/modules/platforms/0.0.7/MODULE.bazel": "72fd4a0ede9ee5c021f6a8dd92b503e089f46c227ba2813ff183b71616034814", + "https://bcr.bazel.build/modules/platforms/0.0.8/MODULE.bazel": "9f142c03e348f6d263719f5074b21ef3adf0b139ee4c5133e2aa35664da9eb2d", + "https://bcr.bazel.build/modules/platforms/0.0.9/MODULE.bazel": "4a87a60c927b56ddd67db50c89acaa62f4ce2a1d2149ccb63ffd871d5ce29ebc", + "https://bcr.bazel.build/modules/prometheus-cpp/1.2.4/MODULE.bazel": "0fbe5dcff66311947a3f6b86ebc6a6d9328e31a28413ca864debc4a043f371e5", + "https://bcr.bazel.build/modules/prometheus-cpp/1.2.4/source.json": "aa58bb10d0bb0dcaf4ad2c509ddcec23d2e94c3935e21517a5adbc2363248a55", + "https://bcr.bazel.build/modules/protobuf/21.7/MODULE.bazel": "a5a29bb89544f9b97edce05642fac225a808b5b7be74038ea3640fae2f8e66a7", + "https://bcr.bazel.build/modules/protobuf/23.1/MODULE.bazel": "88b393b3eb4101d18129e5db51847cd40a5517a53e81216144a8c32dfeeca52a", + "https://bcr.bazel.build/modules/protobuf/26.0.bcr.2/MODULE.bazel": "62e0b84ca727bdeb55a6fe1ef180e6b191bbe548a58305ea1426c158067be534", + "https://bcr.bazel.build/modules/protobuf/27.0/MODULE.bazel": "7873b60be88844a0a1d8f80b9d5d20cfbd8495a689b8763e76c6372998d3f64c", + 
"https://bcr.bazel.build/modules/protobuf/27.1/MODULE.bazel": "703a7b614728bb06647f965264967a8ef1c39e09e8f167b3ca0bb1fd80449c0d", + "https://bcr.bazel.build/modules/protobuf/29.0-rc2/MODULE.bazel": "6241d35983510143049943fc0d57937937122baf1b287862f9dc8590fc4c37df", + "https://bcr.bazel.build/modules/protobuf/29.0/MODULE.bazel": "319dc8bf4c679ff87e71b1ccfb5a6e90a6dbc4693501d471f48662ac46d04e4e", + "https://bcr.bazel.build/modules/protobuf/29.1/MODULE.bazel": "557c3457560ff49e122ed76c0bc3397a64af9574691cb8201b4e46d4ab2ecb95", + "https://bcr.bazel.build/modules/protobuf/29.1/source.json": "04cca85dce26b895ed037d98336d860367fe09919208f2ad383f0df1aff63199", + "https://bcr.bazel.build/modules/protobuf/3.19.0/MODULE.bazel": "6b5fbb433f760a99a22b18b6850ed5784ef0e9928a72668b66e4d7ccd47db9b0", + "https://bcr.bazel.build/modules/protobuf/3.19.2/MODULE.bazel": "532ffe5f2186b69fdde039efe6df13ba726ff338c6bc82275ad433013fa10573", + "https://bcr.bazel.build/modules/protobuf/3.19.6/MODULE.bazel": "9233edc5e1f2ee276a60de3eaa47ac4132302ef9643238f23128fea53ea12858", + "https://bcr.bazel.build/modules/pybind11_bazel/2.11.1/MODULE.bazel": "88af1c246226d87e65be78ed49ecd1e6f5e98648558c14ce99176da041dc378e", + "https://bcr.bazel.build/modules/pybind11_bazel/2.12.0/MODULE.bazel": "e6f4c20442eaa7c90d7190d8dc539d0ab422f95c65a57cc59562170c58ae3d34", + "https://bcr.bazel.build/modules/pybind11_bazel/2.12.0/source.json": "6900fdc8a9e95866b8c0d4ad4aba4d4236317b5c1cd04c502df3f0d33afed680", + "https://bcr.bazel.build/modules/re2/2021-09-01/MODULE.bazel": "bcb6b96f3b071e6fe2d8bed9cc8ada137a105f9d2c5912e91d27528b3d123833", + "https://bcr.bazel.build/modules/re2/2023-09-01/MODULE.bazel": "cb3d511531b16cfc78a225a9e2136007a48cf8a677e4264baeab57fe78a80206", + "https://bcr.bazel.build/modules/re2/2024-07-02/MODULE.bazel": "0eadc4395959969297cbcf31a249ff457f2f1d456228c67719480205aa306daa", + "https://bcr.bazel.build/modules/re2/2024-07-02/source.json": 
"547d0111a9d4f362db32196fef805abbf3676e8d6afbe44d395d87816c1130ca", + "https://bcr.bazel.build/modules/rules_android/0.1.1/MODULE.bazel": "48809ab0091b07ad0182defb787c4c5328bd3a278938415c00a7b69b50c4d3a8", + "https://bcr.bazel.build/modules/rules_android/0.1.1/source.json": "e6986b41626ee10bdc864937ffb6d6bf275bb5b9c65120e6137d56e6331f089e", + "https://bcr.bazel.build/modules/rules_apple/3.5.1/MODULE.bazel": "3d1bbf65ad3692003d36d8a29eff54d4e5c1c5f4bfb60f79e28646a924d9101c", + "https://bcr.bazel.build/modules/rules_apple/3.5.1/source.json": "e7593cdf26437d35dbda64faeaf5b82cbdd9df72674b0f041fdde75c1d20dda7", + "https://bcr.bazel.build/modules/rules_cc/0.0.1/MODULE.bazel": "cb2aa0747f84c6c3a78dad4e2049c154f08ab9d166b1273835a8174940365647", + "https://bcr.bazel.build/modules/rules_cc/0.0.10/MODULE.bazel": "ec1705118f7eaedd6e118508d3d26deba2a4e76476ada7e0e3965211be012002", + "https://bcr.bazel.build/modules/rules_cc/0.0.13/MODULE.bazel": "0e8529ed7b323dad0775ff924d2ae5af7640b23553dfcd4d34344c7e7a867191", + "https://bcr.bazel.build/modules/rules_cc/0.0.14/MODULE.bazel": "5e343a3aac88b8d7af3b1b6d2093b55c347b8eefc2e7d1442f7a02dc8fea48ac", + "https://bcr.bazel.build/modules/rules_cc/0.0.15/MODULE.bazel": "6704c35f7b4a72502ee81f61bf88706b54f06b3cbe5558ac17e2e14666cd5dcc", + "https://bcr.bazel.build/modules/rules_cc/0.0.16/MODULE.bazel": "7661303b8fc1b4d7f532e54e9d6565771fea666fbdf839e0a86affcd02defe87", + "https://bcr.bazel.build/modules/rules_cc/0.0.2/MODULE.bazel": "6915987c90970493ab97393024c156ea8fb9f3bea953b2f3ec05c34f19b5695c", + "https://bcr.bazel.build/modules/rules_cc/0.0.5/MODULE.bazel": "be41f87587998fe8890cd82ea4e848ed8eb799e053c224f78f3ff7fe1a1d9b74", + "https://bcr.bazel.build/modules/rules_cc/0.0.6/MODULE.bazel": "abf360251023dfe3efcef65ab9d56beefa8394d4176dd29529750e1c57eaa33f", + "https://bcr.bazel.build/modules/rules_cc/0.0.8/MODULE.bazel": "964c85c82cfeb6f3855e6a07054fdb159aced38e99a5eecf7bce9d53990afa3e", + 
"https://bcr.bazel.build/modules/rules_cc/0.0.9/MODULE.bazel": "836e76439f354b89afe6a911a7adf59a6b2518fafb174483ad78a2a2fde7b1c5", + "https://bcr.bazel.build/modules/rules_cc/0.2.13/MODULE.bazel": "eecdd666eda6be16a8d9dc15e44b5c75133405e820f620a234acc4b1fdc5aa37", + "https://bcr.bazel.build/modules/rules_cc/0.2.13/source.json": "f872e892c5265c5532e526857532f4868708f88d64e5ebe517ea72e09da61bdb", + "https://bcr.bazel.build/modules/rules_foreign_cc/0.10.1/MODULE.bazel": "b9527010e5fef060af92b6724edb3691970a5b1f76f74b21d39f7d433641be60", + "https://bcr.bazel.build/modules/rules_foreign_cc/0.10.1/source.json": "9300e71df0cdde0952f10afff1401fa664e9fc5d9ae6204660ba1b158d90d6a6", + "https://bcr.bazel.build/modules/rules_foreign_cc/0.9.0/MODULE.bazel": "c9e8c682bf75b0e7c704166d79b599f93b72cfca5ad7477df596947891feeef6", + "https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/MODULE.bazel": "40c97d1144356f52905566c55811f13b299453a14ac7769dfba2ac38192337a8", + "https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/source.json": "c8b1e2c717646f1702290959a3302a178fb639d987ab61d548105019f11e527e", + "https://bcr.bazel.build/modules/rules_go/0.33.0/MODULE.bazel": "a2b11b64cd24bf94f57454f53288a5dacfe6cb86453eee7761b7637728c1910c", + "https://bcr.bazel.build/modules/rules_go/0.38.1/MODULE.bazel": "fb8e73dd3b6fc4ff9d260ceacd830114891d49904f5bda1c16bc147bcc254f71", + "https://bcr.bazel.build/modules/rules_go/0.39.1/MODULE.bazel": "d34fb2a249403a5f4339c754f1e63dc9e5ad70b47c5e97faee1441fc6636cd61", + "https://bcr.bazel.build/modules/rules_go/0.41.0/MODULE.bazel": "55861d8e8bb0e62cbd2896f60ff303f62ffcb0eddb74ecb0e5c0cbe36fc292c8", + "https://bcr.bazel.build/modules/rules_go/0.42.0/MODULE.bazel": "8cfa875b9aa8c6fce2b2e5925e73c1388173ea3c32a0db4d2b4804b453c14270", + "https://bcr.bazel.build/modules/rules_go/0.46.0/MODULE.bazel": "3477df8bdcc49e698b9d25f734c4f3a9f5931ff34ee48a2c662be168f5f2d3fd", + "https://bcr.bazel.build/modules/rules_go/0.48.0/MODULE.bazel": 
"d00ebcae0908ee3f5e6d53f68677a303d6d59a77beef879598700049c3980a03", + "https://bcr.bazel.build/modules/rules_go/0.48.0/source.json": "895dc1698fd7c5959f92868f3a87156ad1ed8d876668bfa918fa0a623fb1eb22", + "https://bcr.bazel.build/modules/rules_graalvm/0.11.1/MODULE.bazel": "0caaea2dff60b70b8f9b9ceb6e5ae815b85ae610a0392433a22c755b2f2c2456", + "https://bcr.bazel.build/modules/rules_graalvm/0.11.1/source.json": "23d59a63e1dce95df987d19284bad81a4bf7e4df788d47b0ad99c217447bceca", + "https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74", + "https://bcr.bazel.build/modules/rules_java/5.1.0/MODULE.bazel": "324b6478b0343a3ce7a9add8586ad75d24076d6d43d2f622990b9c1cfd8a1b15", + "https://bcr.bazel.build/modules/rules_java/5.3.5/MODULE.bazel": "a4ec4f2db570171e3e5eb753276ee4b389bae16b96207e9d3230895c99644b86", + "https://bcr.bazel.build/modules/rules_java/6.0.0/MODULE.bazel": "8a43b7df601a7ec1af61d79345c17b31ea1fedc6711fd4abfd013ea612978e39", + "https://bcr.bazel.build/modules/rules_java/6.4.0/MODULE.bazel": "e986a9fe25aeaa84ac17ca093ef13a4637f6107375f64667a15999f77db6c8f6", + "https://bcr.bazel.build/modules/rules_java/6.5.2/MODULE.bazel": "1d440d262d0e08453fa0c4d8f699ba81609ed0e9a9a0f02cd10b3e7942e61e31", + "https://bcr.bazel.build/modules/rules_java/7.10.0/MODULE.bazel": "530c3beb3067e870561739f1144329a21c851ff771cd752a49e06e3dc9c2e71a", + "https://bcr.bazel.build/modules/rules_java/7.12.2/MODULE.bazel": "579c505165ee757a4280ef83cda0150eea193eed3bef50b1004ba88b99da6de6", + "https://bcr.bazel.build/modules/rules_java/7.2.0/MODULE.bazel": "06c0334c9be61e6cef2c8c84a7800cef502063269a5af25ceb100b192453d4ab", + "https://bcr.bazel.build/modules/rules_java/7.3.2/MODULE.bazel": "50dece891cfdf1741ea230d001aa9c14398062f2b7c066470accace78e412bc2", + "https://bcr.bazel.build/modules/rules_java/7.4.0/MODULE.bazel": "a592852f8a3dd539e82ee6542013bf2cadfc4c6946be8941e189d224500a8934", + 
"https://bcr.bazel.build/modules/rules_java/7.6.1/MODULE.bazel": "2f14b7e8a1aa2f67ae92bc69d1ec0fa8d9f827c4e17ff5e5f02e91caa3b2d0fe", + "https://bcr.bazel.build/modules/rules_java/8.6.1/MODULE.bazel": "f4808e2ab5b0197f094cabce9f4b006a27766beb6a9975931da07099560ca9c2", + "https://bcr.bazel.build/modules/rules_java/8.6.1/source.json": "f18d9ad3c4c54945bf422ad584fa6c5ca5b3116ff55a5b1bc77e5c1210be5960", + "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel": "a56b85e418c83eb1839819f0b515c431010160383306d13ec21959ac412d2fe7", + "https://bcr.bazel.build/modules/rules_jvm_external/5.1/MODULE.bazel": "33f6f999e03183f7d088c9be518a63467dfd0be94a11d0055fe2d210f89aa909", + "https://bcr.bazel.build/modules/rules_jvm_external/5.2/MODULE.bazel": "d9351ba35217ad0de03816ef3ed63f89d411349353077348a45348b096615036", + "https://bcr.bazel.build/modules/rules_jvm_external/5.3/MODULE.bazel": "bf93870767689637164657731849fb887ad086739bd5d360d90007a581d5527d", + "https://bcr.bazel.build/modules/rules_jvm_external/6.0/MODULE.bazel": "37c93a5a78d32e895d52f86a8d0416176e915daabd029ccb5594db422e87c495", + "https://bcr.bazel.build/modules/rules_jvm_external/6.1/MODULE.bazel": "75b5fec090dbd46cf9b7d8ea08cf84a0472d92ba3585b476f44c326eda8059c4", + "https://bcr.bazel.build/modules/rules_jvm_external/6.3/MODULE.bazel": "c998e060b85f71e00de5ec552019347c8bca255062c990ac02d051bb80a38df0", + "https://bcr.bazel.build/modules/rules_jvm_external/6.3/source.json": "6f5f5a5a4419ae4e37c35a5bb0a6ae657ed40b7abc5a5189111b47fcebe43197", + "https://bcr.bazel.build/modules/rules_kotlin/1.9.0/MODULE.bazel": "ef85697305025e5a61f395d4eaede272a5393cee479ace6686dba707de804d59", + "https://bcr.bazel.build/modules/rules_kotlin/1.9.6/MODULE.bazel": "d269a01a18ee74d0335450b10f62c9ed81f2321d7958a2934e44272fe82dcef3", + "https://bcr.bazel.build/modules/rules_kotlin/1.9.6/source.json": "2faa4794364282db7c06600b7e5e34867a564ae91bda7cae7c29c64e9466b7d5", + 
"https://bcr.bazel.build/modules/rules_license/0.0.3/MODULE.bazel": "627e9ab0247f7d1e05736b59dbb1b6871373de5ad31c3011880b4133cafd4bd0", + "https://bcr.bazel.build/modules/rules_license/0.0.4/MODULE.bazel": "6a88dd22800cf1f9f79ba32cacad0d3a423ed28efa2c2ed5582eaa78dd3ac1e5", + "https://bcr.bazel.build/modules/rules_license/0.0.7/MODULE.bazel": "088fbeb0b6a419005b89cf93fe62d9517c0a2b8bb56af3244af65ecfe37e7d5d", + "https://bcr.bazel.build/modules/rules_license/1.0.0/MODULE.bazel": "a7fda60eefdf3d8c827262ba499957e4df06f659330bbe6cdbdb975b768bb65c", + "https://bcr.bazel.build/modules/rules_license/1.0.0/source.json": "a52c89e54cc311196e478f8382df91c15f7a2bfdf4c6cd0e2675cc2ff0b56efb", + "https://bcr.bazel.build/modules/rules_pkg/0.7.0/MODULE.bazel": "df99f03fc7934a4737122518bb87e667e62d780b610910f0447665a7e2be62dc", + "https://bcr.bazel.build/modules/rules_pkg/1.0.1/MODULE.bazel": "5b1df97dbc29623bccdf2b0dcd0f5cb08e2f2c9050aab1092fd39a41e82686ff", + "https://bcr.bazel.build/modules/rules_pkg/1.0.1/source.json": "bd82e5d7b9ce2d31e380dd9f50c111d678c3bdaca190cb76b0e1c71b05e1ba8a", + "https://bcr.bazel.build/modules/rules_proto/4.0.0/MODULE.bazel": "a7a7b6ce9bee418c1a760b3d84f83a299ad6952f9903c67f19e4edd964894e06", + "https://bcr.bazel.build/modules/rules_proto/5.3.0-21.7/MODULE.bazel": "e8dff86b0971688790ae75528fe1813f71809b5afd57facb44dad9e8eca631b7", + "https://bcr.bazel.build/modules/rules_proto/6.0.0-rc1/MODULE.bazel": "1e5b502e2e1a9e825eef74476a5a1ee524a92297085015a052510b09a1a09483", + "https://bcr.bazel.build/modules/rules_proto/6.0.0/MODULE.bazel": "b531d7f09f58dce456cd61b4579ce8c86b38544da75184eadaf0a7cb7966453f", + "https://bcr.bazel.build/modules/rules_proto/6.0.2/MODULE.bazel": "ce916b775a62b90b61888052a416ccdda405212b6aaeb39522f7dc53431a5e73", + "https://bcr.bazel.build/modules/rules_proto/7.0.2/MODULE.bazel": "bf81793bd6d2ad89a37a40693e56c61b0ee30f7a7fdbaf3eabbf5f39de47dea2", + "https://bcr.bazel.build/modules/rules_proto/7.1.0/MODULE.bazel": 
"002d62d9108f75bb807cd56245d45648f38275cb3a99dcd45dfb864c5d74cb96", + "https://bcr.bazel.build/modules/rules_proto/7.1.0/source.json": "39f89066c12c24097854e8f57ab8558929f9c8d474d34b2c00ac04630ad8940e", + "https://bcr.bazel.build/modules/rules_python/0.10.2/MODULE.bazel": "cc82bc96f2997baa545ab3ce73f196d040ffb8756fd2d66125a530031cd90e5f", + "https://bcr.bazel.build/modules/rules_python/0.20.0/MODULE.bazel": "bfe14d17f20e3fe900b9588f526f52c967a6f281e47a1d6b988679bd15082286", + "https://bcr.bazel.build/modules/rules_python/0.23.1/MODULE.bazel": "49ffccf0511cb8414de28321f5fcf2a31312b47c40cc21577144b7447f2bf300", + "https://bcr.bazel.build/modules/rules_python/0.25.0/MODULE.bazel": "72f1506841c920a1afec76975b35312410eea3aa7b63267436bfb1dd91d2d382", + "https://bcr.bazel.build/modules/rules_python/0.28.0/MODULE.bazel": "cba2573d870babc976664a912539b320cbaa7114cd3e8f053c720171cde331ed", + "https://bcr.bazel.build/modules/rules_python/0.29.0/MODULE.bazel": "2ac8cd70524b4b9ec49a0b8284c79e4cd86199296f82f6e0d5da3f783d660c82", + "https://bcr.bazel.build/modules/rules_python/0.31.0/MODULE.bazel": "93a43dc47ee570e6ec9f5779b2e64c1476a6ce921c48cc9a1678a91dd5f8fd58", + "https://bcr.bazel.build/modules/rules_python/0.33.2/MODULE.bazel": "3e036c4ad8d804a4dad897d333d8dce200d943df4827cb849840055be8d2e937", + "https://bcr.bazel.build/modules/rules_python/0.4.0/MODULE.bazel": "9208ee05fd48bf09ac60ed269791cf17fb343db56c8226a720fbb1cdf467166c", + "https://bcr.bazel.build/modules/rules_python/0.40.0/MODULE.bazel": "9d1a3cd88ed7d8e39583d9ffe56ae8a244f67783ae89b60caafc9f5cf318ada7", + "https://bcr.bazel.build/modules/rules_python/0.40.0/source.json": "939d4bd2e3110f27bfb360292986bb79fd8dcefb874358ccd6cdaa7bda029320", + "https://bcr.bazel.build/modules/rules_shell/0.2.0/MODULE.bazel": "fda8a652ab3c7d8fee214de05e7a9916d8b28082234e8d2c0094505c5268ed3c", + "https://bcr.bazel.build/modules/rules_shell/0.2.0/source.json": "7f27af3c28037d9701487c4744b5448d26537cc66cdef0d8df7ae85411f8de95", + 
"https://bcr.bazel.build/modules/rules_swift/1.18.0/MODULE.bazel": "a6aba73625d0dc64c7b4a1e831549b6e375fbddb9d2dde9d80c9de6ec45b24c9", + "https://bcr.bazel.build/modules/rules_swift/1.18.0/source.json": "9e636cabd446f43444ea2662341a9cbb74ecd87ab0557225ae73f1127cb7ff52", + "https://bcr.bazel.build/modules/rules_testing/0.6.0/MODULE.bazel": "8518d53bc742c462536d3f1a0de0c265bd7b51f32797fea4132007223ed2926f", + "https://bcr.bazel.build/modules/rules_testing/0.6.0/source.json": "915ae13ae2247c986cc57289f21e7f1d9711cd2ecfdf5867b51dc0484f3b043b", + "https://bcr.bazel.build/modules/stardoc/0.5.1/MODULE.bazel": "1a05d92974d0c122f5ccf09291442580317cdd859f07a8655f1db9a60374f9f8", + "https://bcr.bazel.build/modules/stardoc/0.5.3/MODULE.bazel": "c7f6948dae6999bf0db32c1858ae345f112cacf98f174c7a8bb707e41b974f1c", + "https://bcr.bazel.build/modules/stardoc/0.5.6/MODULE.bazel": "c43dabc564990eeab55e25ed61c07a1aadafe9ece96a4efabb3f8bf9063b71ef", + "https://bcr.bazel.build/modules/stardoc/0.7.0/MODULE.bazel": "05e3d6d30c099b6770e97da986c53bd31844d7f13d41412480ea265ac9e8079c", + "https://bcr.bazel.build/modules/stardoc/0.7.1/MODULE.bazel": "3548faea4ee5dda5580f9af150e79d0f6aea934fc60c1cc50f4efdd9420759e7", + "https://bcr.bazel.build/modules/stardoc/0.7.1/source.json": "b6500ffcd7b48cd72c29bb67bcac781e12701cc0d6d55d266a652583cfcdab01", + "https://bcr.bazel.build/modules/upb/0.0.0-20211020-160625a/MODULE.bazel": "6cced416be2dc5b9c05efd5b997049ba795e5e4e6fafbe1624f4587767638928", + "https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43", + "https://bcr.bazel.build/modules/upb/0.0.0-20230516-61a97ef/MODULE.bazel": "c0df5e35ad55e264160417fd0875932ee3c9dda63d9fccace35ac62f45e1b6f9", + "https://bcr.bazel.build/modules/upb/0.0.0-20230907-e7430e6/MODULE.bazel": "3a7dedadf70346e678dc059dbe44d05cbf3ab17f1ce43a1c7a42edc7cbf93fd9", + "https://bcr.bazel.build/modules/upb/0.0.0-20230907-e7430e6/source.json": 
"6e513de1d26d1ded97a1c98a8ee166ff9be371a71556d4bc91220332dd3aa48e", + "https://bcr.bazel.build/modules/with_cfg.bzl/0.6.0/MODULE.bazel": "174257966441fb8af45c939854ba38fd8364b7e48e3155c7c0ce5ef869af80aa", + "https://bcr.bazel.build/modules/with_cfg.bzl/0.6.0/source.json": "4acda83067113064cd7c43f9b9ab4a7e7155d260c1a526f48f02aa3fbb809d17", + "https://bcr.bazel.build/modules/zlib/1.2.11/MODULE.bazel": "07b389abc85fdbca459b69e2ec656ae5622873af3f845e1c9d80fe179f3effa0", + "https://bcr.bazel.build/modules/zlib/1.2.12/MODULE.bazel": "3b1a8834ada2a883674be8cbd36ede1b6ec481477ada359cd2d3ddc562340b27", + "https://bcr.bazel.build/modules/zlib/1.2.13/MODULE.bazel": "aa6deb1b83c18ffecd940c4119aff9567cd0a671d7bba756741cb2ef043a29d5", + "https://bcr.bazel.build/modules/zlib/1.3.1.bcr.1/MODULE.bazel": "6a9fe6e3fc865715a7be9823ce694ceb01e364c35f7a846bf0d2b34762bc066b", + "https://bcr.bazel.build/modules/zlib/1.3.1.bcr.3/MODULE.bazel": "af322bc08976524477c79d1e45e241b6efbeb918c497e8840b8ab116802dda79", + "https://bcr.bazel.build/modules/zlib/1.3.1.bcr.3/source.json": "2be409ac3c7601245958cd4fcdff4288be79ed23bd690b4b951f500d54ee6e7d", + "https://bcr.bazel.build/modules/zlib/1.3.1/MODULE.bazel": "751c9940dcfe869f5f7274e1295422a34623555916eb98c174c1e945594bf198", + "https://bcr.bazel.build/modules/zlib/1.3/MODULE.bazel": "6a9c02f19a24dcedb05572b2381446e27c272cd383aed11d41d99da9e3167a72", + "https://bcr.bazel.build/modules/zstd-jni/1.5.2-3.bcr.1/MODULE.bazel": "cb11f12dc4c8454bede2b64855a8126b547cc89cf77838188513f647d9edd86e", + "https://bcr.bazel.build/modules/zstd-jni/1.5.2-3.bcr.1/source.json": "f728c0f2384b4d047a759f4ff5d9cd05a81a78388fbe9cc3981b773e5536be38" + }, + "selectedYankedVersions": {}, + "moduleExtensions": { + "@@apple_support+//crosstool:setup.bzl%apple_cc_configure_extension": { + "general": { + "bzlTransitiveDigest": "p7Ghcq3+nnQxCrf+U3xnhdn7yOSTDbcFyGHK7Ja+rU4=", + "usagesDigest": "yAC1H7cg3wkisnNswc7hxM2fAxrH04yqn7CXVasZPgc=", + "recordedFileInputs": {}, + 
"recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "local_config_apple_cc_toolchains": { + "repoRuleId": "@@apple_support+//crosstool:setup.bzl%_apple_cc_autoconf_toolchains", + "attributes": {} + }, + "local_config_apple_cc": { + "repoRuleId": "@@apple_support+//crosstool:setup.bzl%_apple_cc_autoconf", + "attributes": {} + } + }, + "recordedRepoMappingEntries": [ + [ + "apple_support+", + "bazel_tools", + "bazel_tools" + ], + [ + "bazel_tools", + "rules_cc", + "rules_cc+" + ] + ] + } + }, + "@@platforms//host:extension.bzl%host_platform": { + "general": { + "bzlTransitiveDigest": "xelQcPZH8+tmuOHVjL9vDxMnnQNMlwj0SlvgoqBkm4U=", + "usagesDigest": "SeQiIN/f8/Qt9vYQk7qcXp4I4wJeEC0RnQDiaaJ4tb8=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "host_platform": { + "repoRuleId": "@@platforms//host:extension.bzl%host_platform_repo", + "attributes": {} + } + }, + "recordedRepoMappingEntries": [] + } + }, + "@@rules_foreign_cc+//foreign_cc:extensions.bzl%tools": { + "general": { + "bzlTransitiveDigest": "FApcIcVN43WOEs7g8eg7Cy1hrfRbVNEoUu8IiF+8WOc=", + "usagesDigest": "9LXdVp01HkdYQT8gYPjYLO6VLVJHo9uFfxWaU1ymiRE=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "rules_foreign_cc_framework_toolchain_linux": { + "repoRuleId": "@@rules_foreign_cc+//foreign_cc/private/framework:toolchain.bzl%framework_toolchain_repository", + "attributes": { + "commands_src": "@rules_foreign_cc//foreign_cc/private/framework/toolchains:linux_commands.bzl", + "exec_compatible_with": [ + "@platforms//os:linux" + ] + } + }, + "rules_foreign_cc_framework_toolchain_freebsd": { + "repoRuleId": "@@rules_foreign_cc+//foreign_cc/private/framework:toolchain.bzl%framework_toolchain_repository", + "attributes": { + "commands_src": "@rules_foreign_cc//foreign_cc/private/framework/toolchains:freebsd_commands.bzl", + "exec_compatible_with": [ + 
"@platforms//os:freebsd" + ] + } + }, + "rules_foreign_cc_framework_toolchain_windows": { + "repoRuleId": "@@rules_foreign_cc+//foreign_cc/private/framework:toolchain.bzl%framework_toolchain_repository", + "attributes": { + "commands_src": "@rules_foreign_cc//foreign_cc/private/framework/toolchains:windows_commands.bzl", + "exec_compatible_with": [ + "@platforms//os:windows" + ] + } + }, + "rules_foreign_cc_framework_toolchain_macos": { + "repoRuleId": "@@rules_foreign_cc+//foreign_cc/private/framework:toolchain.bzl%framework_toolchain_repository", + "attributes": { + "commands_src": "@rules_foreign_cc//foreign_cc/private/framework/toolchains:macos_commands.bzl", + "exec_compatible_with": [ + "@platforms//os:macos" + ] + } + }, + "rules_foreign_cc_framework_toolchains": { + "repoRuleId": "@@rules_foreign_cc+//foreign_cc/private/framework:toolchain.bzl%framework_toolchain_repository_hub", + "attributes": {} + }, + "cmake_src": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "filegroup(\n name = \"all_srcs\",\n srcs = glob([\"**\"]),\n visibility = [\"//visibility:public\"],\n)\n", + "sha256": "f316b40053466f9a416adf981efda41b160ca859e97f6a484b447ea299ff26aa", + "strip_prefix": "cmake-3.23.2", + "urls": [ + "https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2.tar.gz" + ] + } + }, + "gnumake_src": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "filegroup(\n name = \"all_srcs\",\n srcs = glob([\"**\"]),\n visibility = [\"//visibility:public\"],\n)\n", + "sha256": "581f4d4e872da74b3941c874215898a7d35802f03732bdccee1d4a7979105d18", + "strip_prefix": "make-4.4", + "urls": [ + "https://mirror.bazel.build/ftpmirror.gnu.org/gnu/make/make-4.4.tar.gz", + "http://ftpmirror.gnu.org/gnu/make/make-4.4.tar.gz" + ] + } + }, + "ninja_build_src": { + "repoRuleId": 
"@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "filegroup(\n name = \"all_srcs\",\n srcs = glob([\"**\"]),\n visibility = [\"//visibility:public\"],\n)\n", + "sha256": "31747ae633213f1eda3842686f83c2aa1412e0f5691d1c14dbbcc67fe7400cea", + "strip_prefix": "ninja-1.11.1", + "urls": [ + "https://github.com/ninja-build/ninja/archive/v1.11.1.tar.gz" + ] + } + }, + "meson_src": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "exports_files([\"meson.py\"])\n\nfilegroup(\n name = \"runtime\",\n srcs = glob([\"mesonbuild/**\"]),\n visibility = [\"//visibility:public\"],\n)\n", + "strip_prefix": "meson-1.1.1", + "url": "https://github.com/mesonbuild/meson/releases/download/1.1.1/meson-1.1.1.tar.gz" + } + }, + "glib_dev": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "\nload(\"@rules_cc//cc:defs.bzl\", \"cc_library\")\n\ncc_import(\n name = \"glib_dev\",\n hdrs = glob([\"include/**\"]),\n shared_library = \"@glib_runtime//:bin/libglib-2.0-0.dll\",\n visibility = [\"//visibility:public\"],\n)\n ", + "sha256": "bdf18506df304d38be98a4b3f18055b8b8cca81beabecad0eece6ce95319c369", + "urls": [ + "https://download.gnome.org/binaries/win64/glib/2.26/glib-dev_2.26.1-1_win64.zip" + ] + } + }, + "glib_src": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "\ncc_import(\n name = \"msvc_hdr\",\n hdrs = [\"msvc_recommended_pragmas.h\"],\n visibility = [\"//visibility:public\"],\n)\n ", + "sha256": "bc96f63112823b7d6c9f06572d2ad626ddac7eb452c04d762592197f6e07898e", + "strip_prefix": "glib-2.26.1", + "urls": [ + "https://download.gnome.org/sources/glib/2.26/glib-2.26.1.tar.gz" + ] + } + }, + "glib_runtime": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + 
"build_file_content": "\nexports_files(\n [\n \"bin/libgio-2.0-0.dll\",\n \"bin/libglib-2.0-0.dll\",\n \"bin/libgmodule-2.0-0.dll\",\n \"bin/libgobject-2.0-0.dll\",\n \"bin/libgthread-2.0-0.dll\",\n ],\n visibility = [\"//visibility:public\"],\n)\n ", + "sha256": "88d857087e86f16a9be651ee7021880b3f7ba050d34a1ed9f06113b8799cb973", + "urls": [ + "https://download.gnome.org/binaries/win64/glib/2.26/glib_2.26.1-1_win64.zip" + ] + } + }, + "gettext_runtime": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "\ncc_import(\n name = \"gettext_runtime\",\n shared_library = \"bin/libintl-8.dll\",\n visibility = [\"//visibility:public\"],\n)\n ", + "sha256": "1f4269c0e021076d60a54e98da6f978a3195013f6de21674ba0edbc339c5b079", + "urls": [ + "https://download.gnome.org/binaries/win64/dependencies/gettext-runtime_0.18.1.1-2_win64.zip" + ] + } + }, + "pkgconfig_src": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "build_file_content": "filegroup(\n name = \"all_srcs\",\n srcs = glob([\"**\"]),\n visibility = [\"//visibility:public\"],\n)\n", + "sha256": "6fc69c01688c9458a57eb9a1664c9aba372ccda420a02bf4429fe610e7e7d591", + "strip_prefix": "pkg-config-0.29.2", + "patches": [ + "@@rules_foreign_cc+//toolchains:pkgconfig-detectenv.patch", + "@@rules_foreign_cc+//toolchains:pkgconfig-makefile-vc.patch" + ], + "urls": [ + "https://pkgconfig.freedesktop.org/releases/pkg-config-0.29.2.tar.gz" + ] + } + }, + "bazel_skylib": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz" + ], + "sha256": "f7be3474d42aae265405a592bb7da8e171919d74c16f082a5457840f06054728" + } + }, + "rules_python": { + 
"repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "sha256": "84aec9e21cc56fbc7f1335035a71c850d1b9b5cc6ff497306f84cced9a769841", + "strip_prefix": "rules_python-0.23.1", + "url": "https://github.com/bazelbuild/rules_python/archive/refs/tags/0.23.1.tar.gz" + } + }, + "cmake-3.23.2-linux-aarch64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-linux-aarch64.tar.gz" + ], + "sha256": "f2654bf780b53f170bbbec44d8ac67d401d24788e590faa53036a89476efa91e", + "strip_prefix": "cmake-3.23.2-linux-aarch64", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"cmake_data\",\n srcs = glob(\n [\n \"**\",\n ],\n exclude = [\n \"WORKSPACE\",\n \"WORKSPACE.bazel\",\n \"BUILD\",\n \"BUILD.bazel\",\n ],\n ),\n)\n\nnative_tool_toolchain(\n name = \"cmake_tool\",\n path = \"bin/cmake\",\n target = \":cmake_data\",\n)\n" + } + }, + "cmake-3.23.2-linux-x86_64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-linux-x86_64.tar.gz" + ], + "sha256": "aaced6f745b86ce853661a595bdac6c5314a60f8181b6912a0a4920acfa32708", + "strip_prefix": "cmake-3.23.2-linux-x86_64", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"cmake_data\",\n srcs = glob(\n [\n \"**\",\n ],\n exclude = [\n \"WORKSPACE\",\n \"WORKSPACE.bazel\",\n \"BUILD\",\n \"BUILD.bazel\",\n ],\n ),\n)\n\nnative_tool_toolchain(\n name = \"cmake_tool\",\n path = \"bin/cmake\",\n target = 
\":cmake_data\",\n)\n" + } + }, + "cmake-3.23.2-macos-universal": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-macos-universal.tar.gz" + ], + "sha256": "853a0f9af148c5ef47282ffffee06c4c9f257be2635936755f39ca13c3286c88", + "strip_prefix": "cmake-3.23.2-macos-universal/CMake.app/Contents", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"cmake_data\",\n srcs = glob(\n [\n \"**\",\n ],\n exclude = [\n \"WORKSPACE\",\n \"WORKSPACE.bazel\",\n \"BUILD\",\n \"BUILD.bazel\",\n ],\n ),\n)\n\nnative_tool_toolchain(\n name = \"cmake_tool\",\n path = \"bin/cmake\",\n target = \":cmake_data\",\n)\n" + } + }, + "cmake-3.23.2-windows-i386": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-windows-i386.zip" + ], + "sha256": "6a4fcd6a2315b93cb23c93507efccacc30c449c2bf98f14d6032bb226c582e07", + "strip_prefix": "cmake-3.23.2-windows-i386", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"cmake_data\",\n srcs = glob(\n [\n \"**\",\n ],\n exclude = [\n \"WORKSPACE\",\n \"WORKSPACE.bazel\",\n \"BUILD\",\n \"BUILD.bazel\",\n ],\n ),\n)\n\nnative_tool_toolchain(\n name = \"cmake_tool\",\n path = \"bin/cmake.exe\",\n target = \":cmake_data\",\n)\n" + } + }, + "cmake-3.23.2-windows-x86_64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/Kitware/CMake/releases/download/v3.23.2/cmake-3.23.2-windows-x86_64.zip" 
+ ], + "sha256": "2329387f3166b84c25091c86389fb891193967740c9bcf01e7f6d3306f7ffda0", + "strip_prefix": "cmake-3.23.2-windows-x86_64", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"cmake_data\",\n srcs = glob(\n [\n \"**\",\n ],\n exclude = [\n \"WORKSPACE\",\n \"WORKSPACE.bazel\",\n \"BUILD\",\n \"BUILD.bazel\",\n ],\n ),\n)\n\nnative_tool_toolchain(\n name = \"cmake_tool\",\n path = \"bin/cmake.exe\",\n target = \":cmake_data\",\n)\n" + } + }, + "cmake_3.23.2_toolchains": { + "repoRuleId": "@@rules_foreign_cc+//toolchains:prebuilt_toolchains_repository.bzl%prebuilt_toolchains_repository", + "attributes": { + "repos": { + "cmake-3.23.2-linux-aarch64": [ + "@platforms//cpu:aarch64", + "@platforms//os:linux" + ], + "cmake-3.23.2-linux-x86_64": [ + "@platforms//cpu:x86_64", + "@platforms//os:linux" + ], + "cmake-3.23.2-macos-universal": [ + "@platforms//os:macos" + ], + "cmake-3.23.2-windows-i386": [ + "@platforms//cpu:x86_32", + "@platforms//os:windows" + ], + "cmake-3.23.2-windows-x86_64": [ + "@platforms//cpu:x86_64", + "@platforms//os:windows" + ] + }, + "tool": "cmake" + } + }, + "ninja_1.11.1_linux": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-linux.zip" + ], + "sha256": "b901ba96e486dce377f9a070ed4ef3f79deb45f4ffe2938f8e7ddc69cfb3df77", + "strip_prefix": "", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"ninja_bin\",\n srcs = [\"ninja\"],\n)\n\nnative_tool_toolchain(\n name = \"ninja_tool\",\n env = {\"NINJA\": \"$(execpath :ninja_bin)\"},\n path = \"$(execpath :ninja_bin)\",\n target = 
\":ninja_bin\",\n)\n" + } + }, + "ninja_1.11.1_mac": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-mac.zip" + ], + "sha256": "482ecb23c59ae3d4f158029112de172dd96bb0e97549c4b1ca32d8fad11f873e", + "strip_prefix": "", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"ninja_bin\",\n srcs = [\"ninja\"],\n)\n\nnative_tool_toolchain(\n name = \"ninja_tool\",\n env = {\"NINJA\": \"$(execpath :ninja_bin)\"},\n path = \"$(execpath :ninja_bin)\",\n target = \":ninja_bin\",\n)\n" + } + }, + "ninja_1.11.1_win": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "urls": [ + "https://github.com/ninja-build/ninja/releases/download/v1.11.1/ninja-win.zip" + ], + "sha256": "524b344a1a9a55005eaf868d991e090ab8ce07fa109f1820d40e74642e289abc", + "strip_prefix": "", + "build_file_content": "load(\"@rules_foreign_cc//toolchains/native_tools:native_tools_toolchain.bzl\", \"native_tool_toolchain\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nfilegroup(\n name = \"ninja_bin\",\n srcs = [\"ninja.exe\"],\n)\n\nnative_tool_toolchain(\n name = \"ninja_tool\",\n env = {\"NINJA\": \"$(execpath :ninja_bin)\"},\n path = \"$(execpath :ninja_bin)\",\n target = \":ninja_bin\",\n)\n" + } + }, + "ninja_1.11.1_toolchains": { + "repoRuleId": "@@rules_foreign_cc+//toolchains:prebuilt_toolchains_repository.bzl%prebuilt_toolchains_repository", + "attributes": { + "repos": { + "ninja_1.11.1_linux": [ + "@platforms//cpu:x86_64", + "@platforms//os:linux" + ], + "ninja_1.11.1_mac": [ + "@platforms//cpu:x86_64", + "@platforms//os:macos" + ], + "ninja_1.11.1_win": [ + "@platforms//cpu:x86_64", + "@platforms//os:windows" + ] + }, + "tool": "ninja" + } + } 
+ }, + "recordedRepoMappingEntries": [ + [ + "rules_foreign_cc+", + "bazel_tools", + "bazel_tools" + ], + [ + "rules_foreign_cc+", + "rules_foreign_cc", + "rules_foreign_cc+" + ] + ] + } + }, + "@@rules_graalvm+//:extensions.bzl%graalvm": { + "general": { + "bzlTransitiveDigest": "i0x35JJR57FKhcEk8ExwoG+DGBTXVhlQQuaoQCNX3fA=", + "usagesDigest": "eRL31Ff6BONMH7tPoZ33ymdqu8/PPwm9lkAl1F9fu88=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "graalvm_toolchains": { + "repoRuleId": "@@rules_graalvm+//internal:graalvm_bindist.bzl%_toolchain_config", + "attributes": { + "build_file": "\nalias(\n name = \"toolchain_gvm\",\n actual = \"gvm\",\n visibility = [\"//visibility:public\"],\n)\ntoolchain(\n name = \"gvm\",\n exec_compatible_with = [\n \n ],\n target_compatible_with = [\n \n ],\n toolchain = \"@graalvm//:gvm\",\n toolchain_type = \"@rules_graalvm//graalvm/toolchain\",\n visibility = [\"//visibility:public\"],\n)\n\nconfig_setting(\n name = \"prefix_version_setting\",\n values = {\"java_runtime_version\": \"graalvm_21\"},\n visibility = [\"//visibility:private\"],\n)\ntoolchain(\n name = \"toolchain\",\n target_compatible_with = [],\n target_settings = [\":prefix_version_setting\"],\n toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n toolchain = \"@graalvm//:jdk\",\n visibility = [\"//visibility:public\"],\n)\n\ntoolchain(\n name = \"bootstrap_runtime_toolchain\",\n # These constraints are not required for correctness, but prevent fetches of remote JDK for\n # different architectures. 
As every Java compilation toolchain depends on a bootstrap runtime in\n # the same configuration, this constraint will not result in toolchain resolution failures.\n exec_compatible_with = [],\n target_settings = [\":prefix_version_setting\"],\n toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n toolchain = \"@graalvm//:jdk\",\n visibility = [\"//visibility:public\"],\n)\n\n" + } + }, + "graalvm": { + "repoRuleId": "@@rules_graalvm+//internal:graalvm_bindist.bzl%_graalvm_bindist_repository", + "attributes": { + "version": "21.0.2", + "java_version": "21", + "distribution": "ce", + "components": [], + "setup_actions": [], + "enable_toolchain": true, + "toolchain_config": "graalvm_toolchains" + } + } + }, + "recordedRepoMappingEntries": [ + [ + "rules_graalvm+", + "bazel_skylib", + "bazel_skylib+" + ] + ] + } + }, + "@@rules_java+//java:rules_java_deps.bzl%compatibility_proxy": { + "general": { + "bzlTransitiveDigest": "84xJEZ1jnXXwo8BXMprvBm++rRt4jsTu9liBxz0ivps=", + "usagesDigest": "jTQDdLDxsS43zuRmg1faAjIEPWdLAbDAowI1pInQSoo=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "compatibility_proxy": { + "repoRuleId": "@@rules_java+//java:rules_java_deps.bzl%_compatibility_proxy_repo_rule", + "attributes": {} + } + }, + "recordedRepoMappingEntries": [ + [ + "rules_java+", + "bazel_tools", + "bazel_tools" + ] + ] + } + }, + "@@rules_kotlin+//src/main/starlark/core/repositories:bzlmod_setup.bzl%rules_kotlin_extensions": { + "general": { + "bzlTransitiveDigest": "sFhcgPbDQehmbD1EOXzX4H1q/CD5df8zwG4kp4jbvr8=", + "usagesDigest": "QI2z8ZUR+mqtbwsf2fLqYdJAkPOHdOV+tF2yVAUgRzw=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "com_github_jetbrains_kotlin_git": { + "repoRuleId": "@@rules_kotlin+//src/main/starlark/core/repositories:compiler.bzl%kotlin_compiler_git_repository", + "attributes": { + "urls": [ + 
"https://github.com/JetBrains/kotlin/releases/download/v1.9.23/kotlin-compiler-1.9.23.zip" + ], + "sha256": "93137d3aab9afa9b27cb06a824c2324195c6b6f6179d8a8653f440f5bd58be88" + } + }, + "com_github_jetbrains_kotlin": { + "repoRuleId": "@@rules_kotlin+//src/main/starlark/core/repositories:compiler.bzl%kotlin_capabilities_repository", + "attributes": { + "git_repository_name": "com_github_jetbrains_kotlin_git", + "compiler_version": "1.9.23" + } + }, + "com_github_google_ksp": { + "repoRuleId": "@@rules_kotlin+//src/main/starlark/core/repositories:ksp.bzl%ksp_compiler_plugin_repository", + "attributes": { + "urls": [ + "https://github.com/google/ksp/releases/download/1.9.23-1.0.20/artifacts.zip" + ], + "sha256": "ee0618755913ef7fd6511288a232e8fad24838b9af6ea73972a76e81053c8c2d", + "strip_version": "1.9.23-1.0.20" + } + }, + "com_github_pinterest_ktlint": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "sha256": "01b2e0ef893383a50dbeb13970fe7fa3be36ca3e83259e01649945b09d736985", + "urls": [ + "https://github.com/pinterest/ktlint/releases/download/1.3.0/ktlint" + ], + "executable": true + } + }, + "rules_android": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "sha256": "cd06d15dd8bb59926e4d65f9003bfc20f9da4b2519985c27e190cddc8b7a7806", + "strip_prefix": "rules_android-0.1.1", + "urls": [ + "https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip" + ] + } + } + }, + "recordedRepoMappingEntries": [ + [ + "rules_kotlin+", + "bazel_tools", + "bazel_tools" + ] + ] + } + } + } +} diff --git a/bazel/release/hashes.bzl b/bazel/release/hashes.bzl index 316cee510..a92f47c58 100644 --- a/bazel/release/hashes.bzl +++ b/bazel/release/hashes.bzl @@ -6,6 +6,22 @@ via output groups. 
Based on https://github.com/bazelbuild/examples/blob/main/rules/implicit_output/hash.bzl """ +_COREUTILS_TOOLCHAIN = "@aspect_bazel_lib//lib:coreutils_toolchain_type" + +def _hash_action(ctx, coreutils, algorithm, src, out): + ctx.actions.run_shell( + outputs = [out], + inputs = [src], + tools = [coreutils.bin], + command = "{coreutils} hashsum --{algorithm} {src} > {out}".format( + coreutils = coreutils.bin.path, + algorithm = algorithm, + src = src.path, + out = out.path, + ), + toolchain = _COREUTILS_TOOLCHAIN, + ) + def _impl(ctx): # Create actions to generate the three output files. # Actions are run only when the corresponding file is requested. @@ -13,32 +29,16 @@ def _impl(ctx): if ctx.file.src.is_directory: fail("src expected to be a file but got a directory") + coreutils = ctx.toolchains[_COREUTILS_TOOLCHAIN].coreutils_info + md5out = ctx.actions.declare_file("{}.md5".format(ctx.file.src.basename)) - ctx.actions.run_shell( - outputs = [md5out], - inputs = [ctx.file.src], - command = "ROOT=$PWD && cd {} && md5sum {} > $ROOT/{}".format(ctx.file.src.dirname, ctx.file.src.basename, md5out.path), - ) + _hash_action(ctx, coreutils, "md5", ctx.file.src, md5out) sha1out = ctx.actions.declare_file("{}.sha1".format(ctx.file.src.basename)) - ctx.actions.run_shell( - outputs = [sha1out], - inputs = [ctx.file.src], - command = "ROOT=$PWD && cd {} && sha1sum {} > $ROOT/{}".format(ctx.file.src.dirname, ctx.file.src.basename, sha1out.path), - ) + _hash_action(ctx, coreutils, "sha1", ctx.file.src, sha1out) sha256out = ctx.actions.declare_file("{}.sha256".format(ctx.file.src.basename)) - ctx.actions.run_shell( - outputs = [sha256out], - inputs = [ctx.file.src], - command = "ROOT=$PWD && cd {dirname} && $ROOT/{sha256sum} {basename} > $ROOT/{path}".format( - dirname = ctx.file.src.dirname, - sha256sum = ctx.executable._sha256sum.path, - basename = ctx.file.src.basename, - path = sha256out.path, - ), - tools = [ctx.executable._sha256sum], - ) + _hash_action(ctx, coreutils, 
"sha256", ctx.file.src, sha256out) # By default (if you run `bazel build` on this target, or if you use it as a # source of another target), only the sha256 is computed. @@ -60,12 +60,8 @@ _hashes = rule( allow_single_file = True, mandatory = True, ), - "_sha256sum": attr.label( - executable = True, - cfg = "exec", - default = "//bazel/release/sha256sum", - ), }, + toolchains = [_COREUTILS_TOOLCHAIN], ) def hashes(name, src, **kwargs): diff --git a/bazel/release/sha256sum/BUILD.bazel b/bazel/release/sha256sum/BUILD.bazel deleted file mode 100644 index 4bef6d218..000000000 --- a/bazel/release/sha256sum/BUILD.bazel +++ /dev/null @@ -1,17 +0,0 @@ -load("@rules_go//go:def.bzl", "go_binary", "go_library") - -go_library( - name = "sha256sum_lib", - srcs = ["main.go"], - importpath = "github.com/aspect-build/silo/bazel/release/sha256sum", - visibility = ["//visibility:private"], -) - -go_binary( - name = "sha256sum", - embed = [":sha256sum_lib"], - visibility = [ - "//bazel/release:__pkg__", - "//cli:__pkg__", - ], -) diff --git a/bazel/release/sha256sum/main.go b/bazel/release/sha256sum/main.go deleted file mode 100644 index 18f272286..000000000 --- a/bazel/release/sha256sum/main.go +++ /dev/null @@ -1,32 +0,0 @@ -package main - -import ( - "crypto/sha256" - "fmt" - "io" - "log" - "os" -) - -func main() { - var input io.Reader - var filename string - if len(os.Args) == 1 { - input = os.Stdin - filename = "-" - } else { - f, err := os.Open(os.Args[1]) - if err != nil { - log.Fatal(err) - } - defer f.Close() - input = f - filename = os.Args[1] - } - - hash := sha256.New() - if _, err := io.Copy(hash, input); err != nil { - log.Fatal(err) - } - fmt.Printf("%x %s\n", hash.Sum(nil), filename) -} diff --git a/crates/aspect-cli/.aspect/config.axl b/crates/aspect-cli/.aspect/config.axl index 5acc4d7af..417478929 100644 --- a/crates/aspect-cli/.aspect/config.axl +++ b/crates/aspect-cli/.aspect/config.axl @@ -1,3 +1,2 @@ def config(ctx: ConfigContext): - print("running 
crates/aspect-cli/.aspect/config.axl") pass diff --git a/crates/aspect-cli/src/main.rs b/crates/aspect-cli/src/main.rs index c998ebabc..23ba4fa39 100644 --- a/crates/aspect-cli/src/main.rs +++ b/crates/aspect-cli/src/main.rs @@ -3,7 +3,7 @@ mod flags; mod helpers; mod trace; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::env::var; use std::path::PathBuf; use std::process::ExitCode; @@ -14,7 +14,7 @@ use axl_runtime::engine::task_arg::TaskArg; use axl_runtime::engine::task_args::TaskArgs; use axl_runtime::eval::{self, FrozenTaskModuleLike, ModuleScope, execute_task_with_args}; use axl_runtime::module::{AXL_MODULE_FILE, AXL_ROOT_MODULE_NAME}; -use axl_runtime::module::{AxlModuleEvaluator, DiskStore}; +use axl_runtime::module::{AxlModuleEvaluator, DiskStore, UseConfigEntry}; use clap::{Arg, ArgAction, Command}; use miette::{IntoDiagnostic, miette}; use starlark::environment::FrozenModule; @@ -59,7 +59,7 @@ async fn main() -> miette::Result { } // Initialize tracing for logging and instrumentation. - let _tracing = trace::init(); + // let _tracing = trace::init(); // Enter the root tracing span for the entire application. let _root = info_span!("root").entered(); @@ -85,26 +85,29 @@ async fn main() -> miette::Result { .evaluate(AXL_ROOT_MODULE_NAME.to_string(), repo_root.clone()) .into_diagnostic()?; - // Expand all module dependencies (including the builtin @aspect module) to the disk store and collect their root paths. 
- // This results in a Vec of (String, PathBuf) such as - // [ - // ( "aspect", "/Users/username/Library/Caches/axl/deps/27e6d838c365a7c5d79674a7b6c7ec7b8d22f686dbcc8088a8d1454a6489a9ae/aspect" ), - // ( "experimental", "/Users/username/Library/Caches/axl/deps/27e6d838c365a7c5d79674a7b6c7ec7b8d22f686dbcc8088a8d1454a6489a9ae/experimental" ), - // ( "local", "/Users/username/Library/Caches/axl/deps/27e6d838c365a7c5d79674a7b6c7ec7b8d22f686dbcc8088a8d1454a6489a9ae/local" ), - // ] + // Expand all module dependencies (including builtins) to the disk store. + // Returns (name, path, use_config) for each module. let module_roots = disk_store .expand_store(&root_module_store) .await .into_diagnostic()?; - // Collect root and dependency modules into a vector of modules with exported tasks. + // Build the set of deps with use_config enabled (as determined by disk_store) + let use_config_deps: HashSet = module_roots + .iter() + .filter(|(_, _, use_config)| *use_config) + .map(|(name, _, _)| name.clone()) + .collect(); + + // Collect root and dependency modules into a vector of modules with exported tasks and configs. 
let mut modules = vec![( root_module_store.module_name, root_module_store.module_root, root_module_store.tasks.take(), + root_module_store.configs.take(), )]; - for (name, root) in module_roots { + for (name, root, _) in module_roots { let module_store = module_eval.evaluate(name, root).into_diagnostic()?; if debug_mode() { eprintln!( @@ -116,6 +119,7 @@ async fn main() -> miette::Result { module_store.module_name, module_store.module_root, module_store.tasks.take(), + module_store.configs.take(), )) } @@ -170,7 +174,11 @@ async fn main() -> miette::Result { HashMap)>, )> = vec![]; - for (module_name, module_root, map) in modules.into_iter() { + // Collect configs from each module for use_config processing + let mut module_configs: Vec<(String, PathBuf, Vec)> = vec![]; + + for (module_name, module_root, map, configs) in modules.into_iter() { + module_configs.push((module_name.clone(), module_root.clone(), configs)); let mut mmap = HashMap::new(); for (path, (label, symbols)) in map.into_iter() { let rel_path = path.strip_prefix(&module_root).unwrap().to_path_buf(); @@ -234,16 +242,76 @@ async fn main() -> miette::Result { } } + // Build scoped configs: package configs first (from use_config), then customer configs last + let root_scope = ModuleScope { + name: AXL_ROOT_MODULE_NAME.to_string(), + path: repo_root.clone(), + }; + + // Collect resolved package names for requires/conflicts checking + let resolved_packages: HashSet = module_configs + .iter() + .map(|(name, _, _)| name.clone()) + .collect(); + + // Build package configs from use_config() declarations (dependency order, leaves first) + let mut scoped_configs: Vec<(ModuleScope, PathBuf, String)> = vec![]; + + for (module_name, module_root, configs_entries) in &module_configs { + // Skip if root module didn't enable use_config for this dep (root module is always allowed) + if module_name != AXL_ROOT_MODULE_NAME && !use_config_deps.contains(module_name) { + continue; + } + for entry in configs_entries { + // 
Check requires: all referenced packages must be present + let requires_met = entry.requires.iter().all(|(pkg, version_constraint)| { + if !resolved_packages.contains(pkg) { + return false; + } + // Version constraint checking deferred until modules carry version metadata + if version_constraint.is_some() { + // TODO: implement version constraint checking with semver crate + // For now, presence check is sufficient + } + true + }); + // Check conflicts: all referenced packages must be absent + let conflicts_clear = entry + .conflicts + .iter() + .all(|pkg| !resolved_packages.contains(pkg)); + + if requires_met && conflicts_clear { + let scope = ModuleScope { + name: module_name.clone(), + path: module_root.clone(), + }; + let abs_path = module_root.join(&entry.path); + scoped_configs.push((scope, abs_path, entry.function.clone())); + + if debug_mode() { + eprintln!( + "use_config: @{} -> {} (fn: {})", + module_name, entry.path, entry.function + ); + } + } else if debug_mode() { + eprintln!( + "use_config: @{} -> {} SKIPPED (requires={}, conflicts={})", + module_name, entry.path, requires_met, conflicts_clear + ); + } + } + } + + // Append customer configs (filesystem-discovered) — always last + for path in configs.iter() { + scoped_configs.push((root_scope.clone(), path.clone(), "config".to_string())); + } + // Run all config functions, passing in vector of tasks for configuration let tasks = config_eval - .run_all( - ModuleScope { - name: AXL_ROOT_MODULE_NAME.to_string(), - path: repo_root.clone(), - }, - configs.clone(), - tasks, - ) + .run_all(scoped_configs, tasks) .into_diagnostic()?; // Build the command tree from the evaluated and configured tasks. 
@@ -424,7 +492,7 @@ async fn main() -> miette::Result { match out.await { Ok(result) => { drop(_root); - drop(_tracing); + // drop(_tracing); result } Err(err) => panic!("{:?}", err), diff --git a/crates/axl-proto/Cargo.toml b/crates/axl-proto/Cargo.toml index 50ca9ff0a..793149b7c 100644 --- a/crates/axl-proto/Cargo.toml +++ b/crates/axl-proto/Cargo.toml @@ -12,7 +12,6 @@ rust-version.workspace = true [dependencies] allocative = "0.3.4" anyhow = "1.0.98" -derive_more = { version = "2.0.1", features = ["full"] } display_container = "0.9.0" prost = "0.14.1" prost-types = "0.14.1" @@ -20,8 +19,9 @@ starbuf-derive = { path = "../starbuf-derive" } starbuf-types = { path = "../starbuf-types" } starlark = "0.13.0" starlark_derive = "0.13.0" -tonic = "0.14.2" +tonic = { version = "0.14.2", features = ["transport", "tls-native-roots"] } tonic-prost = "0.14.2" +tokio = { version = "1", features = ["rt-multi-thread", "macros"] } [build-dependencies] prost-build = "0.14.1" diff --git a/crates/axl-proto/build.rs b/crates/axl-proto/build.rs index 0f1f7c312..01f18c9a4 100644 --- a/crates/axl-proto/build.rs +++ b/crates/axl-proto/build.rs @@ -75,20 +75,15 @@ fn main() -> Result<(), std::io::Error> { } config.type_attribute( format!("{}.{}", prefix, desc.name()), - format!( - r#" + r#" #[derive( ::starlark::values::ProvidesStaticType, - ::derive_more::Display, ::starlark::values::Trace, ::starlark::values::NoSerialize, ::allocative::Allocative, ::starbuf_derive::Message )] -#[display("{}")] "#, - desc.name() - ), ); for oneof in &desc.oneof_decl { @@ -110,7 +105,7 @@ fn main() -> Result<(), std::io::Error> { } for file in &fds.file { - if file.package() == "google.devtools.build.v1" { + if file.package() == "google.devtools.build.v1" || file.package() == "google.longrunning" { continue; } traverse( @@ -178,6 +173,24 @@ pub mod protos {{ }} +}} + "#, + tools = tools, + ), + )?; + + let v2 = fs::read_to_string(format!("{out_dir}/build.bazel.remote.execution.v2.rs"))?; + + fs::write( 
+ format!("{out_dir}/build.bazel.remote.execution.v2.rs"), + format!( + r#" +/// @Generated by build.rs +#[starbuf_derive::types] +pub mod v2 {{ + +{v2} + }} "# ), diff --git a/crates/axl-proto/descriptor.bin b/crates/axl-proto/descriptor.bin index ebe10d52d..711036b01 100644 Binary files a/crates/axl-proto/descriptor.bin and b/crates/axl-proto/descriptor.bin differ diff --git a/crates/axl-proto/src/lib.rs b/crates/axl-proto/src/lib.rs index 80218d42b..77e82ec83 100644 --- a/crates/axl-proto/src/lib.rs +++ b/crates/axl-proto/src/lib.rs @@ -14,6 +14,38 @@ include!(concat!(env!("OUT_DIR"), "/workspace_log.rs")); include!(concat!(env!("OUT_DIR"), "/build_event_stream.rs")); include!(concat!(env!("OUT_DIR"), "/blaze_query.rs")); include!(concat!(env!("OUT_DIR"), "/tools.protos.rs")); +pub mod build { + pub mod bazel { + pub mod semver { + include!(concat!(env!("OUT_DIR"), "/build.bazel.semver.rs")); + } + pub mod remote { + pub mod execution { + include!(concat!( + env!("OUT_DIR"), + "/build.bazel.remote.execution.v2.rs" + )); + + #[starbuf_derive::service( + client = "crate::build::bazel::remote::execution::v2::action_cache_client::ActionCacheClient", + methods( + name = "GetActionResult", + method = "get_action_result", + request = "crate::build::bazel::remote::execution::v2::GetActionResultRequest", + response = "crate::build::bazel::remote::execution::v2::ActionResult", + ), + methods( + name = "UpdateActionResult", + method = "update_action_result", + request = "crate::build::bazel::remote::execution::v2::UpdateActionResultRequest", + response = "crate::build::bazel::remote::execution::v2::ActionResult", + ) + )] + pub struct ActionCache; + } + } + } +} #[path = "./pb_impl.rs"] mod pb_impl; @@ -26,6 +58,12 @@ pub mod google { } } } + pub mod rpc { + include!(concat!(env!("OUT_DIR"), "/google.rpc.rs")); + } + pub mod longrunning { + include!(concat!(env!("OUT_DIR"), "/google.longrunning.rs")); + } } pub mod analysis { diff --git a/crates/axl-runtime/BUILD.bazel 
b/crates/axl-runtime/BUILD.bazel index 95c0f570e..aa6d73e65 100644 --- a/crates/axl-runtime/BUILD.bazel +++ b/crates/axl-runtime/BUILD.bazel @@ -17,6 +17,10 @@ rust_library( "@crates//:futures-util", "@crates//:futures", "@crates//:handlebars", + "@crates//:http-body-util", + "@crates//:hyper", + "@crates//:hyper-util", + "@crates//:hyperlocal", "@crates//:liquid-core", "@crates//:liquid", "@crates//:minijinja", @@ -33,6 +37,7 @@ rust_library( "@crates//:tokio-stream", "@crates//:tokio", "@crates//:tracing", + "@crates//:url", "@crates//:uuid", "@crates//:wasmi_wasi", "@crates//:wasmi", diff --git a/crates/axl-runtime/Cargo.toml b/crates/axl-runtime/Cargo.toml index 96d423ddd..ed67c518b 100644 --- a/crates/axl-runtime/Cargo.toml +++ b/crates/axl-runtime/Cargo.toml @@ -24,6 +24,11 @@ thiserror = "2.0.12" prost = "0.14.1" reqwest = { version="0.12.22", features = ["stream", "gzip", "zstd"] } +hyper = "1.6.0" +hyper-util = { version = "0.1.12", features = ["client-legacy", "tokio", "http1"] } +hyperlocal = "0.9.1" +http-body-util = "0.1.3" +url = "2.5.4" zstd = "0.13.3" nix = { version = "0.30.1", features = ["fs"] } @@ -54,6 +59,7 @@ dirs = "6.0.0" fibre = "0.5.0" flate2 = "1.1.2" rand = "0.8.5" +semver = "1" sha256 = "1.6.0" ssri = "9.2.0" base64 = "0.22.1" diff --git a/crates/axl-runtime/src/builtins/aspect/MODULE.aspect b/crates/axl-runtime/src/builtins/aspect/MODULE.aspect index bd4ae78fe..9822e3885 100644 --- a/crates/axl-runtime/src/builtins/aspect/MODULE.aspect +++ b/crates/axl-runtime/src/builtins/aspect/MODULE.aspect @@ -1,3 +1,25 @@ use_task("build.axl", "build") use_task("test.axl", "test") use_task("axl_add.axl", "add") + +# Register warming tasks. 
+use_task("tasks/warming.axl", "restore", "update") + +# Configure delivery +use_config("config/delivery.axl", "configure_delivery") + +# Configure builtins +use_config("config/builtins.axl", "configure_builtins") + +# Configure rules_lint if its declared by user +use_config( + "config/lint.axl", + "configure_rules_lint", + requires = ["aspect_rules_lint"] +) + +use_config( + "config/nolint.axl", + "configure_dummy_lint", + conflicts = ["aspect_rules_lint"] +) diff --git a/crates/axl-runtime/src/builtins/aspect/bazel.axl b/crates/axl-runtime/src/builtins/aspect/bazel.axl new file mode 100644 index 000000000..f3ef897c5 --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/bazel.axl @@ -0,0 +1,41 @@ +""" +Bazel exit code constants. + +These correspond to the exit codes defined in Bazel's ExitCode.java: +https://github.com/bazelbuild/bazel/blob/master/src/main/java/com/google/devtools/build/lib/util/ExitCode.java +""" + +exit_codes = struct( + SUCCESS = 0, + BUILD_FAILURE = 1, + PARSING_FAILURE = 1, + COMMAND_LINE_ERROR = 2, + TESTS_FAILED = 3, + PARTIAL_ANALYSIS_FAILURE = 3, + NO_TESTS_FOUND = 4, + RUN_FAILURE = 6, + ANALYSIS_FAILURE = 7, + INTERRUPTED = 8, + LOCK_HELD_NOBLOCK_FOR_LOCK = 9, + REMOTE_ENVIRONMENTAL_ERROR = 32, + OOM_ERROR = 33, + REMOTE_ERROR = 34, + LOCAL_ENVIRONMENTAL_ERROR = 36, + BLAZE_INTERNAL_ERROR = 37, + TRANSIENT_BUILD_EVENT_SERVICE_UPLOAD_ERROR = 38, + REMOTE_CACHE_EVICTED = 39, + PERSISTENT_BUILD_EVENT_SERVICE_UPLOAD_ERROR = 45, + EXTERNAL_DEPS_ERROR = 48, +) + +# https://bazel.build/run/scripts#exit-codes +def default_retry(code: int) -> bool: + """Returns True if the given exit code is retryable. + + Retryable codes are those indicating transient infrastructure failures + where re-running the command may succeed: + - BLAZE_INTERNAL_ERROR (37): Bazel server crash. + - LOCAL_ENVIRONMENTAL_ERROR (36): Local env failure, often caused by + a queued command failing because the server is crashing. 
+ """ + return code == exit_codes.BLAZE_INTERNAL_ERROR or code == exit_codes.LOCAL_ENVIRONMENTAL_ERROR diff --git a/crates/axl-runtime/src/builtins/aspect/build.axl b/crates/axl-runtime/src/builtins/aspect/build.axl index 28a27304e..14d1d3092 100644 --- a/crates/axl-runtime/src/builtins/aspect/build.axl +++ b/crates/axl-runtime/src/builtins/aspect/build.axl @@ -1,52 +1,106 @@ """ A default 'build' task that wraps a 'bazel build' command. """ +load("./bazel.axl", "default_retry") -def impl(ctx: TaskContext) -> int: - stdout = ctx.std.io.stdout +BuildConfig = spec( + # Declarative data — composable, zero-cost reads + extra_flags = attr(list[str], []), + extra_startup_flags = attr(list[str], []), + build_event_sinks = attr(list[bazel.build.BuildEventSink], []), + + # Optional transforms — only called when set + flags = attr(typing.Callable[[list[str]], list[str]] | None, None), + startup_flags = attr(typing.Callable[[list[str]], list[str]] | None, None), + + # Lifecycle — only called when set + build_start = attr(typing.Callable[[], None] | None, None), + build_event = attr(typing.Callable[[dict, str, str], None] | None, None), + build_retry = attr(typing.Callable[[int], bool], default_retry), + build_end = attr(typing.Callable[[int], None] | None, None), +) - build_events = True +def _collect_bes_from_args(ctx): + """Collect BES sinks from CLI args (--bes_backend/--bes_header).""" + sinks = [] for bes_backend in ctx.args.bes_backend: metadata = {} for bes_header in ctx.args.bes_header: (k, _, v) = bes_header.partition("=") metadata[k] = v - if type(build_events) != "list": - build_events = [] - build_events.append( + sinks.append( bazel.build_events.grpc( uri = bes_backend, - metadata = metadata + metadata = metadata, ) ) + return sinks + +def impl(ctx: TaskContext) -> int: + # Flags: accumulate data, then optionally transform + flags = ["--isatty=" + str(int(ctx.std.io.stdout.is_tty))] + flags.extend(ctx.args.bazel_flag) + flags.extend(ctx.config.extra_flags) + if 
ctx.config.flags: + flags = ctx.config.flags(flags) + + startup_flags = list(ctx.args.bazel_startup_flag) + startup_flags.extend(ctx.config.extra_startup_flags) + if ctx.config.startup_flags: + startup_flags = ctx.config.startup_flags(startup_flags) + + # BES: merge arg-based sinks with config sinks + build_events = _collect_bes_from_args(ctx) + if ctx.config.build_event_sinks: + build_events.extend(ctx.config.build_event_sinks) + + # Coerce to bool/list for ctx.bazel.build: + # - non-empty list → stream to those sinks + build_events() iterator. + # - True → stream without explicit sinks (build_event handler only) + # - False → no BEP stream at all + if not build_events: + if ctx.config.build_event: + build_events = True + else: + build_events = False + + if ctx.config.build_start: + ctx.config.build_start() + + for _ in range(10): + build = ctx.bazel.build( + build_events = build_events, + flags = flags, + startup_flags = startup_flags, + *ctx.args.target_pattern, + ) + + if ctx.config.build_event: + handler = ctx.config.build_event + state = {} + for event in build.build_events(): + handler(ctx, state, event) - bazel_flags = ["--isatty=" + str(int(ctx.std.io.stdout.is_tty))] - for bazel_flag in ctx.args.bazel_flag: - bazel_flags.append(bazel_flag) + build_status = build.wait() - bazel_startup_flags = [] - for flag in ctx.args.bazel_startup_flag: - bazel_startup_flags.append(flag) + if build_status.code == 0 or not ctx.config.build_retry(build_status.code): + break - build = ctx.bazel.build( - build_events = build_events, - flags = bazel_flags, - startup_flags = bazel_startup_flags, - *ctx.args.target_pattern - ) + if ctx.config.build_end: + ctx.config.build_end(build_status.code) - build_status = build.wait() return build_status.code build = task( implementation = impl, + config = BuildConfig, args = { - # TODO: Support a long --pattern_file like bazel does (@./targets) - # TODO: Support - (list from stdin) "target_pattern": args.positional(minimum = 1, maximum = 
512, default = ["..."]), "bazel_flag": args.string_list(), "bazel_startup_flag": args.string_list(), + "remote_executor": args.string(), + "remote_cache": args.string(), "bes_backend": args.string_list(), "bes_header": args.string_list(), - } + }, ) diff --git a/crates/axl-runtime/src/builtins/aspect/config/builtins.axl b/crates/axl-runtime/src/builtins/aspect/config/builtins.axl new file mode 100644 index 000000000..7f075b8f2 --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/config/builtins.axl @@ -0,0 +1,137 @@ +"""Configures builtin tasks for Workflows""" + +load("../tasks/migrate.axl", "migrate") + +load("../lib/platform.axl", + "read_platform_config", + "read_host_config", + "get_bazelrc_flags", + "DEFAULT_PLATFORM_DIR" +) + +load( + "../lib/github.axl", + "create_check_run", + "update_check_run", + "complete_check_run", + "build_output", + "build_annotation", + "create_review", + "build_suggestion", +) + +def _format_build_state(build_state): + md = """ +# Build Summary +Total: {} +Failed targets: {} +State: {} + +""".format( + build_state["total"], + len(build_state["failures"]), + "failure" if len(build_state["failures"]) else "success" + ) + for target, failure in build_state["failures"].items(): + md += "## {}\n\n```{}```\n".format(target, failure.message) + return md + + +def on_build_event(ctx: TaskContext, build_state: dict, event): + if not build_state: + build_state["total"] = 0 + build_state["failures"] = {} + if event.kind == "target_completed": + build_state["total"] += 1 + if not event.payload.success: + build_state["failures"][event.id.label] = event.payload.failure_detail + annotations = [ + build_annotation( + path = "README.md", # NOT "./README.md" + start_line = 6, # target the deleted lines + end_line = 6, + message = "Unused variable 'foo'", + annotation_level = "warning", + ), + ] + r = create_check_run( + ctx, + token = "", + owner = "thesayyn", + repo = "wasp", + name = "Build 2", + head_sha = 
"86fadc736fc76303a82f8ba05feff0f77e942847", + status = "in_progress", + output = build_output( + title = "build failed", + summary = "failed", + text = _format_build_state(build_state), + annotations = annotations + ) + ) + print(r) + elif event.kind == "build_finished": + print(_format_build_state(build_state)) + suggestions = [ + build_suggestion( + path = "README.md", + line = 7, + suggested_code = "fixed line 5", + message = "Lint: trailing whitespace", + ), + ] + + r = create_review( + ctx, + token = "", + owner = "thesayyn", + repo = "lint_example", + pull_number = 1, + body = "Lint findings", + event = "COMMENT", + comments = suggestions, + ) + print(r) + + +def configure_builtins(ctx: ConfigContext): + ctx.tasks.add(migrate) + + CI = ctx.std.env.var("BUILDKITE") + + # Read platform config from disk + platform_config = read_platform_config(ctx.std.fs) + # Read host config from environment + host_config = read_host_config(ctx.std.env, ctx.std.io) + + # Generate bazelrc content + flags = get_bazelrc_flags( + platform_config = platform_config, + host_config = host_config, + bazel_version = "7.0.0", + ) + + # Debugging information + if CI: + print(platform_config) + print(host_config) + print(flags) + + user = ctx.std.env.var("USER") + + bessie_endpoint = platform_config.get("bessie_endpoint", None) + bessie_sinks = [] + if user != "thesayyn" and bessie_endpoint: + bessie_sinks.append(bazel.build_events.grpc( + uri = bessie_endpoint, + metadata = {} # TODO: how does bessie authenticate? 
+ )) + + for task in ctx.tasks: + if task.name == "build" and task.path.endswith("aspect/build.axl"): + if user != "thesayyn": + task.config.extra_startup_flags.extend(flags.get("startup", [])) + task.config.extra_flags.extend(flags.get("build", [])) + task.config.build_start = lambda: print("+++ :bazel: Building") + task.config.build_event_sinks.extend(bessie_sinks) + task.config.build_event = on_build_event \ No newline at end of file diff --git a/crates/axl-runtime/src/builtins/aspect/config/delivery.axl b/crates/axl-runtime/src/builtins/aspect/config/delivery.axl new file mode 100644 index 000000000..69da960ad --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/config/delivery.axl @@ -0,0 +1,97 @@ +"""Configures delivery task for Workflows""" + +load("../tasks/delivery.axl", "delivery") +load("../lib/platform.axl", + "read_platform_config", + "read_host_config", + "get_bazelrc_flags", + "DEFAULT_PLATFORM_DIR" +) + +def _check_deliveryd_health(ctx, socket_path): + """ + Check if deliveryd is healthy by calling the /health endpoint. + + Returns: + True if healthy, False otherwise + """ + http = ctx.http() + response = http.get( + url="http://localhost/health", + unix_socket=socket_path, + ) + + result = response.map_err(lambda e: str(e)).block() + if type(result) == "string": + return False + return result.status >= 200 and result.status < 300 + +def _start_deliveryd(ctx, delivery_db_endpoint, socket_path = "/tmp/deliveryd.sock"): + """ + Start the deliveryd process in the background if not already running. 
+ + Args: + ctx: Config context + delivery_db_endpoint: Redis endpoint URL (redis:// or rediss:// for TLS) + socket_path: Unix socket path for deliveryd + """ + # Check if deliveryd is already running by checking for the socket + if ctx.std.fs.exists(socket_path): + # Socket exists, check if the instance is healthy + if _check_deliveryd_health(ctx, socket_path): + print("deliveryd already running and healthy (socket: {})".format(socket_path)) + return + else: + # Stale socket, remove it + print("deliveryd socket exists but instance is unhealthy, removing stale socket") + ctx.std.fs.remove_file(socket_path) + + cmd = ctx.std.process.command("deliveryd") + cmd.arg("--socket=" + socket_path) + cmd.arg("--redis-endpoint=" + delivery_db_endpoint) + + # Run in background + cmd.stdout("null") + cmd.stderr("null") + cmd.spawn() + + print("Started deliveryd (socket: {}, endpoint: {})".format(socket_path, delivery_db_endpoint)) + +def configure_delivery(ctx: ConfigContext): + # Add a delivery verb + ctx.tasks.add(delivery) + + CI = ctx.std.env.var("BUILDKITE") + + if CI: + print("--- :aspect-build: Configuring Workflows") + + # Read platform config from disk + platform_config = read_platform_config(ctx.std.fs) + + # Start deliveryd if delivery_db_endpoint is configured + delivery_db_endpoint = platform_config.get("delivery_db_endpoint") + if delivery_db_endpoint: + _start_deliveryd(ctx, delivery_db_endpoint) + + # Read host config from environment + host_config = read_host_config(ctx.std.env, ctx.std.io) + + # Generate bazelrc content + # TODO: use these flags? 
+ flags = get_bazelrc_flags( + platform_config = platform_config, + host_config = host_config, + bazel_version = "7.0.0", + ) + + # Debugging information + if CI: + print(platform_config) + print(host_config) + print(flags) + + + for task in ctx.tasks: + if task.name == "delivery": + task.config.delivery_start = lambda: print("--- :bazel: Delivery") diff --git a/crates/axl-runtime/src/builtins/aspect/config/lint.axl b/crates/axl-runtime/src/builtins/aspect/config/lint.axl new file mode 100644 index 000000000..0e39a9ec0 --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/config/lint.axl @@ -0,0 +1,77 @@ +"""Configures rules_lint if its available""" + +load("../lib/platform.axl", + "read_platform_config", + "read_host_config", + "get_bazelrc_flags", + "DEFAULT_PLATFORM_DIR" +) +load( + "../lib/github.axl", + "create_check_run", + "update_check_run", + "complete_check_run", + "build_output", + "build_annotation", + "create_review", + "build_suggestion", +) +load( + "../lib/sarif.axl", + "sarif_to_annotations", + "get_sarif_summary" +) +load( + "@aspect_rules_lint//lint/lint.axl", + "StrategyHoldTheLine", +) +load("../lib/linting.axl", "make_github_strategy", "make_github_changed_files_provider") + + + +def configure_rules_lint(ctx: ConfigContext): + CI = ctx.std.env.var("BUILDKITE") + + # Read platform config from disk + platform_config = read_platform_config(ctx.std.fs) + + # Read host config from environment + host_config = read_host_config(ctx.std.env, ctx.std.io) + + # Generate bazelrc content + flags = get_bazelrc_flags( + platform_config = platform_config, + host_config = host_config, + bazel_version = "7.0.0", + ) + + # Debugging information + if CI: + print(platform_config) + print(host_config) + print(flags) + + + for task in ctx.tasks: + if task.name == "lint": + github_token = ctx.std.env.var("GITHUB_TOKEN") + if github_token: + # CI mode: GitHub-aware strategy with hold-the-line + github_repository = ctx.std.env.var("GITHUB_REPOSITORY") or "" + 
repo_parts = github_repository.split("/") + gh_owner = repo_parts[0] if len(repo_parts) >= 2 else "" + gh_repo = repo_parts[1] if len(repo_parts) >= 2 else "" + + task.config.strategy = make_github_strategy( + StrategyHoldTheLine, + token = github_token, + owner = gh_owner, + repo = gh_repo, + mode = "streaming", + ) + task.config.changed_files_provider = make_github_changed_files_provider( + token = github_token, + owner = gh_owner, + repo = gh_repo, + ) + # else: local dev uses defaults (StrategyHoldTheLine + GitDiffProvider) diff --git a/crates/axl-runtime/src/builtins/aspect/config/nolint.axl b/crates/axl-runtime/src/builtins/aspect/config/nolint.axl new file mode 100644 index 000000000..bf7ae89df --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/config/nolint.axl @@ -0,0 +1,8 @@ +"""Configures a dummy lint verb for migration.""" + +load("../tasks/dummy_lint.axl", "lint") +load("../tasks/dummy_format.axl", "format") + +def configure_dummy_lint(ctx: ConfigContext): + ctx.tasks.add(lint) + ctx.tasks.add(format) \ No newline at end of file diff --git a/crates/axl-runtime/src/builtins/aspect/lib/deliveryd.axl b/crates/axl-runtime/src/builtins/aspect/lib/deliveryd.axl new file mode 100644 index 000000000..2abd33dfb --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/lib/deliveryd.axl @@ -0,0 +1,104 @@ +""" +Client library for communicating with deliveryd. + +deliveryd is a Unix socket HTTP server that manages delivery state, +tracking which artifacts have been delivered and preventing re-delivery. +""" + +def query(ctx, socket_path, ci_host, commit_sha, workspace): + """ + Query deliveryd for delivery state of all targets in a commit. + Returns a dict mapping label -> {output_sha, delivered, delivered_by}. 
+ """ + http = ctx.http() + response = http.post( + "http://localhost/query", + headers={"Content-Type": "application/json"}, + data=json.encode({ + "ci_host": ci_host, + "commit_sha": commit_sha, + "workspace": workspace, + }), + unix_socket=socket_path, + ).block() + + + if response.status < 200 or response.status >= 300: + fail("deliveryd query failed: " + response.body) + + data = json.decode(response.body) + + targets = data.get("targets", []) or [] + # Build lookup dict by label + result = {} + for target in targets: + result[target["label"]] = { + "output_sha": target["output_sha"], + "delivered": target["delivered"], + "delivered_by": target.get("delivered_by"), + } + return result + +def deliver(ctx, socket_path, ci_host, output_sha, workspace, signature): + """ + Mark a target as delivered by setting its delivery signature. + """ + http = ctx.http() + response = http.post( + "http://localhost/deliver", + headers={"Content-Type": "application/json"}, + data=json.encode({ + "ci_host": ci_host, + "output_sha": output_sha, + "workspace": workspace, + "signature": signature, + }), + unix_socket=socket_path, + ).block() + + if response.status < 200 or response.status >= 300: + fail("deliveryd deliver failed: " + response.body) + +def record(ctx, socket_path, ci_host, commit_sha, workspace, label, output_sha): + """ + Record a target's output SHA with deliveryd. + This must be called before the target can be queried or delivered. 
+ """ + http = ctx.http() + response = http.post( + "http://localhost/record", + headers={"Content-Type": "application/json"}, + data=json.encode({ + "ci_host": ci_host, + "commit_sha": commit_sha, + "workspace": workspace, + "label": label, + "output_sha": output_sha, + }), + unix_socket=socket_path, + ).map_err(lambda e: e).block() + + if type(response) == "string": + fail("deliveryd record failed: " + response) + + if response.status < 200 or response.status >= 300: + fail("deliveryd record failed: " + response.body) + +def delete_artifact(ctx, socket_path, ci_host, output_sha, workspace): + """ + Delete artifact metadata (used for cleanup on failed deliveries). + """ + http = ctx.http() + response = http.post( + "http://localhost/artifact/delete", + headers={"Content-Type": "application/json"}, + data=json.encode({ + "ci_host": ci_host, + "output_sha": output_sha, + "workspace": workspace, + }), + unix_socket=socket_path, + ).block() + + if response.status < 200 or response.status >= 300: + fail("deliveryd artifact delete failed: " + response.body) diff --git a/crates/axl-runtime/src/builtins/aspect/lib/github.axl b/crates/axl-runtime/src/builtins/aspect/lib/github.axl new file mode 100644 index 000000000..bdd16dcd2 --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/lib/github.axl @@ -0,0 +1,727 @@ +""" +GitHub Check Runs Client Library + +Client for creating and updating GitHub Check Runs via the GitHub API. +""" + +DEFAULT_GITHUB_API = "https://api.github.com" + + +def _normalize_output(output): + """ + Normalize output parameter to the required dict format. + + If output is a string, wraps it in a dict with title and summary. + If output is already a dict, returns it as-is. + """ + if output == None: + return None + if type(output) == "string": + return { + "title": "Check Run Output", + "summary": output, + } + return output + + +def _do_request(ctx, method, url, token, payload = None): + """ + Make an HTTP request to GitHub API. 
+ + Args: + ctx: Context with http() + method: HTTP method ("POST" or "PATCH") + url: Full URL to request + token: GitHub token (PAT or Actions token) + payload: Optional dict to send as JSON body + + Returns: + (success: bool, status: int, body: dict or str) + """ + http = ctx.http() + + headers = { + "Authorization": "Bearer " + token, + "Accept": "application/vnd.github+json", + "Content-Type": "application/json", + "X-GitHub-Api-Version": "2022-11-28", + } + + if method == "POST": + response = http.post( + url, + headers = headers, + data = json.encode(payload) if payload else None, + ).block() + elif method == "PATCH": + response = http.patch( + url, + headers = headers, + data = json.encode(payload) if payload else None, + ).block() + else: + return (False, 0, "unsupported method: " + method) + + success = response.status >= 200 and response.status < 300 + + # Try to parse response as JSON + body = response.body + if body: + body = json.decode(body) + + return (success, response.status, body) + + +def _do_get_request(ctx, url, token): + """ + Make a GET request to GitHub API. + + Args: + ctx: Context with http() + url: Full URL to request + token: GitHub token (PAT or Actions token) + + Returns: + (success: bool, status: int, body: dict or str) + """ + http = ctx.http() + + headers = { + "Authorization": "Bearer " + token, + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + response = http.get( + url = url, + headers = headers, + ).block() + + success = response.status >= 200 and response.status < 300 + + body = response.body + if body: + body = json.decode(body) + + return (success, response.status, body) + + +def _do_delete_request(ctx, url, token): + """ + Make a DELETE request to GitHub API. 
+ + Args: + ctx: Context with http() + url: Full URL to request + token: GitHub token (PAT or Actions token) + + Returns: + (success: bool, status: int, body: str or None) + """ + http = ctx.http() + + headers = { + "Authorization": "Bearer " + token, + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + response = http.delete( + url = url, + headers = headers, + ).block() + + success = response.status >= 200 and response.status < 300 + + return (success, response.status, response.body) + + +def get_pull_request(ctx, token, owner, repo, pull_number, api_base = DEFAULT_GITHUB_API): + """ + Get a pull request by number. + + Args: + ctx: Context with http() + token: GitHub token + owner: Repository owner + repo: Repository name + pull_number: The PR number + api_base: GitHub API base URL + + Returns: + dict with "success" (bool), "pull_request" (dict) on success + dict with "success" (False), "error" (str), "status" (int) on failure + """ + url = api_base + "/repos/" + owner + "/" + repo + "/pulls/" + str(pull_number) + + success, status_code, body = _do_get_request(ctx, url, token) + + if success: + return { + "success": True, + "pull_request": body, + } + + error_msg = "request failed: " + str(status_code) + if body and type(body) == "dict" and body.get("message"): + error_msg = error_msg + " - " + body["message"] + + return {"success": False, "error": error_msg, "status": status_code} + + +def list_review_comments(ctx, token, owner, repo, pull_number, api_base = DEFAULT_GITHUB_API): + """ + List all review comments on a pull request. + + Handles pagination to retrieve all comments. 
+ + Args: + ctx: Context with http() + token: GitHub token + owner: Repository owner + repo: Repository name + pull_number: The PR number + api_base: GitHub API base URL + + Returns: + dict with "success" (bool), "comments" (list) on success + dict with "success" (False), "error" (str), "status" (int) on failure + """ + all_comments = [] + + for page in range(1, 101): # max 100 pages (10,000 comments) + url = api_base + "/repos/" + owner + "/" + repo + "/pulls/" + str(pull_number) + "/comments?per_page=100&page=" + str(page) + + success, status_code, body = _do_get_request(ctx, url, token) + + if not success: + error_msg = "request failed: " + str(status_code) + if body and type(body) == "dict" and body.get("message"): + error_msg = error_msg + " - " + body["message"] + return {"success": False, "error": error_msg, "status": status_code} + + if not body or len(body) == 0: + break + + all_comments.extend(body) + + if len(body) < 100: + break + + return { + "success": True, + "comments": all_comments, + } + + +def delete_review_comment(ctx, token, owner, repo, comment_id, api_base = DEFAULT_GITHUB_API): + """ + Delete a review comment on a pull request. + + Args: + ctx: Context with http() + token: GitHub token + owner: Repository owner + repo: Repository name + comment_id: The comment ID to delete + api_base: GitHub API base URL + + Returns: + dict with "success" (bool) on success + dict with "success" (False), "error" (str), "status" (int) on failure + """ + url = api_base + "/repos/" + owner + "/" + repo + "/pulls/comments/" + str(comment_id) + + success, status_code, body = _do_delete_request(ctx, url, token) + + if success: + return {"success": True} + + error_msg = "request failed: " + str(status_code) + return {"success": False, "error": error_msg, "status": status_code} + + +def list_pull_request_files(ctx, token, owner, repo, pull_number, api_base = DEFAULT_GITHUB_API): + """ + List files changed in a pull request. 
def create_check_run(ctx, token, owner, repo, name, head_sha, status = None, output = None, details_url = None, external_id = None, started_at = None, api_base = DEFAULT_GITHUB_API):
    """
    Create a new check run on a commit.

    Args:
        ctx: Context with http()
        token: GitHub token (PAT or GITHUB_TOKEN from Actions)
        owner: Repository owner
        repo: Repository name
        name: Name of the check run
        head_sha: The SHA of the commit to create the check on
        status: Optional status ("queued", "in_progress", "completed")
        output: Optional dict with "title", "summary", and optionally "text", "annotations"
        details_url: Optional URL for more details
        external_id: Optional external identifier
        started_at: Optional ISO 8601 timestamp
        api_base: GitHub API base URL (default: https://api.github.com)

    Returns:
        dict with "success" (bool), "check_run_id" (int), "html_url" (str) on success
        dict with "success" (False), "error" (str), "status" (int) on failure
    """
    endpoint = "{}/repos/{}/{}/check-runs".format(api_base, owner, repo)

    payload = {
        "name": name,
        "head_sha": head_sha,
    }

    # Only include optional fields the caller actually supplied.
    optional_fields = [
        ("status", status),
        ("output", output),
        ("details_url", details_url),
        ("external_id", external_id),
        ("started_at", started_at),
    ]
    for field_name, field_value in optional_fields:
        if field_value:
            payload[field_name] = field_value

    ok, code, body = _do_request(ctx, "POST", endpoint, token, payload)

    if not ok:
        message = "request failed: " + str(code)
        # Surface GitHub's error message when the response body carries one.
        if body and type(body) == "dict" and body.get("message"):
            message = message + " - " + body["message"]
        return {"success": False, "error": message, "status": code}

    return {
        "success": True,
        "check_run_id": body.get("id"),
        "html_url": body.get("html_url"),
        "response": body,
    }
def complete_check_run(ctx, token, owner, repo, check_run_id, conclusion, output = None, api_base = DEFAULT_GITHUB_API):
    """
    Mark a check run as completed with the given conclusion.

    Convenience wrapper around update_check_run that pins status to "completed".

    Args:
        ctx: Context with http()
        token: GitHub token
        owner: Repository owner
        repo: Repository name
        check_run_id: The ID of the check run to complete
        conclusion: One of: "action_required", "cancelled", "failure",
                    "neutral", "success", "skipped", "stale", "timed_out"
        output: Optional dict with "title", "summary"
        api_base: GitHub API base URL

    Returns:
        dict with "success" (bool), "check_run_id" (int) on success
        dict with "success" (False), "error" (str), "status" (int) on failure
    """
    return update_check_run(
        ctx,
        token,
        owner,
        repo,
        check_run_id,
        status = "completed",
        conclusion = conclusion,
        output = output,
        api_base = api_base,
    )


def build_output(title, summary, text = None, annotations = None):
    """
    Assemble an "output" object for the check-runs API.

    Args:
        title: Title of the check run output
        summary: Summary (supports markdown)
        text: Optional detailed text (supports markdown)
        annotations: Optional list of annotation dicts

    Returns:
        dict suitable for the "output" parameter
    """
    result = {
        "title": title,
        "summary": summary,
    }
    if text:
        result["text"] = text
    if annotations:
        result["annotations"] = annotations
    return result
# =============================================================================
# Pull Request Review Comments API
# =============================================================================

def create_review(ctx, token, owner, repo, pull_number, body = None, event = "COMMENT", comments = None, commit_id = None, api_base = DEFAULT_GITHUB_API):
    """
    Create a pull request review, optionally carrying inline comments.

    The review's comments appear directly on the PR diff page.

    Args:
        ctx: Context with http()
        token: GitHub token
        owner: Repository owner
        repo: Repository name
        pull_number: The PR number
        body: Optional review body text (shown at top of review)
        event: Review action - "APPROVE", "REQUEST_CHANGES", or "COMMENT" (default)
        comments: Optional list of review comment dicts (use build_review_comment)
        commit_id: Optional commit SHA to review (defaults to PR head)
        api_base: GitHub API base URL

    Returns:
        dict with "success" (bool), "review_id" (int) on success
        dict with "success" (False), "error" (str), "status" (int) on failure
    """
    endpoint = "{}/repos/{}/{}/pulls/{}/reviews".format(api_base, owner, repo, pull_number)

    payload = {"event": event}
    if body:
        payload["body"] = body
    if comments:
        payload["comments"] = comments
    if commit_id:
        payload["commit_id"] = commit_id

    ok, code, resp = _do_request(ctx, "POST", endpoint, token, payload)

    if not ok:
        message = "request failed: " + str(code)
        if resp and type(resp) == "dict" and resp.get("message"):
            message = message + " - " + resp["message"]
        return {"success": False, "error": message, "status": code}

    return {
        "success": True,
        "review_id": resp.get("id"),
        "html_url": resp.get("html_url"),
        "response": resp,
    }
def build_review_comment(path, body, line = None, side = "RIGHT", start_line = None, start_side = None):
    """
    Assemble a review comment dict for the "comments" list of create_review.

    Args:
        path: File path relative to repo root
        body: Comment text (supports markdown)
        line: Line number in the diff (the ending line for multi-line)
        side: "LEFT" (deletion) or "RIGHT" (addition, default)
        start_line: For multi-line comments, the starting line
        start_side: Side for start_line

    Returns:
        dict suitable for the "comments" list in create_review
    """
    result = {
        "path": path,
        "body": body,
    }
    # "side" only makes sense when a line is targeted.
    if line:
        result["line"] = line
        result["side"] = side
    if start_line:
        result["start_line"] = start_line
    if start_side:
        result["start_side"] = start_side
    return result


def build_suggestion(path, line, suggested_code, message = None, start_line = None):
    """
    Assemble a code-suggestion comment that renders an "Apply suggestion" button.

    Args:
        path: File path relative to repo root
        line: Line number to suggest replacement for (end line if multi-line)
        suggested_code: The replacement code (what the line(s) should become)
        message: Optional message to show above the suggestion
        start_line: For multi-line suggestions, the starting line

    Returns:
        dict suitable for the "comments" list in create_review

    Example:
        # Single line suggestion
        build_suggestion(
            path = "src/main.py",
            line = 42,
            suggested_code = "const FOO = 'bar'",
            message = "Use const instead of let for constants",
        )

        # Multi-line suggestion (replace lines 10-12 with new code)
        build_suggestion(
            path = "src/main.py",
            start_line = 10,
            line = 12,
            suggested_code = "function foo() {\\n    return bar\\n}",
        )
    """
    parts = []
    if message:
        parts.append(message + "\n\n")
    parts.append("```suggestion\n" + suggested_code + "\n```")

    suggestion = {
        "path": path,
        "body": "".join(parts),
        "line": line,
        "side": "RIGHT",
    }
    if start_line:
        suggestion["start_line"] = start_line
        suggestion["start_side"] = "RIGHT"
    return suggestion
def _parse_github_diff_patch(patch):
    """
    Parse a GitHub file patch string to extract added line numbers.

    Args:
        patch: The patch string from GitHub's files API (hunks only, no
               ---/+++ file headers)

    Returns:
        List of 0-based line numbers of added lines
    """
    if not patch:
        return []

    lines = []
    current_line = 0
    for line in patch.split("\n"):
        if line.startswith("@@"):
            # Hunk header: @@ -old,count +new,count @@ -- take the new-file start.
            parts = line.split(" ")
            for part in parts:
                if part.startswith("+") and part != "+++":
                    plus = part.removeprefix("+")
                    if "," in plus:
                        current_line = int(plus.split(",")[0])
                    else:
                        current_line = int(plus)
                    break
        elif line.startswith("\\"):
            # BUG FIX: "\ No newline at end of file" is a marker, not content.
            # Counting it as a context line shifted every subsequent added-line
            # number in the hunk by one.
            pass
        elif line.startswith("+"):
            # Added line (recorded 0-based).
            lines.append(current_line - 1)
            current_line += 1
        elif line.startswith("-"):
            # Deleted line: exists only in the old file; new-file counter unchanged.
            pass
        else:
            # Context line: present in both old and new file.
            current_line += 1

    return lines


def make_github_changed_files_provider(token, owner, repo):
    """
    Create a ChangedFilesProvider that fetches changed files from the GitHub API.

    Args:
        token: GitHub token
        owner: Repository owner
        repo: Repository name

    Returns:
        ChangedFilesProvider instance
    """
    def get_changed_files(ctx, state):
        ref = ctx.std.env.var("GITHUB_REF") or ""
        if not (ref.startswith("refs/pull/") and ref.endswith("/merge")):
            return []  # not a PR build

        pr_number = int(ref.removeprefix("refs/pull/").removesuffix("/merge"))
        state["pr_number"] = pr_number

        # Fetch changed files from GitHub API; best-effort on failure.
        result = list_pull_request_files(ctx, token, owner, repo, pr_number)
        if not result["success"]:
            return []

        all_files = []
        for f in result["files"]:
            if f.get("status", "") == "removed":
                continue
            filename = f.get("filename", "")
            patch = f.get("patch", "")
            added_lines = _parse_github_diff_patch(patch)
            all_files.append({"file": filename, "lines": added_lines})

        state["changed_lines"] = {f["file"]: f["lines"] for f in all_files}
        return all_files

    return ChangedFilesProvider(get_changed_files = get_changed_files)
# =============================================================================
# GitHub Strategy Wrapper
# =============================================================================

def _build_comment_marker(tool, file, line, rule_id):
    """Build a hidden HTML comment marker for identifying lint comments.

    The marker is invisible when GitHub renders the comment, but lets later
    runs recognize, deduplicate, and garbage-collect our own comments.
    """
    # BUG FIX: the format template was the empty string (""), so every marker
    # rendered as "" and marker-based dedup/cleanup could never match anything.
    # NOTE(review): exact marker text reconstructed; any stable prefix/suffix
    # pair shared with _extract_comment_marker works -- confirm against any
    # comments already posted in production.
    return "<!-- aspect-lint:{}:{}:{}:{} -->".format(tool, file, line, rule_id)


def _extract_comment_marker(body):
    """Extract the aspect-lint marker from a comment body, or None."""
    # BUG FIX: prefix was "" (matching at index 0 of every body) and `suffix`
    # was never defined, so this function raised on first use. Both values
    # must mirror _build_comment_marker.
    prefix = "<!-- aspect-lint:"
    suffix = " -->"
    if not body:
        return None
    idx = body.find(prefix)
    if idx < 0:
        return None
    end = body.find(suffix, idx)
    if end < 0:
        return None
    return body[idx:end + len(suffix)]


def _check_staleness(ctx, state):
    """
    Check if the current run is stale (PR HEAD has moved past our commit).

    Returns True if stale, False otherwise.
    On API failure, assumes NOT stale.
    """
    gh = state["github"]
    pr_number = state.get("pr_number")
    if not pr_number:
        return False

    result = get_pull_request(
        ctx,
        token = gh["token"],
        owner = gh["owner"],
        repo = gh["repo"],
        pull_number = pr_number,
    )

    if not result["success"]:
        # API failure: assume not stale (better to post stale comments than lose results)
        return False

    pr = result["pull_request"]
    head_sha = pr.get("head", {}).get("sha", "")
    return head_sha != gh["head_sha"]


def _filter_by_diff(comments, changed_lines):
    """Keep only comments that target lines within the PR diff.

    changed_lines maps path -> list of 0-based added lines; comment "line"
    values are 1-based, hence the -1.
    """
    if not changed_lines:
        return list(comments)
    return [
        c for c in comments
        if (c.get("line", 0) - 1) in (changed_lines.get(c.get("path", "")) or [])
    ]
def _post_as_review(ctx, gh, pr_number, comments, existing_markers):
    """Post comments as one grouped review, skipping ones already on the PR."""
    fresh = []
    for c in comments:
        if _extract_comment_marker(c.get("body", "")) not in existing_markers:
            fresh.append(c)
    if not fresh:
        return
    create_review(
        ctx, token = gh["token"], owner = gh["owner"],
        repo = gh["repo"], pull_number = pr_number,
        body = "Lint findings", event = "COMMENT",
        comments = fresh, commit_id = gh["head_sha"],
    )


def _post_individually(ctx, gh, pr_number, comments, existing_markers):
    """Post comments one at a time, skipping duplicates already on the PR."""
    for comment in comments:
        marker = _extract_comment_marker(comment.get("body", ""))
        if marker and marker in existing_markers:
            continue
        outcome = create_review_comment(
            ctx, token = gh["token"], owner = gh["owner"],
            repo = gh["repo"], pull_number = pr_number,
            body = comment["body"], path = comment["path"],
            line = comment.get("line"), commit_id = gh["head_sha"],
            side = comment.get("side", "RIGHT"),
            start_line = comment.get("start_line"),
            start_side = comment.get("start_side"),
        )
        # Remember successfully-posted markers so later comments in this run
        # don't double-post.
        if outcome["success"] and marker:
            existing_markers[marker] = True


def _cleanup_comments(ctx, state):
    """Delete comments for findings that no longer exist, and collapse
    duplicate comments down to the newest one."""
    gh = state["github"]
    pr_number = state.get("pr_number")
    if not pr_number:
        return

    # Markers we still want on the PR: diagnostics landing inside the diff.
    changed_lines = state.get("changed_lines", {})
    desired = {}
    for diag in state.get("diagnostics", []):
        lines = changed_lines.get(diag["file"])
        if lines and (diag["line"] - 1) in lines:
            marker = _build_comment_marker(diag["tool"], diag["file"], diag["line"], diag["rule_id"])
            desired[marker] = True

    # Fetch fresh state of comments on the PR; bail quietly on API failure.
    listing = list_review_comments(
        ctx, token = gh["token"], owner = gh["owner"],
        repo = gh["repo"], pull_number = pr_number,
    )
    if not listing["success"]:
        return

    # Group the PR's lint comments by their identity marker.
    grouped = {}
    for c in listing["comments"]:
        marker = _extract_comment_marker(c.get("body", ""))
        if not marker:
            continue
        grouped.setdefault(marker, []).append(c)

    for marker, dupes in grouped.items():
        if marker not in desired:
            # Finding is gone: remove every comment carrying this marker.
            doomed = dupes
        elif len(dupes) > 1:
            # Duplicates: keep only the newest (highest id).
            doomed = sorted(dupes, key = lambda c: c["id"])[:-1]
        else:
            continue
        for c in doomed:
            delete_review_comment(
                ctx, token = gh["token"], owner = gh["owner"],
                repo = gh["repo"], comment_id = c["id"],
            )
+ + Args: + base_strategy: The underlying Strategy to delegate to + token: GitHub token + owner: Repository owner + repo: Repository name + mode: "grouped" posts one review at the end, + "streaming" posts comments individually as linters finish + + Returns: + Strategy instance with GitHub integration + """ + def setup(ctx, state): + base_strategy.setup(ctx, state) + state["github"] = { + "token": token, + "owner": owner, + "repo": repo, + "head_sha": ctx.std.env.var("GITHUB_SHA") or "", + "pending_comments": [], + "stale": False, + } + + def process(ctx, state, filepath): + # Accumulate diagnostics and build review comments + diag_count_before = len(state.get("diagnostics", [])) + base_strategy.process(ctx, state, filepath) + + gh = state["github"] + if gh["stale"]: + return + + content = ctx.std.fs.read_to_string(filepath) + sarif = parse_sarif(content) + comments = sarif_to_review_comments(sarif) + _enrich_with_suggestions(ctx, comments) + + # Stamp each comment with a hidden marker for identity tracking + new_diagnostics = state.get("diagnostics", [])[diag_count_before:] + for i, comment in enumerate(comments): + if i < len(new_diagnostics): + diag = new_diagnostics[i] + marker = _build_comment_marker( + diag["tool"], diag["file"], diag["line"], diag["rule_id"]) + comment["body"] = marker + "\n" + comment["body"] + + gh["pending_comments"].extend(comments) + + # In streaming mode, post comments as they arrive + if mode == "streaming": + pr_number = state.get("pr_number") + if not pr_number: + return + if "existing_markers" not in gh: + gh["existing_markers"] = _get_existing_markers(ctx, gh, pr_number) + ready = _filter_by_diff(gh["pending_comments"], state.get("changed_lines", {})) + _post_individually(ctx, gh, pr_number, ready, gh["existing_markers"]) + gh["pending_comments"] = [] + + def finish(ctx, state): + gh = state["github"] + + if gh["stale"] or _check_staleness(ctx, state): + gh["stale"] = True + return base_strategy.finish(ctx, state) + + pr_number = 
state.get("pr_number") + if pr_number: + if mode == "grouped": + existing = _get_existing_markers(ctx, gh, pr_number) + ready = _filter_by_diff(gh["pending_comments"], state.get("changed_lines", {})) + _post_as_review(ctx, gh, pr_number, ready, existing) + _cleanup_comments(ctx, state) + + return base_strategy.finish(ctx, state) + + return Strategy( + needs_machine = base_strategy.needs_machine, + setup = setup, + process = process, + finish = finish, + ) diff --git a/crates/axl-runtime/src/builtins/aspect/lib/platform.axl b/crates/axl-runtime/src/builtins/aspect/lib/platform.axl new file mode 100644 index 000000000..2c43add0f --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/lib/platform.axl @@ -0,0 +1,462 @@ +""" +Platform Configuration and Bazelrc Generation Library + +Pure functions for reading platform/host configuration and generating bazelrc content. +This is the AXL equivalent of rosetta's bazel/flags.ts and related modules. +""" + +# Constants +DEFAULT_STORAGE_PATH = "/mnt/ephemeral" +DEFAULT_PLATFORM_DIR = "/etc/aspect/workflows/platform" +DEFAULT_BIN_DIR = "/etc/aspect/workflows/bin" + +# Map of logical keys to filenames +PLATFORM_CONFIG_KEYS = { + "remote_cache_endpoint": "remote_cache_endpoint", + "remote_cache_address": "remote_cache_address", + "storage_path": "storage_path", + "bessie_endpoint": "bessie_endpoint", + "build_result_ui_base_url": "build_result_ui_base_url", + "delivery_db_endpoint": "delivery_db_endpoint", +} + +# Bazel command lists +BAZEL_COMMANDS_ALL = [ + "analyze-profile", "aquery", "build", "canonicalize-flags", "clean", + "config", "coverage", "cquery", "dump", "fetch", "help", "info", + "license", "mobile-install", "print_action", "query", "run", + "shutdown", "sync", "test", "version", +] + +BAZEL_COMMANDS_BUILD = [ + "aquery", "build", "canonicalize-flags", "clean", "config", "coverage", + "cquery", "fetch", "info", "mobile-install", "print_action", "query", + "run", "sync", "test", +] + +BAZEL_COMMANDS_BUILD_TEST = 
[ + "aquery", "build", "canonicalize-flags", "clean", "config", "coverage", + "cquery", "info", "mobile-install", "print_action", "run", "test", +] + +BAZEL_COMMANDS_JOBS = ["build", "coverage", "cquery", "run", "test"] + +# ============================================================================= +# Flag Definitions +# ============================================================================= + +# Static boolean flags (no value) +STATIC_BOOLEAN_FLAGS = [ + {"name": "show_timestamps", "commands": BAZEL_COMMANDS_ALL}, + {"name": "remote_upload_local_results", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "heap_dump_on_oom", "commands": BAZEL_COMMANDS_ALL}, + {"name": "keep_going", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "generate_json_trace_profile", "commands": BAZEL_COMMANDS_ALL}, + {"name": "experimental_profile_include_target_label", "commands": BAZEL_COMMANDS_ALL}, + {"name": "incompatible_strict_action_env", "commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "experimental_repository_cache_hardlinks", "commands": BAZEL_COMMANDS_ALL}, + {"name": "incompatible_exclusive_test_sandboxed", "commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "experimental_reuse_sandbox_directories", "commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "incompatible_default_to_explicit_init_py", "commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "remote_accept_cached", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "incompatible_remote_results_ignore_disk", "commands": BAZEL_COMMANDS_BUILD, "versions": "< 7"}, +] + +# Static value flags +STATIC_VALUE_FLAGS = [ + {"name": "tool_tag", "value": "aspect-workflows", "commands": BAZEL_COMMANDS_ALL}, + {"name": "color", "value": "yes", "commands": BAZEL_COMMANDS_ALL}, + {"name": "isatty", "value": "0", "commands": BAZEL_COMMANDS_ALL}, + {"name": "terminal_columns", "value": "143", "commands": BAZEL_COMMANDS_ALL}, + {"name": "disk_cache", "value": "", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "symlink_prefix", "value": "bazel-", 
"commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "experimental_convenience_symlinks", "value": "normal", "commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "max_config_changes_to_show", "value": "-1", "commands": BAZEL_COMMANDS_BUILD_TEST}, + {"name": "remote_timeout", "value": "3600", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "remote_retries", "value": "360", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "grpc_keepalive_timeout", "value": "30s", "commands": BAZEL_COMMANDS_ALL}, + {"name": "experimental_repository_downloader_retries", "value": "2", "commands": BAZEL_COMMANDS_ALL}, + {"name": "bes_upload_mode", "value": "wait_for_upload_complete", "commands": BAZEL_COMMANDS_ALL}, + # Version-specific flags + {"name": "experimental_remote_build_event_upload", "value": "minimal", "commands": BAZEL_COMMANDS_BUILD, "versions": ">= 6 < 7"}, + {"name": "remote_build_event_upload", "value": "minimal", "commands": BAZEL_COMMANDS_BUILD, "versions": ">= 7"}, + # Cache compression flags (BuildBarn doesn't support compression) + {"name": "noexperimental_remote_cache_compression", "value": None, "commands": BAZEL_COMMANDS_BUILD, "versions": "< 8"}, + {"name": "noremote_cache_compression", "value": None, "commands": BAZEL_COMMANDS_BUILD, "versions": ">= 8"}, +] + +# Dynamic flags - value resolved from config dicts +DYNAMIC_FLAGS = [ + {"name": "remote_cache", "value": "dynamic:platform.remote_cache_endpoint", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "remote_bytestream_uri_prefix", "value": "dynamic:platform.remote_cache_address", "commands": BAZEL_COMMANDS_BUILD}, + {"name": "repository_cache", "value": "dynamic:computed.repository_cache", "commands": BAZEL_COMMANDS_ALL}, + {"name": "curses", "value": "dynamic:host.curses", "commands": BAZEL_COMMANDS_ALL}, + {"name": "show_progress_rate_limit", "value": "dynamic:host.progress_rate_limit", "commands": BAZEL_COMMANDS_ALL}, + # RBE flags + {"name": "remote_executor", "value": "dynamic:rbe.remote_executor", "commands": 
# Startup flags
STARTUP_FLAGS = [
    {"name": "output_user_root", "value": "dynamic:computed.output_user_root"},
    {"name": "output_base", "value": "dynamic:computed.output_base"},
]

# =============================================================================
# Configuration Readers
# =============================================================================

def _read_optional(fs, path):
    """Return the stripped contents of path, or None when missing/empty."""
    if not fs.exists(path):
        return None
    content = fs.read_to_string(path)
    if not content:
        return None
    return content.strip()


def read_platform_config(fs, platform_dir = DEFAULT_PLATFORM_DIR):
    """
    Read platform configuration from disk.

    Args:
        fs: Filesystem interface (ctx.std.fs)
        platform_dir: Path to platform config directory

    Returns:
        dict with keys: remote_cache_endpoint, remote_cache_address, storage_path,
        api_key, api_client_id, brs_api_endpoint, token, refresh_token, etc.
    """
    config = {}

    for key, filename in PLATFORM_CONFIG_KEYS.items():
        value = _read_optional(fs, platform_dir + "/" + filename)
        if value != None:
            config[key] = value

    # rosetta_api_tokens is a JSON file carrying token and refresh_token.
    tokens_path = platform_dir + "/rosetta_api_tokens"
    if fs.exists(tokens_path):
        raw = fs.read_to_string(tokens_path)
        if raw:
            config["rosetta_api_tokens"] = json.decode(raw)

    # Apply defaults
    config.setdefault("storage_path", DEFAULT_STORAGE_PATH)

    return config


def read_warming_config(fs, platform_dir = DEFAULT_PLATFORM_DIR):
    """
    Read warming-specific configuration from platform config files.

    Args:
        fs: Filesystem interface (ctx.std.fs)
        platform_dir: Path to platform config directory

    Returns:
        dict with optional keys: warming_bucket, warming_additional_paths
    """
    config = {}

    bucket = _read_optional(fs, platform_dir + "/warming_bucket")
    if bucket != None:
        config["warming_bucket"] = bucket

    extra_paths = _read_optional(fs, platform_dir + "/warming_additional_paths")
    if extra_paths != None:
        config["warming_additional_paths"] = extra_paths

    return config


def read_host_config(env, io):
    """
    Read host/CI configuration from environment.

    Args:
        env: Environment interface (ctx.std.env)
        io: IO interface (ctx.std.io)

    Returns:
        dict with keys: supports_curses, scm_repo_name, ci_host
    """
    config = {
        "supports_curses": io.stdout.is_tty,
        "scm_repo_name": None,
        "ci_host": None,
    }

    # Detect CI host and repo name from host-specific environment variables.
    if env.var("BUILDKITE_REPO"):
        config["ci_host"] = "buildkite"
        config["scm_repo_name"] = _parse_git_url_name(env.var("BUILDKITE_REPO"))
        config["supports_curses"] = True  # BuildKite supports curses
    elif env.var("GITHUB_REPOSITORY"):
        config["ci_host"] = "github"
        full_name = env.var("GITHUB_REPOSITORY")
        config["scm_repo_name"] = full_name.split("/")[-1] if "/" in full_name else full_name
    elif env.var("CIRCLE_PROJECT_REPONAME"):
        config["ci_host"] = "circleci"
        config["scm_repo_name"] = env.var("CIRCLE_PROJECT_REPONAME")
    elif env.var("CI_PROJECT_NAME"):
        config["ci_host"] = "gitlab"
        config["scm_repo_name"] = env.var("CI_PROJECT_NAME")

    return config


def _parse_git_url_name(url):
    """Extract the bare repository name from a git URL (https or ssh form)."""
    if not url:
        return None
    trimmed = url.rstrip("/")
    trimmed = trimmed.removesuffix(".git")
    # Handles both "host/org/repo" and "git@host:repo" shapes.
    return trimmed.split("/")[-1].split(":")[-1]
def _version_component(parts, idx):
    """Integer value of version component idx; 0 when absent or non-numeric.

    Pre-release ("-rc1") and build ("+abc") suffixes are stripped before
    conversion so versions like "7.0-rc1" parse instead of crashing.
    """
    if idx >= len(parts):
        return 0
    component = parts[idx].split("-")[0].split("+")[0]
    return int(component) if component.isdigit() else 0


def parse_version(version_str):
    """Parse a version string into a (major, minor, patch) int tuple.

    Tolerates missing components ("7" -> (7, 0, 0)) and pre-release/build
    suffixes on any component ("6.4-rc1" -> (6, 4, 0)). Previously only the
    patch component stripped its "-suffix", so "7.0-rc1" raised on int().
    """
    parts = version_str.split(".")
    return (
        _version_component(parts, 0),
        _version_component(parts, 1),
        _version_component(parts, 2),
    )


def version_satisfies(version, constraint):
    """
    Check if version satisfies a space-separated semver constraint.

    Args:
        version: Version string like "7.0.0"; None/"" satisfies everything.
        constraint: Constraint like "< 7", ">= 6 < 7", ">= 8", or "*".

    Returns:
        True if the version satisfies every clause of the constraint.
    """
    if not version or constraint == "*":
        return True

    v = parse_version(version)

    # Constraints are alternating "<op> <version>" token pairs.
    parts = constraint.split()
    for i in range(0, len(parts), 2):
        if i + 1 >= len(parts):
            break  # dangling operator with no version; ignore it
        op = parts[i]
        target = parse_version(parts[i + 1])

        if op == "<" and not (v < target):
            return False
        elif op == "<=" and not (v <= target):
            return False
        elif op == ">" and not (v > target):
            return False
        elif op == ">=" and not (v >= target):
            return False
        elif op == "=" and v != target:
            return False

    return True

# =============================================================================
# Dynamic Value Resolution
# =============================================================================

def _sanitize_filename(name):
    """Sanitize string for use in filesystem paths (non [-_.a-zA-Z0-9] -> "_")."""
    if not name:
        return ""
    result = ""
    for c in name.elems():
        if c.isalnum() or c in "-_.":
            result += c
        else:
            result += "_"
    return result


def _compute_output_path(platform_config, host_config, segment, workspace):
    """Compute output_base or output_user_root path under the storage mount."""
    mount = platform_config.get("storage_path", DEFAULT_STORAGE_PATH)

    # Normalize workspace name: "." means the root workspace.
    subdir = "__main__" if workspace == "." else workspace.replace("/", "_")

    # Include repo name if available
    repo_name = host_config.get("scm_repo_name")
    if repo_name:
        return mount + "/" + segment + "/" + _sanitize_filename(repo_name) + "/" + subdir
    return mount + "/" + segment + "/" + subdir


def resolve_dynamic_value(key, platform_config, host_config, rbe_config, workspace):
    """
    Resolve a dynamic value from config dicts.

    Args:
        key: Dynamic key like "platform.remote_cache_endpoint" or "computed.repository_cache"
        platform_config: Platform configuration dict
        host_config: Host configuration dict
        rbe_config: RBE configuration dict (may be None)
        workspace: Workspace name

    Returns:
        Resolved value string or None
    """
    if key.startswith("platform."):
        config_key = key[len("platform."):]
        return platform_config.get(config_key)

    elif key.startswith("host."):
        host_key = key[len("host."):]
        if host_key == "curses":
            return "yes" if host_config.get("supports_curses") else "no"
        elif host_key == "progress_rate_limit":
            # Dumb terminals get a much slower progress rate.
            return "5" if host_config.get("supports_curses") else "60"
        return host_config.get(host_key)

    elif key.startswith("computed."):
        computed_key = key[len("computed."):]
        if computed_key == "repository_cache":
            mount = platform_config.get("storage_path", DEFAULT_STORAGE_PATH)
            return mount + "/caches/repository"
        elif computed_key == "output_user_root":
            return _compute_output_path(platform_config, host_config, "bazel", workspace)
        elif computed_key == "output_base":
            return _compute_output_path(platform_config, host_config, "output", workspace)

    elif key.startswith("rbe."):
        if not rbe_config:
            return None
        rbe_key = key[len("rbe."):]
        if rbe_key == "remote_executor":
            # RBE uses the same endpoint as remote cache
            return platform_config.get("remote_cache_endpoint")
        elif rbe_key == "jobs":
            jobs = rbe_config.get("jobs")
            return str(jobs) if jobs else None

    return None
# =============================================================================
# Flag Resolution
# =============================================================================

def resolve_flag_value(flag_def, platform_config, host_config, rbe_config, workspace):
    """
    Resolve a flag definition's value.

    Static values pass through unchanged; "dynamic:<key>" strings are looked
    up via resolve_dynamic_value(); a missing value means a boolean flag.

    Args:
        flag_def: Flag definition dict
        platform_config: Platform configuration dict
        host_config: Host configuration dict
        rbe_config: RBE configuration dict (may be None)
        workspace: Workspace name

    Returns:
        Resolved value string, or None for boolean flags / unresolvable keys.
    """
    raw = flag_def.get("value")
    if raw == None:
        # No value at all: boolean flag such as --stamp.
        return None

    is_dynamic = type(raw) == "string" and raw.startswith("dynamic:")
    if not is_dynamic:
        return raw  # Static value

    dynamic_key = raw[len("dynamic:"):]
    return resolve_dynamic_value(dynamic_key, platform_config, host_config, rbe_config, workspace)


def stringify_flag(name, value):
    """Render a flag as "--name" (boolean) or "--name=value"."""
    rendered = "--" + name
    if value == None:
        return rendered
    return rendered + "=" + str(value)


def should_include_flag(flag_def, version):
    """True when the flag's "versions" constraint admits the given Bazel version.

    Flags without a "versions" key apply to every version ("*").
    """
    return version_satisfies(version, flag_def.get("versions", "*"))

# =============================================================================
# Bazelrc Generation
# =============================================================================

def get_bazelrc_flags(platform_config, host_config, bazel_version = None, rbe_config = None, workspace = ".", verbs = None):
    """
    Generate bazelrc flags organized by verb.

    Args:
        platform_config: dict from read_platform_config()
        host_config: dict from read_host_config()
        bazel_version: str like "7.0.0" or None
        rbe_config: dict with RBE config (e.g., {"jobs": 50}) or None
        workspace: workspace name (default ".")
        verbs: list of verbs to generate flags for (default: all verbs)

    Returns:
        dict: Flags organized by verb, e.g.:
            {
                "build": ["--flag1", "--flag2"],
                "test": ["--flag1", "--test_flag"],
                "startup": ["--output_base=...", "--output_user_root=..."],
            }
    """
    if verbs == None:
        verbs = BAZEL_COMMANDS_ALL

    by_verb = {}

    for flag_def in STATIC_BOOLEAN_FLAGS + STATIC_VALUE_FLAGS + DYNAMIC_FLAGS:
        if not should_include_flag(flag_def, bazel_version):
            continue

        value = resolve_flag_value(flag_def, platform_config, host_config, rbe_config, workspace)

        # Dynamic flags marked omit_if_none disappear when they don't resolve.
        if value == None and flag_def.get("omit_if_none"):
            continue

        rendered = stringify_flag(flag_def["name"], value)

        # Add for each applicable command that's in the requested verbs.
        for verb in flag_def.get("commands", ["build"]):
            if verb in verbs:
                by_verb.setdefault(verb, []).append(rendered)

    # Startup flags live under a dedicated pseudo-verb.
    startup = []
    for flag_def in STARTUP_FLAGS:
        value = resolve_flag_value(flag_def, platform_config, host_config, rbe_config, workspace)
        if value:
            startup.append(stringify_flag(flag_def["name"], value))

    if startup:
        by_verb["startup"] = startup

    return by_verb

# >>> new file: crates/axl-runtime/src/builtins/aspect/lib/sarif.axl
"""
SARIF (Static Analysis Results Interchange Format) GitHub Translation

Converts SARIF output from linters into GitHub PR review
comments and annotations.
Base parsing utilities (parse_sarif, get_sarif_summary) are loaded from rules_lint.
"""

load("@aspect_rules_lint//lint/sarif.axl", "parse_sarif", "get_sarif_summary")


def _get_level_emoji(level):
    """Map SARIF level to display text (unknown/missing levels become "warning")."""
    if level == "error":
        return "error"
    elif level == "warning":
        return "warning"
    elif level == "note":
        return "note"
    return level or "warning"


def sarif_result_to_comment(result, tool_name):
    """
    Convert a single SARIF result to a GitHub review comment dict.

    Args:
        result: A single result from runs[].results[]
        tool_name: Name of the tool (from runs[].tool.driver.name)

    Returns:
        dict suitable for create_review comments list, or None if the result
        has no usable physical location.
    """
    locations = result.get("locations", [])
    if not locations:
        return None

    location = locations[0]
    physical = location.get("physicalLocation")
    if not physical:
        return None

    artifact = physical.get("artifactLocation", {})
    path = artifact.get("uri")
    if not path:
        return None

    region = physical.get("region", {})
    start_line = region.get("startLine")
    end_line = region.get("endLine", start_line)

    if not start_line:
        return None

    # Build comment body
    level = _get_level_emoji(result.get("level", "warning"))
    message_obj = result.get("message", {})
    message = message_obj.get("text", "")

    body = "**{}** ({})".format(tool_name, level)
    if message:
        body = body + "\n\n" + message

    comment = {
        "path": path,
        "line": end_line,
        "side": "RIGHT",
        "body": body,
    }

    # Multi-line comment if start != end
    if start_line != end_line:
        comment["start_line"] = start_line
        comment["start_side"] = "RIGHT"

    # Extract fix hints from relatedLocations
    related = result.get("relatedLocations", [])
    fixes = []
    for loc in related:
        msg = loc.get("message", {}).get("text", "")
        # BUGFIX: a bare startswith("try") also matched words like "trying ..."
        # and then mangled them with msg[4:]. Require the exact "try" keyword,
        # bare or followed by a space.
        if msg != "try" and not msg.startswith("try "):
            continue
        # Renamed from "region" to avoid shadowing the comment region above.
        fix_region = loc.get("physicalLocation", {}).get("region", {})
        byte_offset = fix_region.get("byteOffset")
        byte_length = fix_region.get("byteLength")
        if byte_offset == None or byte_length == None:
            continue
        # Parse replacement text from "try" message
        if msg == "try":
            replacement = ""
        else:
            text = msg[len("try "):]
            # Strip decorative outer quotes (clippy wraps replacements in quotes)
            if len(text) >= 2 and text[0] == '"' and text[-1] == '"':
                text = text[1:-1]
            replacement = text
        fixes.append({
            "byteOffset": byte_offset,
            "byteLength": byte_length,
            "replacement": replacement,
        })
    if fixes:
        comment["_fixes"] = fixes

    return comment


def sarif_to_review_comments(sarif):
    """
    Convert SARIF output to GitHub review comments.

    Args:
        sarif: Parsed SARIF dict (or JSON string)

    Returns:
        List of comment dicts suitable for create_review
    """
    if type(sarif) == "string":
        sarif = json.decode(sarif)

    comments = []
    runs = sarif.get("runs", [])

    for run in runs:
        tool = run.get("tool", {})
        driver = tool.get("driver", {})
        tool_name = driver.get("name", "Linter")

        results = run.get("results", [])
        for result in results:
            comment = sarif_result_to_comment(result, tool_name)
            if comment:
                comments.append(comment)

    return comments


def sarif_to_annotations(sarif):
    """
    Convert SARIF output to GitHub Check Run annotations.

    Args:
        sarif: Parsed SARIF dict (or JSON string)

    Returns:
        List of annotation dicts suitable for build_output
    """
    if type(sarif) == "string":
        sarif = json.decode(sarif)

    annotations = []
    runs = sarif.get("runs", [])

    for run in runs:
        tool = run.get("tool", {})
        driver = tool.get("driver", {})
        tool_name = driver.get("name", "Linter")

        results = run.get("results", [])
        for result in results:
            annotation = sarif_result_to_annotation(result, tool_name)
            if annotation:
                annotations.append(annotation)

    return annotations


def sarif_result_to_annotation(result, tool_name):
    """
    Convert a single SARIF result to a GitHub Check Run annotation.

    Args:
        result: A single result from runs[].results[]
        tool_name: Name of the tool

    Returns:
        dict suitable for check run annotations list, or None if invalid
    """
    locations = result.get("locations", [])
    if not locations:
        return None

    location = locations[0]
    physical = location.get("physicalLocation")
    if not physical:
        return None

    artifact = physical.get("artifactLocation", {})
    path = artifact.get("uri")
    if not path:
        return None

    region = physical.get("region", {})
    start_line = region.get("startLine")
    end_line = region.get("endLine", start_line)

    if not start_line:
        return None

    # Map SARIF level to GitHub annotation level
    sarif_level = result.get("level", "warning")
    if sarif_level == "error":
        annotation_level = "failure"
    elif sarif_level == "warning":
        annotation_level = "warning"
    else:
        annotation_level = "notice"

    message_obj = result.get("message", {})
    message = message_obj.get("text", "")

    annotation = {
        "path": path,
        "start_line": start_line,
        "end_line": end_line,
        "annotation_level": annotation_level,
        "message": message,
        "title": tool_name,
    }

    # Add column info if available
    start_column = region.get("startColumn")
    end_column = region.get("endColumn")
    if start_column:
        annotation["start_column"] = start_column
    if end_column:
        annotation["end_column"] = end_column

    return annotation

# >>> new file: crates/axl-runtime/src/builtins/aspect/tasks/delivery.axl

"""
Delivery task that coordinates artifact delivery via deliveryd.

Delivers each target via bazel run with stamping enabled, and signs artifacts
to prevent re-delivery.

Uses deliveryd (Unix socket HTTP server) for all delivery state operations.
"""

load(
    "../lib/deliveryd.axl",
    deliveryd_query = "query",
    deliveryd_deliver = "deliver",
    deliveryd_record = "record",
    deliveryd_delete_artifact = "delete_artifact",
)


# ANSI codes
_BOLD = "\033[1m"
_GREEN = "\033[32m"
_YELLOW = "\033[33m"
_RED = "\033[31m"
_RESET = "\033[0m"

def _style(text, codes, is_tty):
    """Wrap text in ANSI codes if terminal is TTY."""
    if is_tty:
        return codes + text + _RESET
    return text

def _run_bazel(ctx, verb, target, flags):
    """
    Run a bazel command and return the exit code.
    TODO: Implement ctx.bazel.run() when available.
    """
    print(" [TODO] bazel {} {} {}".format(verb, " ".join(flags), target))
    return 0  # Simulate success

# Helper to pad string to width
def pad(s, width):
    return s + " " * (width - len(s))
def _deliver_target(ctx, socket_path, ci_host, workspace, build_url, bazel_flags, label, is_forced, target_state, is_tty):
    """
    Deliver a single target.

    Args:
        is_forced: If True, skip signature check and always deliver.
        target_state: Dict with {output_sha, delivered, delivered_by} from deliveryd, or None.
        is_tty: Whether terminal supports colors.

    Returns (status: str, message: str) where status is one of:
      - "success": Successfully delivered
      - "skipped": Already delivered (only for non-forced)
      - "build_failed": Bazel build failed
      - "run_failed": Bazel run failed
    """
    output_sha = target_state.get("output_sha") if target_state else None

    # For non-forced targets, check if already delivered
    if not is_forced:
        if target_state:
            if target_state.get("delivered"):
                return ("skipped", "Already delivered by {}".format(target_state.get("delivered_by")))
        else:
            # No state found - target may have been added before signatures
            # were introduced. Proceed with delivery.
            print(" {}: No delivery state found for {}, bypassing signature check".format(
                _style("Warning", _BOLD + _YELLOW, is_tty), label))

    # Run bazel to deliver the target with stamping
    print(" {} {}...".format(_style("Delivering", _BOLD, is_tty), label))
    exit_code = _run_bazel(ctx, "run", label, bazel_flags)

    if exit_code != 0:
        # Delivery failed - delete artifact metadata so it can be retried
        if output_sha:
            deliveryd_delete_artifact(ctx, socket_path, ci_host, output_sha, workspace)
        return ("run_failed", "Delivery failed with exit code {}".format(exit_code))

    # Sign the artifact to mark as delivered
    if output_sha:
        deliveryd_deliver(ctx, socket_path, ci_host, output_sha, workspace, build_url)

    return ("success", "Delivered successfully")

def _delivery_impl(ctx):
    ctx.config.delivery_start()

    # Check if terminal supports colors
    is_tty = ctx.std.io.stdout.is_tty

    # deliveryd socket path
    socket_path = ctx.args.socket

    # Delivery context
    ci_host = ctx.args.ci_host
    workspace = ctx.args.workspace
    build_url = ctx.args.build_url
    commit_sha = ctx.args.commit_sha

    # Build bazel flags for delivery.
    # Default: --stamp --noremote_upload_local_results --remote_download_outputs=toplevel
    # BUGFIX: copy the user-supplied flags before appending - the args value is
    # shared (and may be frozen), so appending in place would mutate or fail on it.
    user_flags = ctx.args.bazel_flag
    bazel_flags = list(user_flags) if user_flags else ["--stamp"]

    # Add flags that Workflows forces during delivery
    bazel_flags.append("--noremote_upload_local_results")
    bazel_flags.append("--remote_download_outputs=toplevel")

    # NOTE(review): space runs inside these literals look collapsed in the
    # source under review - verify column widths against the original file.
    print(_style("Delivery:", _BOLD, is_tty))
    print(" {}: {}".format(_style("deliveryd", _BOLD, is_tty), socket_path))
    print(" {}: {}".format(_style("Host", _BOLD, is_tty), ci_host))
    print(" {}: {}".format(_style("Commit", _BOLD, is_tty), commit_sha))
    print(" {}: {}".format(_style("Workspace", _BOLD, is_tty), workspace))
    print(" {}: {}".format(_style("URL", _BOLD, is_tty), build_url))
    print(" {}: {}".format(_style("Flags", _BOLD, is_tty), bazel_flags))
    print()

    targets = ctx.args.targets
    forced_targets = ctx.args.force_target

    if not targets:
        print(_style("No targets to deliver", _BOLD + _YELLOW, is_tty))
        return 0

    print(_style("Found {} target(s) to deliver:".format(len(targets)), _BOLD, is_tty))
    for t in targets:
        forced_marker = _style(" (forced)", _YELLOW, is_tty) if t in forced_targets else ""
        print(" - {}{}".format(t, forced_marker))
    print("")

    # Record each target with deliveryd (so they can be queried/signed)
    for label in targets:
        # Use hash of commit_sha + label as output_sha
        # TODO: query remote-cache action key to determine target hash.
        output_sha = hash(commit_sha + label)
        deliveryd_record(ctx, socket_path, ci_host, commit_sha, workspace, label, str(output_sha))

    # Query deliveryd for delivery state of all targets
    delivery_state = deliveryd_query(ctx, socket_path, ci_host, commit_sha, workspace)

    # Track results
    results = []  # List of (label, status_text, status_type, delivered_by)
    success_count = 0
    skipped_count = 0
    failed_count = 0

    for label in targets:
        is_forced = label in forced_targets
        target_state = delivery_state.get(label)
        status, message = _deliver_target(
            ctx, socket_path, ci_host, workspace, build_url,
            bazel_flags, label, is_forced, target_state, is_tty
        )

        ctx.config.deliver_target(label, is_forced)

        forced_marker = " (FORCED)" if is_forced else ""
        if status == "success":
            success_count += 1
            results.append((label, "OK" + forced_marker, "ok", "-"))
        elif status == "skipped":
            skipped_count += 1
            delivered_by = target_state.get("delivered_by") if target_state else "-"
            results.append((label, "SKIP", "skip", delivered_by or "-"))
        else:  # build_failed or run_failed
            failed_count += 1
            results.append((label, "FAIL" + forced_marker, "fail", "-"))

    # Calculate column width for alignment
    max_label_width = len("TARGET")
    for label, _, _, _ in results:
        if len(label) > max_label_width:
            max_label_width = len(label)

    # Calculate status column width
    max_status_width = len("STATUS")
    for _, status_text, _, _ in results:
        if len(status_text) > max_status_width:
            max_status_width = len(status_text)

    # Style mapping for status types (bold + color)
    status_styles = {"ok": _BOLD + _GREEN, "skip": _BOLD + _YELLOW, "fail": _BOLD + _RED}

    # Print table header (bold)
    print("")
    header = " {} {} {}".format(pad("TARGET", max_label_width), pad("STATUS", max_status_width), "DELIVERED BY")
    print(_style(header, _BOLD, is_tty))
    for label, status_text, status_type, delivered_by in results:
        styled_status = _style(status_text, status_styles[status_type], is_tty)
        # Pad based on original text length, then apply style
        padding = " " * (max_status_width - len(status_text))
        print(" {} {}{} {}".format(pad(label, max_label_width), styled_status, padding, delivered_by))

    # Summary (single line with bold colors)
    print("")
    summary_parts = [
        _style("{} delivered".format(success_count), _BOLD + _GREEN, is_tty),
        _style("{} skipped".format(skipped_count), _BOLD + _YELLOW, is_tty),
        _style("{} failed".format(failed_count), _BOLD + _RED, is_tty),
    ]
    print("{} {}".format(_style("Summary:", _BOLD, is_tty), ", ".join(summary_parts)))

    ctx.config.delivery_end()

    if failed_count > 0:
        return 1

    return 0


DeliveryConfig = spec(
    delivery_start = attr(typing.Callable[[], None], lambda: None),
    delivery_end = attr(typing.Callable[[], None], lambda: None),
    deliver_target = attr(typing.Callable[[str, bool], None], lambda label, is_forced: None),
)

delivery = task(
    name = "delivery",
    implementation = _delivery_impl,
    config = DeliveryConfig,
    args = {
        "socket": args.string(default = "/tmp/deliveryd.sock"),
        "ci_host": args.string(default = "bk"),
        "commit_sha": args.string(),
        "workspace": args.string(default = "."),
        "build_url": args.string(default = "-"),
        # BUGFIX: was args.string(); the implementation treats this value as a
        # repeatable list of flags (it appends to it), so a plain string
        # crashed whenever the flag was passed.
        "bazel_flag": args.string_list(default = []),
        "force_target": args.string_list(default = []),
        "targets": args.trailing_var_args(),
    },
)

# >>> new file: crates/axl-runtime/src/builtins/aspect/tasks/dummy_format.axl
"""
A stub 'format' task registered when aspect_rules_lint is not installed.
Prints a helpful message directing the user to install the lint package.
"""

def _format_impl(ctx: TaskContext) -> int:
    # Emit the install hint line by line to stderr, then signal failure.
    for message_line in [
        "Error: The format task requires the aspect_rules_lint package.",
        "",
        "Install it by running:",
        "",
        " aspect axl add gh:aspect-build/rules_lint",
        "",
    ]:
        ctx.std.io.stderr.write(message_line + "\n")
    return 1

format = task(
    name = "format",
    implementation = _format_impl,
    description = "Format source code (requires aspect_rules_lint)",
    args = {
        "all": args.positional(minimum = 0, maximum = 1000),
    },
)

# >>> new file: crates/axl-runtime/src/builtins/aspect/tasks/dummy_lint.axl

"""
A stub 'lint' task registered when aspect_rules_lint is not installed.
Prints a helpful message directing the user to install the lint package.
"""

def _lint_impl(ctx: TaskContext) -> int:
    # Same shape as the format stub: explain how to get the real task.
    for message_line in [
        "Error: The lint task requires the aspect_rules_lint package.",
        "",
        "Install it by running:",
        "",
        " aspect axl add gh:aspect-build/rules_lint",
        "",
    ]:
        ctx.std.io.stderr.write(message_line + "\n")
    return 1

lint = task(
    name = "lint",
    implementation = _lint_impl,
    description = "Run linters (requires aspect_rules_lint)",
    args = {
        "all": args.positional(minimum = 0, maximum = 1000),
    },
)

# >>> new file: crates/axl-runtime/src/builtins/aspect/tasks/migrate.axl
"""
CI Config Generator Task

Generates CI workflow files (GitHub Actions, Buildkite, etc.) from
.aspect/workflows/config.yaml.

Usage:
    aspect workflows migrate --host=github --config=.aspect/workflows/config.yaml

This generates a standalone workflow file at .github/workflows/aspect-workflows.yaml
"""

# Task types that should generate CI jobs
BAZEL_TASK_TYPES = [
    "build",
    "test",
    "lint",
    "format",
    "configure",
    "gazelle",
    "buildifier",
]

# Task types that are pre-task hooks
PRE_TASK_TYPES = [
    "checkout",
    "bazel_health_probe",
]

# Task types that are post-task hooks
POST_TASK_TYPES = [
    "finalization",
    "delivery_manifest",
]

# Task types that are CI infrastructure (not Bazel tasks, not hooks)
EXCLUDED_TASK_TYPES = [
    "delivery",
    "warming",
    "noop",
]

# Tasks that need deeper git history for diff operations
NEEDS_DEEP_CHECKOUT = [
    "lint",
    "format",
]

# Default timeout in minutes for tasks
DEFAULT_TIMEOUT = 60


def parse_yaml(ctx, content: str) -> dict:
    """Parse YAML content to a dictionary by shelling out to yq."""
    command = ctx.std.process.command("yq").arg("-o=json").arg(".")
    command = command.stdin("piped").stdout("piped").stderr("piped")
    child = command.spawn()

    # Feed the YAML on stdin and close so yq sees EOF.
    stdin = child.stdin()
    stdin.write(content)
    stdin.close()

    json_output = child.stdout().read_to_string()

    status = child.wait()
    if status.code != 0:
        fail("Failed to parse YAML: " + child.stderr().read_to_string())

    return json.decode(json_output)


def get_task_entry(task_def: dict) -> tuple:
    """
    Extract task name, type, and config from a one-entry task definition.

    A numeric suffix ("test_2") is stripped from the name to derive the type;
    a None config is normalized to {}.

    Returns:
        Tuple of (task_name, task_type, task_config); (None, None, None) for
        anything that is not a non-empty dict.
    """
    if type(task_def) != "dict" or len(task_def) == 0:
        return (None, None, None)

    for task_name, task_config in task_def.items():
        derived_type = task_name
        pieces = task_name.rsplit("_", 1)
        if len(pieces) == 2 and pieces[1].isdigit():
            derived_type = pieces[0]
        return (task_name, derived_type, task_config if task_config != None else {})

    return (None, None, None)


def find_task_config(tasks: list, task_type: str):
    """Return the config of the first task of task_type, or None if absent/disabled."""
    for entry in tasks:
        _, entry_type, config = get_task_entry(entry)
        if entry_type != task_type:
            continue
        if config.get("without", False):
            return None  # task is explicitly disabled
        return config
    return None


def generate_checkout_step(task_type: str) -> list:
    """
    Generate the actions/checkout step for a job.

    lint/format jobs fetch two commits so they can diff against the parent
    commit; everything else uses the default shallow checkout.

    Returns:
        List of YAML lines for the checkout step
    """
    steps = ["- uses: actions/checkout@v4"]
    if task_type in NEEDS_DEEP_CHECKOUT:
        # NOTE(review): indentation widths inside these YAML literals look
        # collapsed in the source under review - verify against the original.
        steps.append(" with:")
        steps.append(" fetch-depth: 2")
    return steps


def generate_pre_hooks(tasks: list, task_type: str) -> list:
    """
    Generate pre-task hook steps.

    Args:
        tasks: Full tasks list from config
        task_type: The main task type

    Returns:
        List of YAML lines for pre-hook steps
    """
    steps = []

    # Note: checkout task with update_strategy (rebase/merge) is handled by
    # the Aspect Workflows infrastructure via branch_freshness_strategy binary.
    # In standalone GitHub Actions, this is not available - users should
    # configure branch protection rules or use GitHub's merge queue instead.

    # Check for bazel_health_probe task
    health_config = find_task_config(tasks, "bazel_health_probe")
    if health_config != None:
        steps.append("- name: Bazel health check")
        steps.append(" run: aspect bazel_health_probe")
        steps.append(" timeout-minutes: " + str(health_config.get("timeout_in_minutes", 10)))

    return steps


def generate_post_hooks(tasks: list, task_name: str) -> list:
    """
    Generate post-task hook steps.

    Args:
        tasks: Full tasks list from config
        task_name: The main task name (for delivery manifest)

    Returns:
        List of YAML lines for post-hook steps
    """
    steps = []

    manifest_config = find_task_config(tasks, "delivery_manifest")
    if manifest_config != None:
        steps.append("- name: Delivery manifest")
        steps.append(" if: success()")
        steps.append(" run: aspect delivery_manifest --data TARGETS_SOURCE=" + task_name)
        steps.append(" timeout-minutes: " + str(manifest_config.get("timeout_in_minutes", 10)))

    finalize_config = find_task_config(tasks, "finalization")
    if finalize_config != None:
        steps.append("- name: Finalization")
        steps.append(" if: always()")
        steps.append(" run: aspect finalization")
        steps.append(" timeout-minutes: " + str(finalize_config.get("timeout_in_minutes", 10)))

    return steps


def generate_task_hooks(task_config: dict, hook_type: str) -> list:
    """
    Generate task-specific hook steps (before_task or after_task).

    Args:
        task_config: The task's configuration dict
        hook_type: Either "before_task" or "after_task"

    Returns:
        List of YAML lines for hook steps
    """
    steps = []

    hooks = task_config.get("hooks", [])
    if type(hooks) != "list":
        return steps

    matched = 0
    base_name = hook_type.replace("_", " ").title()
    for hook in hooks:
        if type(hook) != "dict" or hook.get("type") != hook_type:
            continue

        command = hook.get("command", "")
        if not command:
            continue

        matched += 1
        step_name = base_name if matched == 1 else base_name + " " + str(matched)

        steps.append("- name: " + step_name)
        if "\n" in command:
            # Multiline commands use the YAML literal block form.
            steps.append(" run: |")
            for cmd_line in command.split("\n"):
                steps.append(" " + cmd_line)
        else:
            steps.append(" run: " + command)

    return steps
def generate_github_workflow(ctx, config_data: dict, config_path: str, workflow_name: str) -> str:
    """
    Generate a GitHub Actions workflow YAML from config data.

    Args:
        ctx: TaskContext
        config_data: Parsed config.yaml data
        config_path: Path to config file (for reference in workflow)
        workflow_name: Name for the workflow

    Returns:
        Generated workflow YAML as string

    Fails when the config contains no Bazel tasks.
    """
    tasks = config_data.get("tasks", [])
    workspaces = config_data.get("workspaces", ["."])
    env_vars = config_data.get("env", {})
    default_queue = config_data.get("queue", "aspect-default")

    # Normalize workspaces to a non-empty list.
    # BUGFIX: an empty "workspaces: []" previously passed the list check and
    # crashed on workspaces[0] below.
    if type(workspaces) != "list" or len(workspaces) == 0:
        workspaces = ["."]

    # Matrix strategy depends only on the workspace list, so decide it once
    # (hoisted out of the per-task loop).
    use_matrix = len(workspaces) > 1

    # NOTE(review): indentation widths inside the YAML literal strings below
    # look collapsed in the source under review - verify against the original.

    # Build the workflow structure
    lines = []
    lines.append("# Generated by: aspect workflows migrate --host=github")
    lines.append("# Source: " + config_path)
    lines.append("# DO NOT EDIT - regenerate with 'aspect workflows migrate'")
    lines.append("")
    lines.append("name: " + workflow_name)
    lines.append("")
    lines.append("on:")
    lines.append(" push:")
    lines.append(" branches: [main]")
    lines.append(" pull_request:")
    lines.append(" branches: [main]")
    lines.append(" workflow_dispatch:")
    lines.append("")

    # Add global env vars if present
    if env_vars and len(env_vars) > 0:
        lines.append("env:")
        for key, value in env_vars.items():
            lines.append(" " + key + ": " + json.encode(str(value)))
        lines.append("")

    lines.append("jobs:")

    # Generate a job for each Bazel task
    job_count = 0
    for task_def in tasks:
        task_name, task_type, task_config = get_task_entry(task_def)

        if task_name == None:
            continue

        # Skip non-Bazel tasks and hooks (hooks are embedded in jobs)
        if task_type in EXCLUDED_TASK_TYPES:
            continue
        if task_type in PRE_TASK_TYPES:
            continue
        if task_type in POST_TASK_TYPES:
            continue

        if task_type not in BAZEL_TASK_TYPES:
            print("Warning: Unknown task type '{}', skipping".format(task_type))
            continue

        # Check if task is disabled
        if task_config.get("without", False):
            continue

        # Get task-specific config
        timeout = task_config.get("timeout_in_minutes", DEFAULT_TIMEOUT)
        queue = task_config.get("queue", default_queue)
        nice_name = task_config.get("name", task_name.replace("_", " ").title())

        # Get task-specific hooks
        before_task_hooks = generate_task_hooks(task_config, "before_task")
        after_task_hooks = generate_task_hooks(task_config, "after_task")

        # Get task-specific env vars
        task_env = task_config.get("env", {})
        if task_env == None:
            task_env = {}

        # Get task-specific artifact paths
        artifact_paths = task_config.get("artifact_paths", [])
        if artifact_paths == None:
            artifact_paths = []

        job_id = task_name.replace("-", "_")
        lines.append("")
        lines.append(" " + job_id + ":")

        if use_matrix:
            lines.append(" name: " + nice_name + " (${{ matrix.workspace }})")
        else:
            lines.append(" name: " + nice_name)

        lines.append(" runs-on: [self-hosted, aspect-workflows, " + queue + "]")

        if use_matrix:
            lines.append(" strategy:")
            lines.append(" fail-fast: false")
            lines.append(" matrix:")
            lines.append(" workspace:")
            for ws in workspaces:
                lines.append(" - " + json.encode(ws))
            lines.append(" defaults:")
            lines.append(" run:")
            lines.append(" working-directory: ${{ matrix.workspace }}")
        else:
            workspace = workspaces[0]
            if workspace != ".":
                lines.append(" defaults:")
                lines.append(" run:")
                lines.append(" working-directory: " + workspace)

        lines.append(" steps:")

        # 1. Checkout step
        for line in generate_checkout_step(task_type):
            lines.append(" " + line)

        # 2. Pre-task hooks (checkout, health probe)
        for line in generate_pre_hooks(tasks, task_type):
            lines.append(" " + line)

        # 3. Task-specific before_task hooks
        for line in before_task_hooks:
            lines.append(" " + line)

        # 4. Main task
        lines.append(" - name: " + nice_name)
        lines.append(" run: aspect " + task_name)
        lines.append(" timeout-minutes: " + str(timeout))
        if task_env and len(task_env) > 0:
            lines.append(" env:")
            for env_key, env_value in task_env.items():
                lines.append(" " + env_key + ": " + json.encode(str(env_value)))

        # 5. Task-specific after_task hooks
        for line in after_task_hooks:
            lines.append(" " + line)

        # 6. Post-task hooks (delivery manifest, finalization)
        for line in generate_post_hooks(tasks, task_name):
            lines.append(" " + line)

        # 7. Upload artifacts (always, even on failure)
        lines.append(" - name: Upload artifacts")
        lines.append(" if: always()")
        lines.append(" uses: actions/upload-artifact@v4")
        lines.append(" with:")
        lines.append(" name: " + task_name + "-artifacts")
        lines.append(" path: |")
        lines.append(" bazel-out/**/testlogs/**")
        lines.append(" bazel-out/**/test.log")
        # Add custom artifact paths
        for artifact_path in artifact_paths:
            lines.append(" " + artifact_path)
        lines.append(" if-no-files-found: ignore")

        job_count += 1

    if job_count == 0:
        fail("No Bazel tasks found in config. Expected tasks like: build, test, lint, format, configure, gazelle, buildifier")

    lines.append("")
    return "\n".join(lines)


def generate_buildkite_pipeline(ctx, config_data: dict, config_path: str) -> str:
    """
    Generate a Buildkite pipeline YAML from config data.
+ + Args: + ctx: TaskContext + config_data: Parsed config.yaml data + config_path: Path to config file (for reference) + + Returns: + Generated pipeline YAML as string + """ + tasks = config_data.get("tasks", []) + workspaces = config_data.get("workspaces", ["."]) + env_vars = config_data.get("env", {}) + default_queue = config_data.get("queue", "aspect-default") + + # Normalize workspaces to list of strings + workspace_list = [] + if type(workspaces) == "list": + for ws in workspaces: + if type(ws) == "str": + workspace_list.append(ws) + elif type(ws) == "dict": + for ws_name in ws.keys(): + workspace_list.append(ws_name) + else: + workspace_list = ["."] + + lines = [] + lines.append("# Generated by: aspect workflows migrate --host=buildkite") + lines.append("# Source: " + config_path) + lines.append("# DO NOT EDIT - regenerate with 'aspect workflows migrate'") + lines.append("") + lines.append("steps:") + + # Collect step keys for finalization depends_on + step_keys = [] + + # Generate steps for each task and workspace combination + for task_def in tasks: + task_name, task_type, task_config = get_task_entry(task_def) + + if task_name == None: + continue + + # Skip non-Bazel tasks and hooks + if task_type in EXCLUDED_TASK_TYPES: + continue + if task_type in PRE_TASK_TYPES: + continue + if task_type in POST_TASK_TYPES: + continue + + if task_type not in BAZEL_TASK_TYPES: + print("Warning: Unknown task type '{}', skipping".format(task_type)) + continue + + # Check if task is disabled + if task_config.get("without", False): + continue + + # Get task-specific config + timeout = task_config.get("timeout_in_minutes", DEFAULT_TIMEOUT) + queue = task_config.get("queue", default_queue) + nice_name = task_config.get("name", task_name.replace("_", " ").title()) + icon = task_config.get("icon", "bazel") + + # Get task-specific env, hooks, and artifact paths + task_env = task_config.get("env", {}) + if task_env == None: + task_env = {} + before_hooks = 
get_task_hook_commands(task_config, "before_task") + after_hooks = get_task_hook_commands(task_config, "after_task") + artifact_paths = task_config.get("artifact_paths", []) + if artifact_paths == None: + artifact_paths = [] + + # Generate step for each workspace + for workspace in workspace_list: + workspace_key = workspace.replace("/", "-").replace(".", "__main__") + step_key = workspace_key + "::" + task_name + step_keys.append(step_key) + + # Label with workspace suffix if multiple workspaces + if len(workspace_list) > 1 and workspace != ".": + label = ":{}: {} - {}".format(icon, nice_name, workspace) + else: + label = ":{}: {}".format(icon, nice_name) + + lines.append(" - key: " + step_key) + lines.append(" label: \"" + label + "\"") + lines.append(" agents:") + lines.append(" queue: " + queue) + lines.append(" timeout_in_minutes: " + str(timeout)) + + # Environment variables + lines.append(" env:") + lines.append(" ASPECT_WORKFLOWS_CONFIG: " + config_path) + for key, value in env_vars.items(): + lines.append(" " + key + ": " + json.encode(str(value))) + for key, value in task_env.items(): + lines.append(" " + key + ": " + json.encode(str(value))) + + # Commands + lines.append(" command:") + + # Before task hooks + for cmd in before_hooks: + if "\n" in cmd: + lines.append(" - |") + for cmd_line in cmd.split("\n"): + lines.append(" " + cmd_line) + else: + lines.append(" - " + json.encode(cmd)) + + # Main task command + if workspace != ".": + lines.append(" - 'echo \"--- :{}: {}\"'".format(icon, nice_name)) + lines.append(" - aspect " + task_name + " --workspace " + workspace) + else: + lines.append(" - 'echo \"--- :{}: {}\"'".format(icon, nice_name)) + lines.append(" - aspect " + task_name) + + # After task hooks + for cmd in after_hooks: + if "\n" in cmd: + lines.append(" - |") + for cmd_line in cmd.split("\n"): + lines.append(" " + cmd_line) + else: + lines.append(" - " + json.encode(cmd)) + + # Artifact paths + if len(artifact_paths) > 0: + lines.append(" 
artifact_paths:") + for path in artifact_paths: + lines.append(" - " + json.encode(path)) + + # Retry config + lines.append(" retry:") + lines.append(" automatic:") + lines.append(" - exit_status: -1") + lines.append(" limit: 1") + lines.append(" manual:") + lines.append(" allowed: true") + lines.append(" permit_on_passed: false") + lines.append("") + + # Add finalization step if configured + finalize_config = find_task_config(tasks, "finalization") + if finalize_config != None and len(step_keys) > 0: + timeout = finalize_config.get("timeout_in_minutes", 10) + lines.append(" - key: finalization") + lines.append(" label: \":checkered_flag: Finalization\"") + lines.append(" agents:") + lines.append(" queue: " + default_queue) + lines.append(" timeout_in_minutes: " + str(timeout)) + lines.append(" command:") + lines.append(" - aspect finalization") + lines.append(" depends_on:") + for key in step_keys: + lines.append(" - step: " + key) + lines.append(" allow_failure: true") + lines.append(" soft_fail: true") + lines.append("") + + return "\n".join(lines) + + +def get_task_hook_commands(task_config: dict, hook_type: str) -> list: + """ + Get list of commands from task hooks. + + Args: + task_config: Task configuration dict + hook_type: Either "before_task" or "after_task" + + Returns: + List of command strings + """ + commands = [] + hooks = task_config.get("hooks", []) + if type(hooks) != "list": + return commands + + for hook in hooks: + if type(hook) != "dict": + continue + if hook.get("type") != hook_type: + continue + command = hook.get("command", "") + if command: + commands.append(command) + + return commands + + +def _migrate_impl(ctx: TaskContext) -> int: + """ + Implementation of the migrate task. + + Reads config.yaml and generates CI workflow files. 
+ """ + host = ctx.args.host + config_path = ctx.args.config + output = ctx.args.output + workflow_name = ctx.args.name + + # Validate host + supported_hosts = ["github", "buildkite"] + if host not in supported_hosts: + print("Error: Unsupported host '{}'. Supported hosts: {}".format( + host, ", ".join(supported_hosts) + )) + return 1 + + # Resolve config path + root = ctx.std.env.root_dir() + if not config_path.startswith("/"): + full_config_path = root + "/" + config_path + else: + full_config_path = config_path + + # Check if config exists + if not ctx.std.fs.exists(full_config_path): + print("Error: Config file not found: " + full_config_path) + return 1 + + # Read and parse config + print("Reading config: " + full_config_path) + content = ctx.std.fs.read_to_string(full_config_path) + config_data = parse_yaml(ctx, content) + + # Determine output path + if not output: + if host == "github": + output = root + "/.github/workflows/aspect-workflows.yaml" + elif host == "buildkite": + output = root + "/.buildkite/pipeline.yaml" + + # Ensure output directory exists + output_dir = output.rsplit("/", 1)[0] + if not ctx.std.fs.exists(output_dir): + ctx.std.fs.create_dir_all(output_dir) + + # Generate workflow + print("Generating {} workflow...".format(host)) + + workflow_content = "" + if host == "github": + workflow_content = generate_github_workflow( + ctx, + config_data, + config_path, # Use original relative path in comment + workflow_name, + ) + elif host == "buildkite": + workflow_content = generate_buildkite_pipeline( + ctx, + config_data, + config_path, + ) + + # Write output + print("Writing: " + output) + ctx.std.fs.write(output, workflow_content) + + print("") + print("Generated workflow file: " + output) + print("") + print("Features enabled:") + + # Report what hooks were detected + tasks = config_data.get("tasks", []) + if find_task_config(tasks, "bazel_health_probe") != None: + print(" - Bazel health probe") + if find_task_config(tasks, "finalization") != 
None: + print(" - Finalization hook (runs on success/failure)") + if find_task_config(tasks, "delivery_manifest") != None: + print(" - Delivery manifest generation") + + # Warn about features not supported in standalone mode + if find_task_config(tasks, "checkout") != None: + print("") + print("Note: 'checkout' task with update_strategy is not supported in standalone mode.") + print(" Use GitHub branch protection rules or merge queue instead.") + + print("") + print("To use this workflow:") + print(" 1. Commit the generated file to your repository") + print(" 2. Push to trigger the workflow") + print("") + + return 0 + + +# Register the migrate task +migrate = task( + name = "migrate", + group = ["workflows"], + implementation = _migrate_impl, + args = { + # CI host platform + "host": args.string(default = "github"), + # Path to workflows config.yaml + "config": args.string(default = ".aspect/workflows/config.yaml"), + # Output file path (auto-detected if not specified) + "output": args.string(default = ""), + # Workflow name + "name": args.string(default = "Aspect Workflows"), + }, +) diff --git a/crates/axl-runtime/src/builtins/aspect/tasks/warming.axl b/crates/axl-runtime/src/builtins/aspect/tasks/warming.axl new file mode 100644 index 000000000..2a91a8fcf --- /dev/null +++ b/crates/axl-runtime/src/builtins/aspect/tasks/warming.axl @@ -0,0 +1,139 @@ +""" +Warming tasks for archiving and restoring Bazel caches on CI runners. + +These tasks wrap the existing warming shell scripts that handle tarring up +caches (repository cache, bazel server, output base) and streaming them +to/from cloud storage (S3 or GCS). + +- update: Archives the current caches and uploads to cloud storage after + a `bazel build --nobuild` warming pass (which is a separate concern). +- restore: Downloads and extracts cached archives during runner bootstrap + so subsequent builds start warm. + +Both tasks require the Aspect Workflows runner infrastructure to be present. 
+""" + +load( + "../lib/platform.axl", + "read_warming_config", + "DEFAULT_BIN_DIR", + "DEFAULT_PLATFORM_DIR", +) + + +def _is_workflows_runner(fs, platform_dir): + """Check if we are running on an Aspect Workflows runner.""" + return fs.exists(platform_dir + "/aws") or fs.exists(platform_dir + "/gcp") + + +def _warmup_update_impl(ctx): + """Archive warming caches and upload to cloud storage.""" + platform_dir = ctx.args.platform_dir + bin_dir = ctx.args.bin_dir + archive_bin = bin_dir + "/warming_archive" + + if not _is_workflows_runner(ctx.std.fs, platform_dir): + print("Error: Not running on an Aspect Workflows runner.") + print("The warming update task requires the Workflows runner infrastructure.") + return 1 + + if not ctx.std.fs.exists(archive_bin): + print("Error: warming archive binary not found at: " + archive_bin) + print("Ensure the Workflows runner bootstrap has completed.") + return 1 + + print("Archiving warming caches...") + + child = ctx.std.process.command(archive_bin) \ + .stdout("inherit") \ + .stderr("inherit") \ + .spawn() + + status = child.wait() + + if status.code != 0: + print("Error: warming archive failed with exit code " + str(status.code)) + + return status.code + + +update = task( + name = "update", + implementation = _warmup_update_impl, + group = ["workflows", "runner", "warming"], + args = { + "bin_dir": args.string(default = DEFAULT_BIN_DIR), + "platform_dir": args.string(default = DEFAULT_PLATFORM_DIR), + }, +) + + +def _warmup_restore_impl(ctx): + """Restore warming caches from cloud storage.""" + platform_dir = ctx.args.platform_dir + bin_dir = ctx.args.bin_dir + restore_bin = bin_dir + "/warming_restore" + + if not _is_workflows_runner(ctx.std.fs, platform_dir): + print("Error: Not running on an Aspect Workflows runner.") + print("The warming restore task requires the Workflows runner infrastructure.") + return 1 + + if not ctx.std.fs.exists(restore_bin): + print("Error: warming restore binary not found at: " + restore_bin) + 
print("Ensure the Workflows runner bootstrap has completed.") + return 1 + + # Resolve bucket: CLI arg > platform config file + bucket = ctx.args.bucket + additional_paths = ctx.args.additional_paths + + if not bucket or not additional_paths: + warming_config = read_warming_config(ctx.std.fs, platform_dir) + if not bucket: + bucket = warming_config.get("warming_bucket", "") + if not additional_paths: + additional_paths = warming_config.get("warming_additional_paths", "") + + if not bucket: + print("Error: warming bucket not specified.") + print("Use --bucket= or write the bucket name to " + platform_dir + "/warming_bucket") + return 1 + + print("Restoring warming caches from bucket: " + bucket) + + child = ctx.std.process.command(restore_bin) \ + .arg(bucket) \ + .arg(additional_paths) \ + .stdout("inherit") \ + .stderr("inherit") \ + .spawn() + + status = child.wait() + + if status.code != 0: + print("Warning: warming restore exited with code " + str(status.code)) + # Exit codes 10, 11 mean the agent is warm but from a fallback. + # Exit codes 20-24 mean the agent is cold. 
+ if status.code in [10, 11]: + print("Agent is warm (restored from fallback cache).") + elif status.code in [20, 21, 23, 24]: + print("Agent is cold (cache restore failed).") + else: + print("Unexpected exit code from warming restore.") + + return status.code + + +# aspect runner warming restore +restore = task( + name = "restore", + implementation = _warmup_restore_impl, + group = ["workflows", "runner", "warming"], + args = { + "bin_dir": args.string(default = DEFAULT_BIN_DIR), + "platform_dir": args.string(default = DEFAULT_PLATFORM_DIR), + "bucket": args.string(default = ""), + "additional_paths": args.string(default = ""), + }, +) diff --git a/crates/axl-runtime/src/builtins/mod.rs b/crates/axl-runtime/src/builtins/mod.rs index babd7f65e..a31d23ef4 100644 --- a/crates/axl-runtime/src/builtins/mod.rs +++ b/crates/axl-runtime/src/builtins/mod.rs @@ -1,12 +1,82 @@ use std::path::PathBuf; +/// A builtin module: name and its embedded files (relative path, content). +#[cfg(not(debug_assertions))] +struct Builtin { + name: &'static str, + files: &'static [(&'static str, &'static str)], +} + +#[cfg(not(debug_assertions))] +const ASPECT: Builtin = Builtin { + name: "aspect", + files: &[ + ("bazel.axl", include_str!("./aspect/bazel.axl")), + ("build.axl", include_str!("./aspect/build.axl")), + ("test.axl", include_str!("./aspect/test.axl")), + ("axl_add.axl", include_str!("./aspect/axl_add.axl")), + ("MODULE.aspect", include_str!("./aspect/MODULE.aspect")), + // config/ + ( + "config/builtins.axl", + include_str!("./aspect/config/builtins.axl"), + ), + ( + "config/delivery.axl", + include_str!("./aspect/config/delivery.axl"), + ), + ("config/lint.axl", include_str!("./aspect/config/lint.axl")), + ( + "config/nolint.axl", + include_str!("./aspect/config/nolint.axl"), + ), + // tasks/ + ( + "tasks/delivery.axl", + include_str!("./aspect/tasks/delivery.axl"), + ), + ( + "tasks/dummy_lint.axl", + include_str!("./aspect/tasks/dummy_lint.axl"), + ), + ( + 
"tasks/dummy_format.axl", + include_str!("./aspect/tasks/dummy_format.axl"), + ), + ( + "tasks/migrate.axl", + include_str!("./aspect/tasks/migrate.axl"), + ), + ( + "tasks/warming.axl", + include_str!("./aspect/tasks/warming.axl"), + ), + // lib/ + ( + "lib/deliveryd.axl", + include_str!("./aspect/lib/deliveryd.axl"), + ), + ("lib/github.axl", include_str!("./aspect/lib/github.axl")), + ( + "lib/linting.axl", + include_str!("./aspect/lib/linting.axl"), + ), + ( + "lib/platform.axl", + include_str!("./aspect/lib/platform.axl"), + ), + ("lib/sarif.axl", include_str!("./aspect/lib/sarif.axl")), + ], +}; + +#[cfg(not(debug_assertions))] +const ALL: &[&Builtin] = &[&ASPECT]; + #[cfg(debug_assertions)] pub fn expand_builtins( _root_dir: PathBuf, _broot: PathBuf, ) -> std::io::Result> { - // Use CARGO_MANIFEST_DIR to locate builtins relative to this crate's source, - // not the user's project root (which could be /tmp or anywhere) let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); Ok(vec![( "aspect".to_string(), @@ -19,27 +89,37 @@ pub fn expand_builtins( _root_dir: PathBuf, broot: PathBuf, ) -> std::io::Result> { - use aspect_telemetry::cargo_pkg_version; use std::fs; - let builtins_root = broot.join(sha256::digest(cargo_pkg_version())); - fs::create_dir_all(&builtins_root)?; + // Hash all builtin content to detect staleness across versions + let content_hash = { + let mut combined = String::new(); + for builtin in ALL { + combined.push_str(builtin.name); + for (path, content) in builtin.files { + combined.push_str(path); + combined.push_str(content); + } + } + sha256::digest(combined) + }; - let builtins = vec![ - ("aspect/build.axl", include_str!("./aspect/build.axl")), - ("aspect/test.axl", include_str!("./aspect/test.axl")), - ("aspect/axl_add.axl", include_str!("./aspect/axl_add.axl")), - ( - "aspect/MODULE.aspect", - include_str!("./aspect/MODULE.aspect"), - ), - ]; + let builtins_root = broot.join(content_hash); - for (path, content) in builtins { - 
let out_path = &builtins_root.join(path); - fs::create_dir_all(&out_path.parent().unwrap())?; - fs::write(out_path, content)?; + // Extract each builtin into its own directory + for builtin in ALL { + let dir = builtins_root.join(builtin.name); + if !dir.exists() { + for (path, content) in builtin.files { + let out_path = dir.join(path); + fs::create_dir_all(out_path.parent().unwrap())?; + fs::write(&out_path, content)?; + } + } } - Ok(vec![("aspect".to_string(), builtins_root.join("aspect"))]) + Ok(ALL + .iter() + .map(|b| (b.name.to_string(), builtins_root.join(b.name))) + .collect()) } diff --git a/crates/axl-runtime/src/engine/async/future.rs b/crates/axl-runtime/src/engine/async/future.rs index a2140d2da..a464a8e6d 100644 --- a/crates/axl-runtime/src/engine/async/future.rs +++ b/crates/axl-runtime/src/engine/async/future.rs @@ -35,14 +35,26 @@ impl<'v> AllocValue<'v> for Box { } pub type FutOutput = Result, anyhow::Error>; +#[derive(Clone, Copy)] +pub enum Transform<'v> { + MapOk(values::Value<'v>), + MapErr(values::Value<'v>), + MapOkOrElse { + map_ok: values::Value<'v>, + map_err: values::Value<'v>, + }, +} + #[derive(Display, Allocative, ProvidesStaticType, NoSerialize)] #[display("Future")] -pub struct StarlarkFuture { +pub struct StarlarkFuture<'v> { #[allocative(skip)] inner: Rc>>>, + #[allocative(skip)] + transforms: Rc>>>, } -impl StarlarkFuture { +impl<'v> StarlarkFuture<'v> { pub fn from_future( fut: impl Future> + Send + 'static, ) -> Self { @@ -52,6 +64,7 @@ impl StarlarkFuture { fut.map_ok_or_else(|e| Err(e), |r| Ok(Box::new(r) as Box)) .boxed(), ))), + transforms: Rc::new(RefCell::new(Vec::new())), } } @@ -63,9 +76,18 @@ impl StarlarkFuture { r.into_future() } + + fn with_transform(&self, transform: Transform<'v>) -> Self { + let mut new_transforms = self.transforms.borrow().clone(); + new_transforms.push(transform); + Self { + inner: self.inner.clone(), + transforms: Rc::new(RefCell::new(new_transforms)), + } + } } -impl Future for 
StarlarkFuture { +impl<'v> Future for StarlarkFuture<'v> { type Output = FutOutput; fn poll( @@ -76,54 +98,131 @@ impl Future for StarlarkFuture { } } -unsafe impl<'v> Trace<'v> for StarlarkFuture { - fn trace(&mut self, _tracer: &Tracer<'v>) {} +unsafe impl<'v> Trace<'v> for StarlarkFuture<'v> { + fn trace(&mut self, tracer: &Tracer<'v>) { + for transform in self.transforms.borrow_mut().iter_mut() { + match transform { + Transform::MapOk(v) => v.trace(tracer), + Transform::MapErr(v) => v.trace(tracer), + Transform::MapOkOrElse { map_ok, map_err } => { + map_ok.trace(tracer); + map_err.trace(tracer); + } + } + } + } } -impl Debug for StarlarkFuture { +impl<'v> Debug for StarlarkFuture<'v> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Future").finish() } } -impl<'v> AllocValue<'v> for StarlarkFuture { +impl<'v> AllocValue<'v> for StarlarkFuture<'v> { fn alloc_value(self, heap: &'v Heap) -> values::Value<'v> { heap.alloc_complex_no_freeze(self) } } -impl<'v> UnpackValue<'v> for StarlarkFuture { +impl<'v> UnpackValue<'v> for StarlarkFuture<'v> { type Error = anyhow::Error; fn unpack_value_impl(value: values::Value<'v>) -> Result, Self::Error> { let fut = value.downcast_ref_err::()?; Ok(Some(Self { inner: fut.inner.clone(), + transforms: fut.transforms.clone(), })) } } #[starlark_value(type = "Future")] -impl<'v> values::StarlarkValue<'v> for StarlarkFuture { +impl<'v> values::StarlarkValue<'v> for StarlarkFuture<'v> { fn get_methods() -> Option<&'static Methods> { static RES: MethodsStatic = MethodsStatic::new(); RES.methods(future_methods) } } +fn apply_transforms<'v>( + result: FutOutput, + transforms: &[Transform<'v>], + eval: &mut Evaluator<'v, '_, '_>, +) -> starlark::Result> { + let heap = eval.heap(); + let mut current: Result, anyhow::Error> = + result.map(|boxed| boxed.alloc_value_fut(heap)); + + for transform in transforms { + current = match (current, transform) { + (Ok(val), Transform::MapOk(f)) => eval + 
.eval_function(*f, &[val], &[]) + .map_err(|e| anyhow::anyhow!("{}", e)), + (Err(e), Transform::MapOk(_)) => Err(e), + + (Err(e), Transform::MapErr(f)) => { + let err_str = heap.alloc_str(&e.to_string()).to_value(); + eval.eval_function(*f, &[err_str], &[]) + .map_err(|e| anyhow::anyhow!("{}", e)) + } + (Ok(v), Transform::MapErr(_)) => Ok(v), + + (Ok(val), Transform::MapOkOrElse { map_ok, .. }) => eval + .eval_function(*map_ok, &[val], &[]) + .map_err(|e| anyhow::anyhow!("{}", e)), + (Err(e), Transform::MapOkOrElse { map_err, .. }) => { + let err_str = heap.alloc_str(&e.to_string()).to_value(); + eval.eval_function(*map_err, &[err_str], &[]) + .map_err(|e| anyhow::anyhow!("{}", e)) + } + }; + } + + current.map_err(|e| starlark::Error::from(anyhow::anyhow!("{}", e))) +} + #[starlark_module] pub(crate) fn future_methods(registry: &mut MethodsBuilder) { fn block<'v>( - #[allow(unused)] this: values::Value<'v>, + this: values::Value<'v>, eval: &mut Evaluator<'v, '_, '_>, ) -> starlark::Result> { let store = AxlStore::from_eval(eval)?; let this = this.downcast_ref_err::()?; + let fut = this .inner .replace(None) .ok_or(anyhow::anyhow!("future has already been awaited"))?; - let value = store.rt.block_on(fut)?; - Ok(value.alloc_value_fut(eval.heap())) + let transforms = this.transforms.borrow().clone(); + + let result = store.rt.block_on(fut); + apply_transforms(result, &transforms, eval) + } + + fn map_ok<'v>( + this: values::Value<'v>, + callable: values::Value<'v>, + ) -> starlark::Result> { + let this_fut = this.downcast_ref_err::()?; + Ok(this_fut.with_transform(Transform::MapOk(callable))) + } + + fn map_err<'v>( + this: values::Value<'v>, + callable: values::Value<'v>, + ) -> starlark::Result> { + let this_fut = this.downcast_ref_err::()?; + Ok(this_fut.with_transform(Transform::MapErr(callable))) + } + + fn map_ok_or_else<'v>( + this: values::Value<'v>, + #[starlark(require = named)] map_ok: values::Value<'v>, + #[starlark(require = named)] map_err: 
values::Value<'v>, + ) -> starlark::Result> { + let this_fut = this.downcast_ref_err::()?; + Ok(this_fut.with_transform(Transform::MapOkOrElse { map_ok, map_err })) } } diff --git a/crates/axl-runtime/src/engine/bazel/build.rs b/crates/axl-runtime/src/engine/bazel/build.rs index 87c5b9fe8..9eb726cbd 100644 --- a/crates/axl-runtime/src/engine/bazel/build.rs +++ b/crates/axl-runtime/src/engine/bazel/build.rs @@ -187,7 +187,7 @@ impl Build { if debug_mode() { eprintln!( - "running {}", + "exec: {}", format_bazel_command(&startup_flags, verb, &flags, &targets) ); } diff --git a/crates/axl-runtime/src/engine/config/context.rs b/crates/axl-runtime/src/engine/config/context.rs index 34af5f023..c09279f3e 100644 --- a/crates/axl-runtime/src/engine/config/context.rs +++ b/crates/axl-runtime/src/engine/config/context.rs @@ -70,6 +70,12 @@ impl<'v> ConfigContext<'v> { .collect() } + /// Get task values for iteration (used during config evaluation). + pub fn task_values(&self) -> Vec> { + let list = self.tasks.downcast_ref::().unwrap(); + list.0.borrow().content.clone() + } + /// Add a config module for lifetime management. pub fn add_config_module(&self, module: FrozenModule) { self.config_modules.borrow_mut().push(module); diff --git a/crates/axl-runtime/src/engine/config/tasks/configured_task.rs b/crates/axl-runtime/src/engine/config/tasks/configured_task.rs index 1c2926133..9d7a94d90 100644 --- a/crates/axl-runtime/src/engine/config/tasks/configured_task.rs +++ b/crates/axl-runtime/src/engine/config/tasks/configured_task.rs @@ -1,7 +1,4 @@ -//! ConfiguredTask - A task with its configuration, using frozen values. -//! -//! This type uses `OwnedFrozenValue` to manage heap lifetimes automatically, -//! following Buck2's pattern for safe frozen value management. +//! ConfiguredTask - A task with its configuration. 
use std::cell::RefCell; use std::path::PathBuf; @@ -13,6 +10,7 @@ use starlark::environment::FrozenModule; use starlark::environment::Methods; use starlark::environment::MethodsBuilder; use starlark::environment::MethodsStatic; +use starlark::eval::Evaluator; use starlark::starlark_module; use starlark::values; use starlark::values::Heap; @@ -32,16 +30,14 @@ use crate::engine::task::FrozenTask; use crate::engine::task::TaskLike; use crate::eval::EvalError; -use super::frozen::freeze_value; - -/// A task bundled with its configuration, using frozen values for safe heap management. +/// A task bundled with its configuration. /// /// This type: /// - Has no lifetime parameter (easy to store and pass around) -/// - Uses `OwnedFrozenValue` to keep heaps alive automatically +/// - Uses `OwnedFrozenValue` for frozen values (task definition) +/// - Stores config binding for lazy evaluation during config phase /// - Is a `StarlarkValue` that config functions can modify via `set_attr` -/// - Can be created from a `FrozenModule` -#[derive(Debug, Clone, ProvidesStaticType, Display, NoSerialize, Allocative)] +#[derive(Debug, ProvidesStaticType, Display, NoSerialize, Allocative, Clone)] #[display("")] pub struct ConfiguredTask { /// The frozen task definition (contains implementation function) @@ -51,26 +47,34 @@ pub struct ConfiguredTask { pub name: RefCell, /// Task group (may be overridden by config) pub group: RefCell>, - /// Configured config value + /// The frozen config binding (callable that returns config) + #[allocative(skip)] + config_binding: OwnedFrozenValue, + /// The lazily evaluated config value (mutable, stays on the heap) + /// SAFETY: This value lives on the same heap as ConfiguredTask + /// None until evaluate_config() is called #[allocative(skip)] - pub config: RefCell, + evaluated_config: RefCell>>, /// Symbol name in the module pub symbol: String, /// Path to the .axl file pub path: PathBuf, } -// ConfiguredTask doesn't need tracing since it only contains 
frozen values unsafe impl Trace<'_> for ConfiguredTask { fn trace(&mut self, _tracer: &values::Tracer<'_>) { - // OwnedFrozenValue manages its own heap lifetime, no tracing needed + // The evaluated_config value uses 'static lifetime as a workaround, but the actual + // value lives on the same heap as ConfiguredTask. Since ConfiguredTask + // is allocated with alloc_complex_no_freeze, the heap manages tracing. + // The config_binding is an OwnedFrozenValue which manages its own lifetime. } } impl ConfiguredTask { /// Create a ConfiguredTask from a FrozenModule. /// - /// Extracts the task definition and initial config from the frozen module. + /// Stores the config binding for lazy evaluation. Call `evaluate_config()` + /// during config phase when an Evaluator is available. pub fn from_frozen_module( frozen: &FrozenModule, symbol: &str, @@ -94,18 +98,72 @@ impl ConfiguredTask { frozen_task.name.clone() }; let group = frozen_task.group.clone(); - let initial_config = OwnedFrozenValue::alloc(frozen_task.config); + + // Store the config binding for lazy evaluation + let config_binding = task_def.map(|_| frozen_task.config()); Ok(ConfiguredTask { task_def, name: RefCell::new(name), group: RefCell::new(group), - config: RefCell::new(initial_config), + config_binding, + evaluated_config: RefCell::new(None), symbol: symbol.to_string(), path, }) } + /// Evaluate the config binding and store the result. + /// + /// This must be called during config phase when an Evaluator is available. + /// After this call, `get_config()` will return the evaluated value. + pub fn evaluate_config<'v>(&self, eval: &mut Evaluator<'v, '_, '_>) -> Result<(), EvalError> { + // Get the frozen binding value - this has 'static lifetime from OwnedFrozenValue + let binding = self.config_binding.value(); + // SAFETY: FrozenValue has 'static lifetime, we need to convert to Value<'v> + // to call eval_function. The actual value is frozen and valid for 'static. 
+ let binding_value: Value<'v> = unsafe { std::mem::transmute(binding) }; + + let config_value: Value<'v> = if binding_value.is_none() { + binding_value + } else { + eval.eval_function(binding_value, &[], &[]).map_err(|e| { + EvalError::UnknownError(anyhow!("failed to evaluate config binding: {:?}", e)) + })? + }; + + // SAFETY: The config value lives on the evaluator's heap. The lifetime is valid + // as long as the heap outlives this ConfiguredTask. + let config: Value<'static> = unsafe { std::mem::transmute(config_value) }; + self.evaluated_config.replace(Some(config)); + Ok(()) + } + + /// Create a ConfiguredTask with an already-evaluated config value. + /// + /// Use this when an Evaluator is available (e.g., in task_list.add()). + pub fn new_with_evaluated_config<'v>( + task_def: OwnedFrozenValue, + config_binding: OwnedFrozenValue, + name: String, + group: Vec, + config: Value<'v>, + symbol: String, + path: PathBuf, + ) -> Self { + // SAFETY: Same as evaluate_config - config lives on the same heap + let config: Value<'static> = unsafe { std::mem::transmute(config) }; + ConfiguredTask { + task_def, + config_binding, + name: RefCell::new(name), + group: RefCell::new(group), + evaluated_config: RefCell::new(Some(config)), + symbol, + path, + } + } + /// Get a reference to the underlying FrozenTask. pub fn as_frozen_task(&self) -> Option<&FrozenTask> { self.task_def.value().downcast_ref::() @@ -123,8 +181,29 @@ impl ConfiguredTask { } /// Get the current config value. - pub fn get_config(&self) -> OwnedFrozenValue { - self.config.borrow().clone() + /// + /// Panics if `evaluate_config()` has not been called. 
+ pub fn get_config<'v>(&self) -> Value<'v> { + let config = self.evaluated_config.borrow(); + let config = config.expect("config not yet evaluated - call evaluate_config() first"); + // SAFETY: Transmute back to the caller's lifetime - valid because + // the heap outlives this call + unsafe { std::mem::transmute(config) } + } + + /// Try to get the current config value, returning None if not yet evaluated. + pub fn try_get_config<'v>(&self) -> Option> { + let config = *self.evaluated_config.borrow(); + // SAFETY: Transmute back to the caller's lifetime - valid because + // the heap outlives this call + config.map(|c| unsafe { std::mem::transmute(c) }) + } + + /// Set the config value. + pub fn set_config<'v>(&self, value: Value<'v>) { + // SAFETY: Same lifetime reasoning as get_config + let value: Value<'static> = unsafe { std::mem::transmute(value) }; + self.evaluated_config.replace(Some(value)); } /// Get the current name. @@ -136,6 +215,11 @@ impl ConfiguredTask { pub fn get_group(&self) -> Vec { self.group.borrow().clone() } + + /// Get the config binding (for creating new ConfiguredTasks with evaluated config). 
+ pub fn config_binding(&self) -> &OwnedFrozenValue { + &self.config_binding + } } #[starlark_value(type = "ConfiguredTask")] @@ -151,10 +235,7 @@ impl<'v> values::StarlarkValue<'v> for ConfiguredTask { self.group.replace(unpack.items); } "config" => { - // Freeze the config value so it can be safely stored - let frozen = - freeze_value(value).map_err(|e| anyhow!("failed to freeze config: {:?}", e))?; - self.config.replace(frozen); + self.set_config(value); } _ => return ValueError::unsupported(self, &format!(".{}=", attribute)), }; @@ -165,14 +246,7 @@ impl<'v> values::StarlarkValue<'v> for ConfiguredTask { match attribute { "name" => Some(heap.alloc_str(&self.name.borrow()).to_value()), "group" => Some(heap.alloc(AllocList(self.group.borrow().iter()))), - "config" => { - // Return the frozen config value - let config = self.config.borrow(); - let value = config.value(); - // SAFETY: The OwnedFrozenValue keeps its heap alive, and we're - // returning a Value that will be used within this evaluation. 
- Some(unsafe { std::mem::transmute::, Value<'v>>(value) }) - } + "config" => Some(self.get_config()), "symbol" => Some(heap.alloc_str(&self.symbol).to_value()), "path" => Some(heap.alloc_str(&self.path.to_string_lossy()).to_value()), _ => None, diff --git a/crates/axl-runtime/src/engine/config/tasks/value.rs b/crates/axl-runtime/src/engine/config/tasks/value.rs index dafc08a8b..4c99e71fb 100644 --- a/crates/axl-runtime/src/engine/config/tasks/value.rs +++ b/crates/axl-runtime/src/engine/config/tasks/value.rs @@ -16,7 +16,6 @@ use starlark::typing::Ty; use starlark::values::AllocValue; use starlark::values::Heap; use starlark::values::NoSerialize; -use starlark::values::OwnedFrozenValue; use starlark::values::StarlarkValue; use starlark::values::Trace; use starlark::values::Value; @@ -80,22 +79,33 @@ pub(crate) fn task_list_methods(registry: &mut MethodsBuilder) { .get(&symbol) .map_err(|e| anyhow::anyhow!("failed to get frozen task: {:?}", e))?; - // Get initial config from the frozen task + // Get config binding from the frozen task and evaluate it let frozen_task = task_def .value() .downcast_ref::() .ok_or_else(|| anyhow::anyhow!("expected FrozenTask after freeze"))?; - let initial_config = OwnedFrozenValue::alloc(frozen_task.config); - // Create ConfiguredTask with frozen values - let task_mut = ConfiguredTask { + // Get the config binding as OwnedFrozenValue + let config_binding = task_def.map(|_| frozen_task.config()); + + // Evaluate the config binding (always a callable) + let binding = frozen_task.config(); + let config_value = if binding.to_value().is_none() { + binding.to_value() + } else { + eval.eval_function(binding.to_value(), &[], &[])? 
+ }; + + // Create ConfiguredTask with evaluated config + let task_mut = ConfiguredTask::new_with_evaluated_config( task_def, - name: RefCell::new(name), - group: RefCell::new(task_like.group().to_vec()), - config: RefCell::new(initial_config), + config_binding, + name, + task_like.group().to_vec(), + config_value, symbol, - path: PathBuf::from(store.script_path.to_string_lossy().to_string()), - }; + PathBuf::from(store.script_path.to_string_lossy().to_string()), + ); this.aref.content.push(eval.heap().alloc(task_mut)); Ok(NoneType) diff --git a/crates/axl-runtime/src/engine/http.rs b/crates/axl-runtime/src/engine/http.rs index ed2633e17..c36295c01 100644 --- a/crates/axl-runtime/src/engine/http.rs +++ b/crates/axl-runtime/src/engine/http.rs @@ -2,11 +2,16 @@ use allocative::Allocative; use derive_more::Display; use futures::FutureExt; use futures::TryStreamExt; +use http_body_util::{BodyExt, Empty, Full}; +use hyper::body::Bytes; +use hyper_util::client::legacy::Client as HyperClient; +use hyperlocal::{UnixClientExt, Uri as UnixUri}; use reqwest::redirect::Policy; use ssri::{Integrity, IntegrityChecker}; use starlark::environment::{Methods, MethodsBuilder, MethodsStatic}; use starlark::values::AllocValue; use starlark::values::dict::UnpackDictEntries; +use starlark::values::none::NoneOr; use starlark::values::{Heap, NoSerialize, ProvidesStaticType, ValueLike}; use starlark::values::{StarlarkValue, starlark_value}; use starlark::{starlark_module, starlark_simple_value, values}; @@ -117,7 +122,7 @@ pub(crate) fn http_methods(registry: &mut MethodsBuilder) { headers: UnpackDictEntries, #[starlark(require = named)] integrity: Option, #[starlark(require = named)] sha256: Option, - ) -> starlark::Result { + ) -> starlark::Result> { let client = &this.downcast_ref_err::()?.client; let mut req = client.get(url.as_str().to_string()); for (key, value) in headers.entries { @@ -170,46 +175,224 @@ pub(crate) fn http_methods(registry: &mut MethodsBuilder) { } fn get<'v>( - 
#[allow(unused)] this: values::Value<'v>, + this: values::Value<'v>, #[starlark(require = named)] url: values::StringValue, #[starlark(require = named, default = UnpackDictEntries::default())] headers: UnpackDictEntries, - ) -> starlark::Result { - let client = &this.downcast_ref_err::()?.client; - let mut req = client.get(url.as_str().to_string()); - for (key, value) in headers.entries { - req = req.header(key.as_str(), value.as_str()); + #[starlark(require = named, default = NoneOr::None)] unix_socket: NoneOr, + ) -> starlark::Result> { + let url_str = url.as_str().to_string(); + let headers_vec: Vec<(String, String)> = headers + .entries + .into_iter() + .map(|(k, v)| (k.as_str().to_string(), v.as_str().to_string())) + .collect(); + + match unix_socket.into_option() { + Some(socket) => { + let fut = async move { + let client = HyperClient::unix(); + let parsed = url::Url::parse(&url_str) + .map_err(|e| anyhow::anyhow!("invalid url: {}", e))?; + let uri: hyper::Uri = UnixUri::new(&socket, parsed.path()).into(); + + let mut req = hyper::Request::builder().method("GET").uri(uri); + for (key, value) in &headers_vec { + req = req.header(key.as_str(), value.as_str()); + } + let req = req + .body(Empty::::new()) + .map_err(|e| anyhow::anyhow!("failed to build request: {}", e))?; + + let res = client + .request(req) + .await + .map_err(|e| anyhow::anyhow!("request failed: {}", e))?; + + let status = res.status().as_u16(); + let resp_headers: Vec<(String, String)> = res + .headers() + .iter() + .map(|(n, v)| (n.to_string(), v.to_str().unwrap_or("").to_string())) + .collect(); + let body = res + .into_body() + .collect() + .await + .map_err(|e| anyhow::anyhow!("failed to read body: {}", e))? 
+ .to_bytes(); + let body = String::from_utf8_lossy(&body).to_string(); + + Ok(HttpResponse { + status, + headers: resp_headers, + body, + }) + }; + Ok(StarlarkFuture::from_future(fut.boxed())) + } + None => { + let client = this.downcast_ref_err::()?.client.clone(); + let fut = async move { + let mut req = client.get(&url_str); + for (key, value) in &headers_vec { + req = req.header(key.as_str(), value.as_str()); + } + let res = req.send().await?; + let response = HttpResponse::from_response(res).await?; + Ok(response) + }; + Ok(StarlarkFuture::from_future(fut.boxed())) + } } + } - let fut = async { - let res = req.send().await?; - let response = HttpResponse::from_response(res).await?; - Ok(response) - }; + fn delete<'v>( + this: values::Value<'v>, + #[starlark(require = named)] url: values::StringValue, + #[starlark(require = named, default = UnpackDictEntries::default())] + headers: UnpackDictEntries, + #[starlark(require = named, default = NoneOr::None)] unix_socket: NoneOr, + ) -> starlark::Result> { + let url_str = url.as_str().to_string(); + let headers_vec: Vec<(String, String)> = headers + .entries + .into_iter() + .map(|(k, v)| (k.as_str().to_string(), v.as_str().to_string())) + .collect(); - Ok(StarlarkFuture::from_future(fut.boxed())) + match unix_socket.into_option() { + Some(socket) => { + let fut = async move { + let client = HyperClient::unix(); + let parsed = url::Url::parse(&url_str) + .map_err(|e| anyhow::anyhow!("invalid url: {}", e))?; + let uri: hyper::Uri = UnixUri::new(&socket, parsed.path()).into(); + + let mut req = hyper::Request::builder().method("DELETE").uri(uri); + for (key, value) in &headers_vec { + req = req.header(key.as_str(), value.as_str()); + } + let req = req + .body(Empty::::new()) + .map_err(|e| anyhow::anyhow!("failed to build request: {}", e))?; + + let res = client + .request(req) + .await + .map_err(|e| anyhow::anyhow!("request failed: {}", e))?; + + let status = res.status().as_u16(); + let resp_headers: Vec<(String, 
String)> = res + .headers() + .iter() + .map(|(n, v)| (n.to_string(), v.to_str().unwrap_or("").to_string())) + .collect(); + let body = res + .into_body() + .collect() + .await + .map_err(|e| anyhow::anyhow!("failed to read body: {}", e))? + .to_bytes(); + let body = String::from_utf8_lossy(&body).to_string(); + + Ok(HttpResponse { + status, + headers: resp_headers, + body, + }) + }; + Ok(StarlarkFuture::from_future(fut.boxed())) + } + None => { + let client = this.downcast_ref_err::()?.client.clone(); + let fut = async move { + let mut req = client.delete(&url_str); + for (key, value) in &headers_vec { + req = req.header(key.as_str(), value.as_str()); + } + let res = req.send().await?; + let response = HttpResponse::from_response(res).await?; + Ok(response) + }; + Ok(StarlarkFuture::from_future(fut.boxed())) + } + } } fn post<'v>( - #[allow(unused)] this: values::Value<'v>, + this: values::Value<'v>, url: values::StringValue, #[starlark(require = named, default = UnpackDictEntries::default())] headers: UnpackDictEntries, data: String, - ) -> starlark::Result { - let client = &this.downcast_ref_err::()?.client; - let mut req = client.post(url.as_str().to_string()); - for (key, value) in headers.entries { - req = req.header(key.as_str(), value.as_str()); - } - req = req.body(data); - let fut = async { - let res = req.send().await?; - let response = HttpResponse::from_response(res).await?; - Ok(response) - }; + #[starlark(require = named, default = NoneOr::None)] unix_socket: NoneOr, + ) -> starlark::Result> { + let url_str = url.as_str().to_string(); + let headers_vec: Vec<(String, String)> = headers + .entries + .into_iter() + .map(|(k, v)| (k.as_str().to_string(), v.as_str().to_string())) + .collect(); - Ok(StarlarkFuture::from_future(fut)) + match unix_socket.into_option() { + Some(socket) => { + let fut = async move { + let client = HyperClient::unix(); + let parsed = url::Url::parse(&url_str) + .map_err(|e| anyhow::anyhow!("invalid url: {}", e))?; + let uri: 
hyper::Uri = UnixUri::new(&socket, parsed.path()).into(); + + let mut req = hyper::Request::builder().method("POST").uri(uri); + for (key, value) in &headers_vec { + req = req.header(key.as_str(), value.as_str()); + } + let req = req + .body(Full::new(Bytes::from(data))) + .map_err(|e| anyhow::anyhow!("failed to build request: {}", e))?; + + let res = client + .request(req) + .await + .map_err(|e| anyhow::anyhow!("request failed: {}", e))?; + + let status = res.status().as_u16(); + let resp_headers: Vec<(String, String)> = res + .headers() + .iter() + .map(|(n, v)| (n.to_string(), v.to_str().unwrap_or("").to_string())) + .collect(); + let body = res + .into_body() + .collect() + .await + .map_err(|e| anyhow::anyhow!("failed to read body: {}", e))? + .to_bytes(); + let body = String::from_utf8_lossy(&body).to_string(); + + Ok(HttpResponse { + status, + headers: resp_headers, + body, + }) + }; + Ok(StarlarkFuture::from_future(fut)) + } + None => { + let client = this.downcast_ref_err::()?.client.clone(); + let fut = async move { + let mut req = client.post(&url_str); + for (key, value) in &headers_vec { + req = req.header(key.as_str(), value.as_str()); + } + req = req.body(data); + let res = req.send().await?; + let response = HttpResponse::from_response(res).await?; + Ok(response) + }; + Ok(StarlarkFuture::from_future(fut)) + } + } } } diff --git a/crates/axl-runtime/src/engine/mod.rs b/crates/axl-runtime/src/engine/mod.rs index 7634a7136..09772eee5 100644 --- a/crates/axl-runtime/src/engine/mod.rs +++ b/crates/axl-runtime/src/engine/mod.rs @@ -1,3 +1,4 @@ +use axl_proto::build::bazel::remote::execution as remote_execution; use starlark::{ environment::GlobalsBuilder, starlark_module, values::starlark_value_as_type::StarlarkValueAsType, @@ -37,9 +38,16 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { globals::register_globals(globals); r#async::register_globals(globals); task::register_globals(globals); + types::spec::register_globals(globals); 
globals.namespace("args", task_arg::register_globals); globals.namespace("bazel", bazel::register_globals); + globals.namespace("remote", |g| { + g.namespace("execution", |g| { + remote_execution::action_cache_service(g); + remote_execution::v2_toplevels(g); + }); + }); globals.namespace("std", std::register_globals); globals.namespace("wasm", wasm::register_wasm_types); } diff --git a/crates/axl-runtime/src/engine/task.rs b/crates/axl-runtime/src/engine/task.rs index 6f1bba3ac..921a8aa2f 100644 --- a/crates/axl-runtime/src/engine/task.rs +++ b/crates/axl-runtime/src/engine/task.rs @@ -190,7 +190,7 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { /// task_args = { /// "target": args.string(), /// }, - /// config = None # Optional user-defined config (e.g., a record); defaults to None if not provided + /// config = lambda: MyConfig(key = "value") # Optional lambda that returns config; evaluated at task creation /// ) /// ``` fn task<'v>( @@ -222,7 +222,7 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { description, group: group.items, name, - config: config.into_option().unwrap_or(values::Value::new_none()), + config: config.into_option().unwrap_or(Value::new_none()), }) } } diff --git a/crates/axl-runtime/src/engine/types/mod.rs b/crates/axl-runtime/src/engine/types/mod.rs index ad0049c4c..e244e2bc9 100644 --- a/crates/axl-runtime/src/engine/types/mod.rs +++ b/crates/axl-runtime/src/engine/types/mod.rs @@ -1 +1,2 @@ pub mod bytes; +pub mod spec; diff --git a/crates/axl-runtime/src/engine/types/spec.rs b/crates/axl-runtime/src/engine/types/spec.rs new file mode 100644 index 000000000..1cc6bdd62 --- /dev/null +++ b/crates/axl-runtime/src/engine/types/spec.rs @@ -0,0 +1,924 @@ +use std::cell::Cell; +use std::fmt::{self, Display, Write}; +use std::sync::atomic::{AtomicU64, Ordering}; + +use allocative::Allocative; +use dupe::Dupe; +use starlark::environment::{GlobalsBuilder, Methods, MethodsBuilder, MethodsStatic}; +use starlark::starlark_module; 
+use starlark::values::dict::AllocDict; +use starlark::values::list::AllocList; +use starlark::values::typing::TypeCompiled; +use starlark::values::{ + AllocFrozenValue, AllocValue, Freeze, FreezeError, Freezer, FrozenHeap, FrozenValue, Heap, + NoSerialize, ProvidesStaticType, StarlarkValue, Trace, Tracer, Value, ValueLike, + starlark_value, +}; +use starlark_map::small_map::SmallMap; + +static SPEC_TYPE_ID: AtomicU64 = AtomicU64::new(0); + +fn next_spec_type_id() -> u64 { + SPEC_TYPE_ID.fetch_add(1, Ordering::SeqCst) +} + +// ----------------------------------------------------------------------------- +// Field +// ----------------------------------------------------------------------------- + +/// A field definition for a spec, containing a type and optional default value. +#[derive(Debug, Clone, ProvidesStaticType, Allocative)] +pub struct Field<'v> { + pub(crate) typ: TypeCompiled>, + pub(crate) typ_value: Value<'v>, + pub(crate) default: Option>, +} + +impl<'v> Display for Field<'v> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.default { + None => write!(f, "field({})", self.typ), + Some(d) => write!(f, "field({}, {})", self.typ, d), + } + } +} + +unsafe impl<'v> Trace<'v> for Field<'v> { + fn trace(&mut self, tracer: &Tracer<'v>) { + self.typ.trace(tracer); + self.typ_value.trace(tracer); + if let Some(ref mut d) = self.default { + d.trace(tracer); + } + } +} + +impl<'v> Field<'v> { + pub fn freeze(self, freezer: &Freezer) -> Result { + Ok(FrozenField { + typ: self.typ.freeze(freezer)?, + typ_value: self.typ_value.freeze(freezer)?, + default: self.default.map(|d| d.freeze(freezer)).transpose()?, + }) + } +} + +/// A frozen field definition. 
+#[derive(Debug, Clone, ProvidesStaticType, Allocative)] +pub struct FrozenField { + pub(crate) typ: TypeCompiled, + pub(crate) typ_value: FrozenValue, + pub(crate) default: Option, +} + +impl Display for FrozenField { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.default { + None => write!(f, "field({})", self.typ), + Some(d) => write!(f, "field({}, {})", self.typ, d), + } + } +} + +// ----------------------------------------------------------------------------- +// FieldValue - a wrapper for field() function return +// ----------------------------------------------------------------------------- + +#[derive(Debug, ProvidesStaticType, NoSerialize, Allocative)] +pub struct FieldValue<'v> { + pub(crate) typ: TypeCompiled>, + pub(crate) typ_value: Value<'v>, + pub(crate) default: Option>, +} + +impl<'v> Display for FieldValue<'v> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.default { + None => write!(f, "field({})", self.typ), + Some(d) => write!(f, "field({}, {})", self.typ, d), + } + } +} + +unsafe impl<'v> Trace<'v> for FieldValue<'v> { + fn trace(&mut self, tracer: &Tracer<'v>) { + self.typ.trace(tracer); + self.typ_value.trace(tracer); + if let Some(ref mut d) = self.default { + d.trace(tracer); + } + } +} + +impl<'v> AllocValue<'v> for FieldValue<'v> { + fn alloc_value(self, heap: &'v Heap) -> Value<'v> { + heap.alloc_complex(self) + } +} + +#[starlark_value(type = "field")] +impl<'v> StarlarkValue<'v> for FieldValue<'v> { + fn collect_repr(&self, collector: &mut String) { + write!(collector, "{}", self).unwrap(); + } +} + +/// Frozen version of FieldValue. 
+#[derive(Debug, ProvidesStaticType, NoSerialize, Allocative)] +pub struct FrozenFieldValue { + pub(crate) typ: TypeCompiled, + pub(crate) typ_value: FrozenValue, + pub(crate) default: Option, +} + +impl Display for FrozenFieldValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.default { + None => write!(f, "field({})", self.typ), + Some(d) => write!(f, "field({}, {})", self.typ, d), + } + } +} + +unsafe impl<'v> Trace<'v> for FrozenFieldValue { + fn trace(&mut self, _tracer: &Tracer<'v>) { + // Frozen values don't need tracing + } +} + +impl AllocFrozenValue for FrozenFieldValue { + fn alloc_frozen_value(self, heap: &FrozenHeap) -> FrozenValue { + heap.alloc_simple(self) + } +} + +#[starlark_value(type = "field")] +impl<'v> StarlarkValue<'v> for FrozenFieldValue { + type Canonical = FieldValue<'v>; + + fn collect_repr(&self, collector: &mut String) { + write!(collector, "{}", self).unwrap(); + } +} + +impl Freeze for FieldValue<'_> { + type Frozen = FrozenFieldValue; + + fn freeze(self, freezer: &Freezer) -> Result { + Ok(FrozenFieldValue { + typ: self.typ.freeze(freezer)?, + typ_value: self.typ_value.freeze(freezer)?, + default: self.default.map(|d| d.freeze(freezer)).transpose()?, + }) + } +} + +/// Deep-copy a default value if it's a mutable container (list or dict). +/// This ensures each spec instance gets its own mutable copy rather than +/// sharing the (potentially frozen) default. 
+fn copy_default_value<'v>(value: Value<'v>, heap: &'v Heap) -> starlark::Result> { + match value.get_type() { + "list" => { + let items: Vec> = value.iterate(heap)?.collect(); + Ok(heap.alloc(AllocList(items))) + } + "dict" => { + let keys: Vec> = value.iterate(heap)?.collect(); + let items: Vec<(Value<'v>, Value<'v>)> = keys + .into_iter() + .map(|k| { + let v = value.at(k, heap)?; + Ok((k, v)) + }) + .collect::>()?; + Ok(heap.alloc(AllocDict(items))) + } + _ => Ok(value), + } +} + +/// Create fresh TypeCompiled values from field type values at runtime. +/// This ensures type checking works correctly for types like starlark Records +/// whose frozen TypeCompiled matchers may not function properly. +fn build_type_checkers<'v>( + fields: impl Iterator>, + heap: &'v Heap, +) -> starlark::Result>>> { + fields + .map(|typ_value| TypeCompiled::new(typ_value, heap).map_err(starlark::Error::new_other)) + .collect() +} + +// ----------------------------------------------------------------------------- +// SpecType +// ----------------------------------------------------------------------------- + +/// The type of a spec, created by `spec(field1=type1, field2=type2, ...)`. +/// Calling this type creates a `Spec` instance. 
+#[derive(Debug, ProvidesStaticType, NoSerialize, Allocative)] +pub struct SpecType<'v> { + /// Unique identifier for this spec type + pub(crate) id: u64, + /// Name of the spec type (set when assigned to a variable) + pub(crate) name: Option, + /// Fields with their types and optional defaults + pub(crate) fields: SmallMap>, +} + +impl<'v> Display for SpecType<'v> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.name { + Some(name) => write!(f, "spec[{}]", name), + None => write!(f, "spec[anon]"), + } + } +} + +unsafe impl<'v> Trace<'v> for SpecType<'v> { + fn trace(&mut self, tracer: &Tracer<'v>) { + for (_, field) in self.fields.iter_mut() { + field.trace(tracer); + } + } +} + +impl<'v> AllocValue<'v> for SpecType<'v> { + fn alloc_value(self, heap: &'v Heap) -> Value<'v> { + heap.alloc_complex(self) + } +} + +#[starlark_value(type = "spec")] +impl<'v> StarlarkValue<'v> for SpecType<'v> { + fn collect_repr(&self, collector: &mut String) { + write!(collector, "{}", self).unwrap(); + } + + fn export_as( + &self, + variable_name: &str, + _eval: &mut starlark::eval::Evaluator<'v, '_, '_>, + ) -> starlark::Result<()> { + // This is called when the spec type is assigned to a variable. + // We use unsafe to mutate the name, which is safe because this is only + // called during module loading. 
+ let this = self as *const Self as *mut Self; + unsafe { + (*this).name = Some(variable_name.to_string()); + } + Ok(()) + } + + fn invoke( + &self, + _me: Value<'v>, + args: &starlark::eval::Arguments<'v, '_>, + eval: &mut starlark::eval::Evaluator<'v, '_, '_>, + ) -> starlark::Result> { + // Build fresh type checkers from the original type values + let type_checkers = + build_type_checkers(self.fields.values().map(|f| f.typ_value), eval.heap())?; + + // Parse the arguments according to our field definitions + let mut values: Vec>> = Vec::with_capacity(self.fields.len()); + + // Get the named arguments + args.no_positional_args(eval.heap())?; + let kwargs = args.names_map()?; + + // Build values in field order + for ((field_name, field), tc) in self.fields.iter().zip(type_checkers.iter()) { + let value = if let Some(v) = kwargs.get(field_name.as_str()) { + *v + } else if let Some(default) = field.default { + copy_default_value(default, eval.heap())? + } else { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Missing required field `{}` for {}", + field_name, + self + ))); + }; + + // Type check the value using the fresh TypeCompiled + if !tc.matches(value) { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Field `{}` expected type `{}`, got `{}`", + field_name, + tc, + value.get_type() + ))); + } + + values.push(Cell::new(value)); + } + + // Check for unexpected kwargs + for (name, _) in kwargs.iter() { + if !self.fields.contains_key(name.as_str()) { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Unexpected field `{}` for {}", + name, + self + ))); + } + } + + let spec = Spec { + typ: _me, + values: values.into_boxed_slice(), + type_checkers: type_checkers.into_boxed_slice(), + }; + Ok(eval.heap().alloc(spec)) + } + + fn get_methods() -> Option<&'static Methods> { + static RES: MethodsStatic = MethodsStatic::new(); + RES.methods(spec_type_methods) + } +} + +#[starlark_module] +fn spec_type_methods(_builder: &mut MethodsBuilder) 
{} + +// ----------------------------------------------------------------------------- +// FrozenSpecType +// ----------------------------------------------------------------------------- + +/// Frozen version of SpecType. +#[derive(Debug, ProvidesStaticType, NoSerialize, Allocative)] +pub struct FrozenSpecType { + pub(crate) id: u64, + pub(crate) name: Option, + pub(crate) fields: SmallMap, +} + +impl Display for FrozenSpecType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.name { + Some(name) => write!(f, "spec[{}]", name), + None => write!(f, "spec[anon]"), + } + } +} + +unsafe impl<'v> Trace<'v> for FrozenSpecType { + fn trace(&mut self, _tracer: &Tracer<'v>) { + // Frozen values don't need tracing + } +} + +impl AllocFrozenValue for FrozenSpecType { + fn alloc_frozen_value(self, heap: &FrozenHeap) -> FrozenValue { + heap.alloc_simple(self) + } +} + +#[starlark_value(type = "spec")] +impl<'v> StarlarkValue<'v> for FrozenSpecType { + type Canonical = SpecType<'v>; + + fn collect_repr(&self, collector: &mut String) { + write!(collector, "{}", self).unwrap(); + } + + fn invoke( + &self, + _me: Value<'v>, + args: &starlark::eval::Arguments<'v, '_>, + eval: &mut starlark::eval::Evaluator<'v, '_, '_>, + ) -> starlark::Result> { + // Build fresh type checkers from the original type values + let type_checkers = build_type_checkers( + self.fields.values().map(|f| f.typ_value.to_value()), + eval.heap(), + )?; + + let mut values: Vec>> = Vec::with_capacity(self.fields.len()); + + args.no_positional_args(eval.heap())?; + let kwargs = args.names_map()?; + + for ((field_name, field), tc) in self.fields.iter().zip(type_checkers.iter()) { + let value = if let Some(v) = kwargs.get(field_name.as_str()) { + *v + } else if let Some(default) = field.default { + copy_default_value(default.to_value(), eval.heap())? 
+ } else { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Missing required field `{}` for {}", + field_name, + self + ))); + }; + + // Type check using the fresh TypeCompiled + if !tc.matches(value) { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Field `{}` expected type `{}`, got `{}`", + field_name, + tc, + value.get_type() + ))); + } + + values.push(Cell::new(value)); + } + + for (name, _) in kwargs.iter() { + if !self.fields.contains_key(name.as_str()) { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Unexpected field `{}` for {}", + name, + self + ))); + } + } + + let spec = Spec { + typ: _me, + values: values.into_boxed_slice(), + type_checkers: type_checkers.into_boxed_slice(), + }; + Ok(eval.heap().alloc(spec)) + } + + fn get_methods() -> Option<&'static Methods> { + static RES: MethodsStatic = MethodsStatic::new(); + RES.methods(spec_type_methods) + } +} + +impl Freeze for SpecType<'_> { + type Frozen = FrozenSpecType; + + fn freeze(self, freezer: &Freezer) -> Result { + let mut frozen_fields = SmallMap::with_capacity(self.fields.len()); + for (name, field) in self.fields.into_iter() { + frozen_fields.insert(name, field.freeze(freezer)?); + } + Ok(FrozenSpecType { + id: self.id, + name: self.name, + fields: frozen_fields, + }) + } +} + +// ----------------------------------------------------------------------------- +// Spec +// ----------------------------------------------------------------------------- + +/// An instance of a spec type, containing field values. +#[derive(Debug, ProvidesStaticType, NoSerialize, Allocative)] +pub struct Spec<'v> { + /// The spec type this instance belongs to + pub(crate) typ: Value<'v>, + /// Field values in the same order as the type's field definitions (mutable via Cell) + #[allocative(skip)] + pub(crate) values: Box<[Cell>]>, + /// Fresh type checkers created at construction time for runtime type checking. 
+ /// These are re-derived from the field type values to avoid issues with frozen TypeCompiled. + #[allocative(skip)] + pub(crate) type_checkers: Box<[TypeCompiled>]>, +} + +impl<'v> Display for Spec<'v> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}(", self.typ)?; + if let Some(spec_type) = self.typ.downcast_ref::() { + let mut first = true; + for ((name, _), value) in spec_type.fields.iter().zip(self.values.iter()) { + if !first { + write!(f, ", ")?; + } + first = false; + write!(f, "{}={}", name, value.get())?; + } + } else if let Some(frozen_type) = self.typ.downcast_ref::() { + let mut first = true; + for ((name, _), value) in frozen_type.fields.iter().zip(self.values.iter()) { + if !first { + write!(f, ", ")?; + } + first = false; + write!(f, "{}={}", name, value.get())?; + } + } + write!(f, ")") + } +} + +unsafe impl<'v> Trace<'v> for Spec<'v> { + fn trace(&mut self, tracer: &Tracer<'v>) { + self.typ.trace(tracer); + for cell in self.values.iter() { + let mut v = cell.get(); + v.trace(tracer); + cell.set(v); + } + for tc in self.type_checkers.iter_mut() { + tc.trace(tracer); + } + } +} + +impl<'v> AllocValue<'v> for Spec<'v> { + fn alloc_value(self, heap: &'v Heap) -> Value<'v> { + heap.alloc_complex(self) + } +} + +impl<'v> Spec<'v> { + fn get_field_names(&self) -> Vec<&str> { + if let Some(spec_type) = self.typ.downcast_ref::() { + spec_type.fields.keys().map(|s| s.as_str()).collect() + } else if let Some(frozen_type) = self.typ.downcast_ref::() { + frozen_type.fields.keys().map(|s| s.as_str()).collect() + } else { + vec![] + } + } +} + +#[starlark_value(type = "spec")] +impl<'v> StarlarkValue<'v> for Spec<'v> { + fn collect_repr(&self, collector: &mut String) { + write!(collector, "{}", self).unwrap(); + } + + fn get_attr(&self, attribute: &str, _heap: &'v Heap) -> Option> { + if let Some(spec_type) = self.typ.downcast_ref::() { + if let Some(idx) = spec_type.fields.get_index_of(attribute) { + return 
Some(self.values[idx].get()); + } + } else if let Some(frozen_type) = self.typ.downcast_ref::() { + if let Some(idx) = frozen_type.fields.get_index_of(attribute) { + return Some(self.values[idx].get()); + } + } + None + } + + fn set_attr(&self, attribute: &str, value: Value<'v>) -> starlark::Result<()> { + // Get field index + let idx = if let Some(spec_type) = self.typ.downcast_ref::() { + spec_type.fields.get_index_of(attribute) + } else if let Some(frozen_type) = self.typ.downcast_ref::() { + frozen_type.fields.get_index_of(attribute) + } else { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Invalid spec type" + ))); + }; + + let idx = match idx { + Some(idx) => idx, + None => { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Spec {} has no field `{}`", + self.typ, + attribute + ))); + } + }; + + // Type check using the fresh type checker created at construction time + let tc = &self.type_checkers[idx]; + if !tc.matches(value) { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Field `{}` expected type `{}`, got `{}`", + attribute, + tc, + value.get_type() + ))); + } + + // Set the value + self.values[idx].set(value); + Ok(()) + } + + fn has_attr(&self, attribute: &str, _heap: &'v Heap) -> bool { + if let Some(spec_type) = self.typ.downcast_ref::() { + spec_type.fields.contains_key(attribute) + } else if let Some(frozen_type) = self.typ.downcast_ref::() { + frozen_type.fields.contains_key(attribute) + } else { + false + } + } + + fn dir_attr(&self) -> Vec { + self.get_field_names() + .into_iter() + .map(|s| s.to_string()) + .collect() + } + + fn equals(&self, other: Value<'v>) -> starlark::Result { + if let Some(other_spec) = other.downcast_ref::() { + // Check that they have the same spec type + let self_id = self + .typ + .downcast_ref::() + .map(|t| t.id) + .or_else(|| self.typ.downcast_ref::().map(|t| t.id)); + let other_id = other_spec + .typ + .downcast_ref::() + .map(|t| t.id) + .or_else(|| { + other_spec + .typ + 
.downcast_ref::() + .map(|t| t.id) + }); + + if self_id != other_id { + return Ok(false); + } + + // Compare all values + if self.values.len() != other_spec.values.len() { + return Ok(false); + } + for (a, b) in self.values.iter().zip(other_spec.values.iter()) { + if !a.get().equals(b.get())? { + return Ok(false); + } + } + Ok(true) + } else if let Some(other_frozen) = other.downcast_ref::() { + let self_id = self + .typ + .downcast_ref::() + .map(|t| t.id) + .or_else(|| self.typ.downcast_ref::().map(|t| t.id)); + let other_id = other_frozen + .typ + .downcast_ref::() + .map(|t| t.id); + + if self_id != other_id { + return Ok(false); + } + + if self.values.len() != other_frozen.values.len() { + return Ok(false); + } + for (a, b) in self.values.iter().zip(other_frozen.values.iter()) { + if !a.get().equals(b.to_value())? { + return Ok(false); + } + } + Ok(true) + } else { + Ok(false) + } + } +} + +// ----------------------------------------------------------------------------- +// FrozenSpec +// ----------------------------------------------------------------------------- + +/// Frozen version of Spec. 
+#[derive(Debug, ProvidesStaticType, NoSerialize, Allocative)] +pub struct FrozenSpec { + pub(crate) typ: FrozenValue, + pub(crate) values: Box<[FrozenValue]>, +} + +impl Display for FrozenSpec { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}(", self.typ)?; + if let Some(frozen_type) = self.typ.downcast_ref::() { + let mut first = true; + for ((name, _), value) in frozen_type.fields.iter().zip(self.values.iter()) { + if !first { + write!(f, ", ")?; + } + first = false; + write!(f, "{}={}", name, value)?; + } + } + write!(f, ")") + } +} + +unsafe impl<'v> Trace<'v> for FrozenSpec { + fn trace(&mut self, _tracer: &Tracer<'v>) { + // Frozen values don't need tracing + } +} + +impl AllocFrozenValue for FrozenSpec { + fn alloc_frozen_value(self, heap: &FrozenHeap) -> FrozenValue { + heap.alloc_simple(self) + } +} + +#[starlark_value(type = "spec")] +impl<'v> StarlarkValue<'v> for FrozenSpec { + type Canonical = Spec<'v>; + + fn collect_repr(&self, collector: &mut String) { + write!(collector, "{}", self).unwrap(); + } + + fn get_attr(&self, attribute: &str, _heap: &'v Heap) -> Option> { + if let Some(frozen_type) = self.typ.downcast_ref::() { + if let Some(idx) = frozen_type.fields.get_index_of(attribute) { + return Some(self.values[idx].to_value()); + } + } + None + } + + fn has_attr(&self, attribute: &str, _heap: &'v Heap) -> bool { + if let Some(frozen_type) = self.typ.downcast_ref::() { + frozen_type.fields.contains_key(attribute) + } else { + false + } + } + + fn dir_attr(&self) -> Vec { + if let Some(frozen_type) = self.typ.downcast_ref::() { + frozen_type.fields.keys().map(|s| s.to_string()).collect() + } else { + vec![] + } + } + + fn equals(&self, other: Value<'v>) -> starlark::Result { + if let Some(other_frozen) = other.downcast_ref::() { + let self_id = self.typ.downcast_ref::().map(|t| t.id); + let other_id = other_frozen + .typ + .downcast_ref::() + .map(|t| t.id); + + if self_id != other_id { + return Ok(false); + } + + if 
self.values.len() != other_frozen.values.len() { + return Ok(false); + } + for (a, b) in self.values.iter().zip(other_frozen.values.iter()) { + if !a.to_value().equals(b.to_value())? { + return Ok(false); + } + } + Ok(true) + } else if let Some(other_spec) = other.downcast_ref::() { + let self_id = self.typ.downcast_ref::().map(|t| t.id); + let other_id = other_spec + .typ + .downcast_ref::() + .map(|t| t.id) + .or_else(|| { + other_spec + .typ + .downcast_ref::() + .map(|t| t.id) + }); + + if self_id != other_id { + return Ok(false); + } + + if self.values.len() != other_spec.values.len() { + return Ok(false); + } + for (a, b) in self.values.iter().zip(other_spec.values.iter()) { + if !a.to_value().equals(b.get())? { + return Ok(false); + } + } + Ok(true) + } else { + Ok(false) + } + } +} + +impl Freeze for Spec<'_> { + type Frozen = FrozenSpec; + + fn freeze(self, freezer: &Freezer) -> Result { + let typ = self.typ.freeze(freezer)?; + let values: Result, _> = self + .values + .iter() + .map(|v| v.get().freeze(freezer)) + .collect(); + Ok(FrozenSpec { + typ, + values: values?.into_boxed_slice(), + }) + } +} + +// ----------------------------------------------------------------------------- +// Global functions +// ----------------------------------------------------------------------------- + +#[starlark_module] +pub fn register_globals(globals: &mut GlobalsBuilder) { + /// Creates a spec type with the given fields. + /// + /// Each field can be a bare type (required, no default) or an `attr()` + /// definition (with type and optional default). + /// + /// Mutable defaults (lists, dicts) are deep-copied per instance, so each + /// instance gets its own independent copy. No `default_factory` needed. 
+ /// + /// Example: + /// ```starlark + /// MySpec = spec(host=str, port=int) + /// r = MySpec(host="localhost", port=80) + /// print(r.host) # "localhost" + /// print(r.port) # 80 + /// + /// # Mutable defaults are safe: + /// ListSpec = spec(items=attr(list[str], [])) + /// a = ListSpec() + /// b = ListSpec() + /// a.items.append("x") + /// print(b.items) # [] — each instance has its own list + /// ``` + fn spec<'v>( + #[starlark(kwargs)] kwargs: SmallMap<&str, Value<'v>>, + eval: &mut starlark::eval::Evaluator<'v, '_, '_>, + ) -> starlark::Result> { + let mut fields = SmallMap::with_capacity(kwargs.len()); + + for (name, value) in kwargs.into_iter() { + let field = if let Some(field_value) = value.downcast_ref::() { + // It's already a field() definition + Field { + typ: field_value.typ.dupe(), + typ_value: field_value.typ_value, + default: field_value.default, + } + } else { + // It's a type, convert to a field without default + let typ = TypeCompiled::new(value, eval.heap())?; + Field { + typ, + typ_value: value, + default: None, + } + }; + fields.insert(name.to_string(), field); + } + + Ok(SpecType { + id: next_spec_type_id(), + name: None, + fields, + }) + } + + /// Creates a field definition with a type and optional default value. + /// + /// Mutable defaults (lists, dicts) are deep-copied when a spec instance is + /// created, so each instance gets its own independent copy. 
+ /// + /// Example: + /// ```starlark + /// MySpec = spec(host=str, port=attr(int, 80)) + /// r = MySpec(host="localhost") # port defaults to 80 + /// + /// # Mutable defaults are copied per instance: + /// attr(list[str], []) # each instance gets a fresh [] + /// attr(dict[str, int], {}) # each instance gets a fresh {} + /// ``` + fn attr<'v>( + #[starlark(require = pos)] typ: Value<'v>, + #[starlark(require = pos)] default: Option>, + eval: &mut starlark::eval::Evaluator<'v, '_, '_>, + ) -> starlark::Result> { + let compiled_type = TypeCompiled::new(typ, eval.heap())?; + + // Validate that the default matches the type if provided + if let Some(d) = default { + if !compiled_type.matches(d) { + return Err(starlark::Error::new_other(anyhow::anyhow!( + "Default value `{}` does not match field type `{}`", + d, + compiled_type + ))); + } + } + + Ok(FieldValue { + typ: compiled_type, + typ_value: typ, + default, + }) + } +} diff --git a/crates/axl-runtime/src/engine/wasm/mod.rs b/crates/axl-runtime/src/engine/wasm/mod.rs index 6df64c131..86578614b 100644 --- a/crates/axl-runtime/src/engine/wasm/mod.rs +++ b/crates/axl-runtime/src/engine/wasm/mod.rs @@ -298,6 +298,7 @@ fn instantiate_with_imports<'v>( // Use instantiate() instead of instantiate_and_start() so that _start // is not called automatically. The user should call instance.start() manually. + #[allow(deprecated)] let pre_instance = linker.instantiate(&mut store, &module)?; let instance = pre_instance.ensure_no_start(&mut store).map_err(|e| { anyhow::anyhow!("WASM module has start function that must be called: {}", e) diff --git a/crates/axl-runtime/src/eval/config.rs b/crates/axl-runtime/src/eval/config.rs index 70561b0e4..cea51497c 100644 --- a/crates/axl-runtime/src/eval/config.rs +++ b/crates/axl-runtime/src/eval/config.rs @@ -47,18 +47,16 @@ impl<'l, 'p> ConfigEvaluator<'l, 'p> { /// /// This method: /// 1. Creates a ConfigContext with the tasks - /// 2. 
Evaluates each config file, calling its `config` function - /// 3. Returns references to the modified tasks + /// 2. Evaluates config bindings for all tasks (lazy evaluation) + /// 3. Evaluates each config file, calling its `config` function + /// 4. Returns references to the modified tasks /// /// The tasks are modified in place via set_attr calls from config functions. pub fn run_all( &self, - scope: ModuleScope, - config_paths: Vec, + scoped_configs: Vec<(ModuleScope, PathBuf, String)>, tasks: Vec, ) -> Result, EvalError> { - self.loader.module_stack.borrow_mut().push(scope.clone()); - // Create temporary modules for evaluation let eval_module = Box::leak(Box::new(Module::new())); let context_module = Box::leak(Box::new(Module::new())); @@ -70,8 +68,22 @@ impl<'l, 'p> ConfigEvaluator<'l, 'p> { .downcast_ref::() .expect("just allocated ConfigContext"); - // Evaluate each config file - for path in &config_paths { + // Evaluate config bindings for all tasks (lazy evaluation) + { + let mut eval = Evaluator::new(eval_module); + eval.set_loader(self.loader); + for task_value in ctx.task_values() { + let task = task_value + .downcast_ref::() + .expect("task_values should contain ConfiguredTask"); + task.evaluate_config(&mut eval)?; + } + } + + // Evaluate each config file with its associated scope + for (scope, path, function_name) in &scoped_configs { + self.loader.module_stack.borrow_mut().push(scope.clone()); + let rel_path = path .strip_prefix(&scope.path) .map_err(|e| EvalError::UnknownError(anyhow!("Failed to strip prefix: {e}")))? 
@@ -87,8 +99,8 @@ impl<'l, 'p> ConfigEvaluator<'l, 'p> { // Get the config function let def = frozen - .get("config") - .map_err(|_| EvalError::MissingSymbol("config".into()))?; + .get(function_name) + .map_err(|_| EvalError::MissingSymbol(function_name.clone()))?; let func = def.value(); @@ -107,12 +119,13 @@ impl<'l, 'p> ConfigEvaluator<'l, 'p> { // Keep the frozen module alive for the duration ctx.add_config_module(frozen); + + self.loader.module_stack.borrow_mut().pop(); } // Clone tasks from the context to return let result_tasks: Vec = ctx.tasks().iter().map(|t| (*t).clone()).collect(); - self.loader.module_stack.borrow_mut().pop(); Ok(result_tasks) } } diff --git a/crates/axl-runtime/src/eval/load.rs b/crates/axl-runtime/src/eval/load.rs index aadecb26f..8d6873a65 100644 --- a/crates/axl-runtime/src/eval/load.rs +++ b/crates/axl-runtime/src/eval/load.rs @@ -58,6 +58,12 @@ impl<'p> AxlLoader<'p> { AxlStore::new(self.cli_version.clone(), self.repo_root.clone(), path) } + /// Caches a frozen module by its absolute path so that subsequent `load()` calls + /// for the same path return the cached module instead of re-evaluating. + pub fn cache_module(&self, path: PathBuf, module: FrozenModule) { + self.loaded_modules.borrow_mut().insert(path, module); + } + pub(super) fn eval_module(&self, path: &Path) -> Result { assert!(path.is_absolute()); @@ -124,15 +130,29 @@ impl<'p> FileLoader for AxlLoader<'p> { .last() .expect("module name stack should not be empty"); + // Track whether we need to push/pop a new module scope for dependency loads. + let new_module_scope = match &load_path { + LoadPath::ModuleSpecifier { module, .. } => Some(ModuleScope { + name: module.clone(), + path: self.deps_root.join(module), + }), + _ => None, + }; + let resolved_script_path = match &load_path { LoadPath::ModuleSpecifier { module, subpath } => { self.resolve_in_deps_root(&module, &subpath)? 
} LoadPath::ModuleSubpath(subpath) => self.resolve(&module_info.path, subpath)?, LoadPath::RelativePath(relpath) => { - let parent = parent_script_path - .strip_prefix(&module_info.path) - .expect("parent script path should have same prefix as current module"); + let parent = parent_script_path.strip_prefix(&module_info.path).expect( + format!( + "parent script path {} should have same prefix as current module {}", + parent_script_path.display(), + module_info.path.display(), + ) + .as_str(), + ); if let Some(parent) = parent.parent() { self.resolve(&module_info.path, &parent.join(relpath))? } else { @@ -167,8 +187,13 @@ impl<'p> FileLoader for AxlLoader<'p> { drop(load_stack); - // Push the resolved path to the stack so that relative imports from the file still works. - // load_stack.push(resolved_script_path.clone()); + // If loading a dependency module, push its scope so relative imports resolve correctly. + if let Some(scope) = &new_module_scope { + drop(module_stack); + self.module_stack.borrow_mut().push(scope.clone()); + } else { + drop(module_stack); + } // Read and parse the file content into an AST. let frozen_module = self @@ -176,6 +201,11 @@ impl<'p> FileLoader for AxlLoader<'p> { .map_err(|e| Into::::into(e))? .freeze()?; + // Pop the dependency module scope if we pushed one. 
+ if new_module_scope.is_some() { + self.module_stack.borrow_mut().pop(); + } + // Pop the load stack after successful load // self.load_stack.borrow_mut().pop(); diff --git a/crates/axl-runtime/src/eval/task.rs b/crates/axl-runtime/src/eval/task.rs index 18f4a7476..1da0ff89c 100644 --- a/crates/axl-runtime/src/eval/task.rs +++ b/crates/axl-runtime/src/eval/task.rs @@ -75,9 +75,8 @@ pub fn execute_task( store: AxlStore, args: HashMap, ) -> Result, EvalError> { - // Get config first - it needs to outlive the evaluator - let config = task.get_config(); - let config_value = config.value(); + // Get config value + let config_value = task.get_config(); // Get the task implementation function let task_impl = task @@ -120,9 +119,8 @@ pub fn execute_task_with_args( store: AxlStore, args_builder: impl FnOnce(&Heap) -> TaskArgs, ) -> Result, EvalError> { - // Get config first - it needs to outlive the evaluator - let config = task.get_config(); - let config_value = config.value(); + // Get config value + let config_value = task.get_config(); // Get the task implementation function let task_impl = task @@ -192,8 +190,15 @@ impl<'l, 'p> TaskEvaluator<'l, 'p> { .expect("just pushed a scope"); // Freeze immediately - module + let frozen = module .freeze() - .map_err(|e| EvalError::UnknownError(anyhow!(e))) + .map_err(|e| EvalError::UnknownError(anyhow!(e)))?; + + // Cache the frozen module so that subsequent load() calls for the same + // path (e.g., from config files) return this module instead of + // re-evaluating and creating new type instances with different IDs. 
+ self.loader.cache_module(abs_path, frozen.clone()); + + Ok(frozen) } } diff --git a/crates/axl-runtime/src/module/disk_store.rs b/crates/axl-runtime/src/module/disk_store.rs index f7613062f..1d6e28f25 100644 --- a/crates/axl-runtime/src/module/disk_store.rs +++ b/crates/axl-runtime/src/module/disk_store.rs @@ -226,7 +226,7 @@ impl DiskStore { pub async fn expand_store( &self, store: &ModuleStore, - ) -> Result, StoreError> { + ) -> Result, StoreError> { let root = self.root(); fs::create_dir_all(&root).await?; fs::create_dir_all(self.deps_path()).await?; @@ -247,6 +247,7 @@ impl DiskStore { path: path, // Builtins tasks are always auto used auto_use_tasks: true, + use_config: true, }), ) }) @@ -261,11 +262,11 @@ impl DiskStore { let dep_path = self.dep_path(dep.name()); match dep { - Dep::Local(local) if local.auto_use_tasks => { - module_roots.push((local.name.clone(), dep_path.clone())) + Dep::Local(local) if local.auto_use_tasks || local.use_config => { + module_roots.push((local.name.clone(), dep_path.clone(), local.use_config)) } - Dep::Remote(remote) if remote.auto_use_tasks => { - module_roots.push((remote.name.clone(), dep_path.clone())) + Dep::Remote(remote) if remote.auto_use_tasks || remote.use_config => { + module_roots.push((remote.name.clone(), dep_path.clone(), remote.use_config)) } _ => {} }; diff --git a/crates/axl-runtime/src/module/eval.rs b/crates/axl-runtime/src/module/eval.rs index 4e8fa55cf..34864c4e8 100644 --- a/crates/axl-runtime/src/module/eval.rs +++ b/crates/axl-runtime/src/module/eval.rs @@ -21,7 +21,7 @@ use crate::module::Dep; use super::super::eval::{EvalError, validate_module_name}; -use super::store::{AxlArchiveDep, ModuleStore}; +use super::store::{AxlArchiveDep, ModuleStore, UseConfigEntry}; #[starlark_module] pub fn register_globals(globals: &mut GlobalsBuilder) { @@ -69,6 +69,7 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { #[starlark(require = named)] urls: UnpackList, #[starlark(require = named)] dev: bool, 
#[starlark(require = named, default = false)] auto_use_tasks: bool, + #[starlark(require = named, default = false)] use_config: bool, #[starlark(require = named, default = String::new())] strip_prefix: String, eval: &mut Evaluator<'v, '_, '_>, ) -> anyhow::Result { @@ -107,6 +108,7 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { integrity, dev: true, auto_use_tasks, + use_config, }), ); @@ -121,6 +123,7 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { #[starlark(require = named)] name: String, #[starlark(require = named)] path: String, #[starlark(require = named, default = false)] auto_use_tasks: bool, + #[starlark(require = named, default = false)] use_config: bool, eval: &mut Evaluator<'v, '_, '_>, ) -> anyhow::Result { if name == AXL_ROOT_MODULE_NAME { @@ -149,6 +152,7 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { name: name.clone(), path: abs_path, auto_use_tasks, + use_config, }), ); @@ -174,6 +178,59 @@ pub fn register_globals(globals: &mut GlobalsBuilder) { Ok(values::none::NoneType) } + + fn use_config<'v>( + #[starlark(require = pos)] path: String, + #[starlark(require = pos)] function: String, + #[starlark(require = named, default = UnpackList::default())] requires: UnpackList< + values::Value<'v>, + >, + #[starlark(require = named, default = UnpackList::default())] conflicts: UnpackList, + eval: &mut Evaluator<'v, '_, '_>, + ) -> anyhow::Result { + let store = ModuleStore::from_eval(eval)?; + let heap = eval.heap(); + + let mut parsed_requires = Vec::new(); + for req in requires.items { + if let Some(s) = req.unpack_str() { + parsed_requires.push((s.to_string(), None)); + } else if req.get_type() == "tuple" { + let len = req.length().map_err(|e| anyhow::anyhow!("{}", e))?; + if len != 2 { + anyhow::bail!( + "requires tuple must have exactly 2 elements (package, version_constraint)" + ); + } + let pkg = req + .at(heap.alloc(0), heap) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let constraint = req + .at(heap.alloc(1), 
heap) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let pkg = pkg.unpack_str().ok_or_else(|| { + anyhow::anyhow!("requires tuple first element must be a string") + })?; + let constraint = constraint.unpack_str().ok_or_else(|| { + anyhow::anyhow!("requires tuple second element must be a string") + })?; + parsed_requires.push((pkg.to_string(), Some(constraint.to_string()))); + } else { + anyhow::bail!( + "requires elements must be strings or tuples of (package, version_constraint)" + ); + } + } + + store.configs.borrow_mut().push(UseConfigEntry { + path, + function, + requires: parsed_requires, + conflicts: conflicts.items, + }); + + Ok(values::none::NoneType) + } } pub const AXL_MODULE_FILE: &str = "MODULE.aspect"; diff --git a/crates/axl-runtime/src/module/mod.rs b/crates/axl-runtime/src/module/mod.rs index 0dfc876b2..2c9298778 100644 --- a/crates/axl-runtime/src/module/mod.rs +++ b/crates/axl-runtime/src/module/mod.rs @@ -7,4 +7,4 @@ pub use eval::{ AXL_CONFIG_EXTENSION, AXL_MODULE_FILE, AXL_ROOT_MODULE_NAME, AXL_SCRIPT_EXTENSION, AXL_VERSION_EXTENSION, AxlModuleEvaluator, register_globals, }; -pub use store::{AxlArchiveDep, AxlLocalDep, Dep, ModuleStore}; +pub use store::{AxlArchiveDep, AxlLocalDep, Dep, ModuleStore, UseConfigEntry}; diff --git a/crates/axl-runtime/src/module/store.rs b/crates/axl-runtime/src/module/store.rs index 5b72d41ac..eeadd2501 100644 --- a/crates/axl-runtime/src/module/store.rs +++ b/crates/axl-runtime/src/module/store.rs @@ -14,6 +14,14 @@ use starlark::values::ProvidesStaticType; use starlark::values::StarlarkValue; use starlark::values::starlark_value; +#[derive(Clone, Debug)] +pub struct UseConfigEntry { + pub path: String, + pub function: String, + pub requires: Vec<(String, Option)>, + pub conflicts: Vec, +} + #[derive(Debug, ProvidesStaticType, Default)] pub struct ModuleStore { pub root_dir: PathBuf, @@ -21,6 +29,7 @@ pub struct ModuleStore { pub module_root: PathBuf, pub deps: Rc>>, pub tasks: Rc)>>>, + pub configs: Rc>>, } impl 
ModuleStore { @@ -31,6 +40,7 @@ impl ModuleStore { module_root, deps: Rc::new(RefCell::new(HashMap::new())), tasks: Rc::new(RefCell::new(HashMap::new())), + configs: Rc::new(RefCell::new(Vec::new())), } } @@ -46,6 +56,7 @@ impl ModuleStore { module_root: value.module_root.clone(), deps: Rc::clone(&value.deps), tasks: Rc::clone(&value.tasks), + configs: Rc::clone(&value.configs), }) } } @@ -63,6 +74,13 @@ impl Dep { Dep::Remote(remote) => &remote.name, } } + + pub fn use_config(&self) -> bool { + match self { + Dep::Local(local) => local.use_config, + Dep::Remote(remote) => remote.use_config, + } + } } #[derive(Clone, Debug, ProvidesStaticType, NoSerialize, Allocative, Display)] @@ -71,6 +89,7 @@ pub struct AxlLocalDep { pub name: String, pub path: PathBuf, pub auto_use_tasks: bool, + pub use_config: bool, } #[starlark_value(type = "AxlLocalDep")] @@ -88,6 +107,7 @@ pub struct AxlArchiveDep { pub name: String, pub strip_prefix: String, pub auto_use_tasks: bool, + pub use_config: bool, } #[starlark_value(type = "AxlArchiveDep")] diff --git a/crates/starbuf-derive/src/lib.rs b/crates/starbuf-derive/src/lib.rs index e0242b6b3..889a074f3 100644 --- a/crates/starbuf-derive/src/lib.rs +++ b/crates/starbuf-derive/src/lib.rs @@ -7,7 +7,8 @@ use darling::{FromField, FromMeta, FromVariant}; use proc_macro2::{Span, TokenStream}; use quote::{ToTokens, quote}; use syn::Item; -use syn::{Attribute, Field, parse_str, spanned::Spanned}; +use syn::parse::Parser; +use syn::{Attribute, Field, ItemStruct, Lit, Meta, parse_str, spanned::Spanned}; use syn::{ Data, DataEnum, DataStruct, DeriveInput, Expr, Fields, FieldsNamed, FieldsUnnamed, Ident, Type, Variant, @@ -99,11 +100,9 @@ fn try_types(input: TokenStream) -> Result { if let Item::Mod(smod) = subitem { let subident = &smod.ident; let subpath = quote! 
{#subpath::#subident}; + let sub_key = subpath.to_string(); let subgenerator_fn = Ident::new( - &format!( - "{}_toplevels", - subpath.to_string().replace("::", "_").replace(" ", "") - ), + &format!("{}_toplevels", sub_key.replace("::", "_").replace(" ", "")), Span::call_site(), ); let subidentstr = subident.to_string(); @@ -111,6 +110,8 @@ fn try_types(input: TokenStream) -> Result { .or_insert_with(|| (vec![], vec![])) .1 .push(quote! { globals.namespace(#subidentstr, #subgenerator_fn); }); + // Ensure the sub-module entry exists even if it has no processable items + defs.entry(sub_key).or_insert_with(|| (vec![], vec![])); } traverse(subpath, defs, &subitem) @@ -123,13 +124,25 @@ fn try_types(input: TokenStream) -> Result { .0 .push(quote! {}); } - Item::Struct(st) => { + Item::Struct(st) if st.generics.params.is_empty() => { let ident = &st.ident; let subpaths = prefix.to_string(); - defs.entry(subpaths).or_insert_with(|| (vec![], vec![])).0.push(quote! { - const #ident: ::starlark::values::starlark_value_as_type::StarlarkValueAsType<#prefix::#ident> = - starlark::values::starlark_value_as_type::StarlarkValueAsType::new(); - }); + defs.entry(subpaths.clone()) + .or_insert_with(|| (vec![], vec![])) + .0 + .push(quote! { + const #ident: ::starlark::values::starlark_value_as_type::StarlarkValueAsType<#prefix::#ident> = + starlark::values::starlark_value_as_type::StarlarkValueAsType::new(); + }); + let ident_snake = snake(ident.to_string()); + let constructor_fn = + Ident::new(&format!("{}_constructor", ident_snake), ident.span()); + defs.entry(subpaths) + .or_insert_with(|| (vec![], vec![])) + .1 + .push(quote! 
{ + #prefix::#constructor_fn(globals); + }); } _ => {} }; @@ -175,6 +188,115 @@ fn try_types(input: TokenStream) -> Result { Ok(expanded) } +struct ServiceRpc { + name: Ident, + method: Ident, + request: Type, + response: Type, +} + +fn parse_service_attr(attr: TokenStream) -> Result<(Type, Vec), Error> { + let args: syn::punctuated::Punctuated = + syn::punctuated::Punctuated::parse_terminated.parse2(attr)?; + + let mut client: Option = None; + let mut methods: Vec = Vec::new(); + + for arg in args { + match arg { + Meta::NameValue(nv) if nv.path.is_ident("client") => { + let syn::Expr::Lit(expr_lit) = &nv.value else { + bail!("client must be a string literal type path"); + }; + let Lit::Str(lit) = &expr_lit.lit else { + bail!("client must be a string literal type path"); + }; + let ty: Type = parse_str(&lit.value())?; + client = Some(ty); + } + Meta::List(list) if list.path.is_ident("methods") => { + let mut name: Option = None; + let mut method: Option = None; + let mut request: Option = None; + let mut response: Option = None; + + let nested: syn::punctuated::Punctuated = + syn::punctuated::Punctuated::parse_terminated.parse2(list.tokens.clone())?; + + for nested_meta in nested.iter() { + match nested_meta { + Meta::NameValue(nv) if nv.path.is_ident("name") => { + let syn::Expr::Lit(expr_lit) = &nv.value else { + bail!("method name must be a string literal"); + }; + let Lit::Str(lit) = &expr_lit.lit else { + bail!("method name must be a string literal"); + }; + name = Some(Ident::new(&lit.value(), lit.span())); + } + Meta::NameValue(nv) if nv.path.is_ident("method") => { + let syn::Expr::Lit(expr_lit) = &nv.value else { + bail!("tonic method must be a string literal"); + }; + let Lit::Str(lit) = &expr_lit.lit else { + bail!("tonic method must be a string literal"); + }; + method = Some(Ident::new(&lit.value(), lit.span())); + } + Meta::NameValue(nv) if nv.path.is_ident("request") => { + let syn::Expr::Lit(expr_lit) = &nv.value else { + bail!("request must be a 
string literal type path"); + }; + let Lit::Str(lit) = &expr_lit.lit else { + bail!("request must be a string literal type path"); + }; + request = Some(parse_str(&lit.value())?); + } + Meta::NameValue(nv) if nv.path.is_ident("response") => { + let syn::Expr::Lit(expr_lit) = &nv.value else { + bail!("response must be a string literal type path"); + }; + let Lit::Str(lit) = &expr_lit.lit else { + bail!("response must be a string literal type path"); + }; + response = Some(parse_str(&lit.value())?); + } + _ => {} + } + } + let Some(name) = name else { + bail!("each methods(...) entry must include name"); + }; + let Some(method) = method else { + bail!("each methods(...) entry must include method"); + }; + let Some(request) = request else { + bail!("each methods(...) entry must include request"); + }; + let Some(response) = response else { + bail!("each methods(...) entry must include response"); + }; + + methods.push(ServiceRpc { + name, + method, + request, + response, + }); + } + _ => {} + } + } + + let client = client + .ok_or_else(|| anyhow::anyhow!("service attribute requires client = \"path::Type\""))?; + if methods.is_empty() { + bail!("service attribute requires at least one methods(...) 
entry"); + } + + Ok((client, methods)) +} + #[proc_macro_attribute] pub fn types( _attr: proc_macro::TokenStream, @@ -371,10 +493,422 @@ fn try_message(input: TokenStream) -> Result { } }); - let ident_snake = snake(ident.to_string()); + let ident_str = ident.to_string(); + let ident_snake = snake(ident_str.clone()); let methods_ident = Ident::new(&format!("{}_methods", &ident_snake), ident.span()); + let constructor_fn_ident = Ident::new(&format!("{}_constructor", &ident_snake), ident.span()); + + let constructor_arms: Vec = fields + .iter() + .filter_map(|(field, sattrs, attrs, _)| { + let has_deprecated = field.attrs.iter().any(|v| v.path().is_ident("deprecated")); + if sattrs.skip + || has_deprecated + || sattrs.any + || sattrs.duration + || sattrs.timestamp + || attrs.bytes.is_some() + || attrs.map.is_some() + || attrs.oneof.is_some() + { + return None; + } + + let fident = field.ident.as_ref()?; + let fident_str = fident.to_string(); + + let conversion = if attrs.optional && attrs.message { + let inner_ty = extract_inner_type(&field.ty, "core::option::Option")?; + let inner_str = inner_ty.to_token_stream().to_string().replace(' ', ""); + if matches!(inner_str.as_str(), "u32" | "i32" | "u64" | "i64" | "f32" | "f64" | "bool") { + // prost wraps proto3 optional scalars as message+optional — skip in constructor + return None; + } + quote! { + use ::starlark::values::ValueLike; + result.#fident = Some(value.downcast_ref_err::<#inner_ty>()?.clone()); + } + } else if attrs.optional && attrs.string { + quote! { + result.#fident = Some(value.unpack_str() + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a string", #fident_str))? + .to_string()); + } + } else if attrs.optional && (attrs.int32 || attrs.uint32 || attrs.int64 || attrs.uint64) { + quote! { + result.#fident = Some(::unpack_value(value)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as _); + } + } else if attrs.optional && attrs.bool { + quote! 
{ + result.#fident = Some(value.unpack_bool() + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a bool", #fident_str))?); + } + } else if attrs.optional && attrs.enumeration.is_some() { + quote! { + result.#fident = Some(::unpack_value(value)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as i32); + } + } else if attrs.repeated && attrs.message { + let inner_ty = extract_inner_type(&field.ty, "prost::alloc::vec::Vec")?; + quote! { + use ::starlark::values::ValueLike; + let list = ::starlark::values::list::ListRef::from_value(value) + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a list", #fident_str))?; + for item in list.iter() { + result.#fident.push(item.downcast_ref_err::<#inner_ty>()?.clone()); + } + } + } else if attrs.repeated && attrs.string { + quote! { + let list = ::starlark::values::list::ListRef::from_value(value) + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a list", #fident_str))?; + for item in list.iter() { + result.#fident.push(item.unpack_str() + .ok_or_else(|| ::anyhow::anyhow!("list items for '{}' must be strings", #fident_str))? + .to_string()); + } + } + } else if attrs.repeated && (attrs.int32 || attrs.uint32 || attrs.int64 || attrs.uint64) { + quote! { + let list = ::starlark::values::list::ListRef::from_value(value) + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a list", #fident_str))?; + for item in list.iter() { + result.#fident.push(::unpack_value(item)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as _); + } + } + } else if attrs.repeated && attrs.enumeration.is_some() { + quote! { + let list = ::starlark::values::list::ListRef::from_value(value) + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a list", #fident_str))?; + for item in list.iter() { + result.#fident.push(::unpack_value(item)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as i32); + } + } + } else if attrs.string { + quote! 
{ + result.#fident = value.unpack_str() + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a string", #fident_str))? + .to_string(); + } + } else if attrs.bool { + quote! { + result.#fident = value.unpack_bool() + .ok_or_else(|| ::anyhow::anyhow!("field '{}' expects a bool", #fident_str))?; + } + } else if attrs.int32 || attrs.uint32 { + quote! { + result.#fident = ::unpack_value(value)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as _; + } + } else if attrs.int64 || attrs.uint64 { + quote! { + result.#fident = ::unpack_value(value)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as _; + } + } else if attrs.enumeration.is_some() { + quote! { + result.#fident = ::unpack_value(value)? + .ok_or_else(|| ::anyhow::anyhow!("expected int"))? as i32; + } + } else if attrs.message { + let ty = &field.ty; + quote! { + use ::starlark::values::ValueLike; + result.#fident = value.downcast_ref_err::<#ty>()?.clone(); + } + } else { + return None; + }; + + Some(quote! { + #fident_str => { #conversion }, + }) + }) + .collect(); + + let repr_fields: Vec = fields + .iter() + .filter_map(|(field, sattrs, attrs, _)| { + let has_deprecated = field.attrs.iter().any(|v| v.path().is_ident("deprecated")); + if sattrs.skip + || has_deprecated + || sattrs.any + || sattrs.duration + || sattrs.timestamp + || attrs.bytes.is_some() + { + return None; + } + + let field_ident = field.ident.as_ref()?; + let display_name = if let Some(ref rename) = sattrs.rename { + rename.clone() + } else { + field_ident.to_string() + }; + + let value_fmt = if attrs.oneof.is_some() { + quote! { + match &self.#field_ident { + Some(v) => { write!(f, "{:?}", v)?; }, + None => f.write_str("None")?, + } + } + } else if attrs.map.is_some() { + quote! 
{ + f.write_str("{")?; + let mut __map_first = true; + for (k, v) in &self.#field_ident { + if !__map_first { f.write_str(", ")?; } + __map_first = false; + write!(f, "{:?}: {:?}", k, v)?; + } + f.write_str("}")?; + } + } else if attrs.repeated { + let item_fmt = if attrs.string { + quote! { write!(f, "\"{}\"", item)?; } + } else if attrs.bool { + quote! { f.write_str(if *item { "True" } else { "False" })?; } + } else if attrs.message { + quote! { write!(f, "{}", item)?; } + } else { + quote! { write!(f, "{}", item)?; } + }; + quote! { + f.write_str("[")?; + for (i, item) in self.#field_ident.iter().enumerate() { + if i > 0 { f.write_str(", ")?; } + #item_fmt + } + f.write_str("]")?; + } + } else if attrs.optional { + let some_fmt = if attrs.string { + quote! { write!(f, "\"{}\"", v)?; } + } else if attrs.bool { + quote! { f.write_str(if *v { "True" } else { "False" })?; } + } else if attrs.message { + quote! { write!(f, "{}", v)?; } + } else { + quote! { write!(f, "{}", v)?; } + }; + quote! { + match &self.#field_ident { + Some(v) => { #some_fmt }, + None => f.write_str("None")?, + } + } + } else if attrs.string { + quote! { write!(f, "\"{}\"", &self.#field_ident)?; } + } else if attrs.bool { + quote! { f.write_str(if self.#field_ident { "True" } else { "False" })?; } + } else if attrs.int32 || attrs.uint32 || attrs.int64 || attrs.uint64 { + quote! { write!(f, "{}", self.#field_ident)?; } + } else if attrs.enumeration.is_some() { + quote! { write!(f, "{}", self.#field_ident)?; } + } else if attrs.message { + quote! { write!(f, "{}", self.#field_ident)?; } + } else { + return None; + }; + + Some(quote! { + if !__repr_first { f.write_str(", ")?; } + __repr_first = false; + f.write_str(#display_name)?; + f.write_str("=")?; + #value_fmt + }) + }) + .collect(); + + let display_body = if repr_fields.is_empty() { + quote! { + f.write_str(#ident_str)?; + f.write_str("()")?; + Ok(()) + } + } else { + quote! 
{ + f.write_str(#ident_str)?; + f.write_str("(")?; + let mut __repr_first = true; + #(#repr_fields)* + f.write_str(")")?; + Ok(()) + } + }; + + let repr_fields_pretty: Vec = fields + .iter() + .filter_map(|(field, sattrs, attrs, _)| { + let has_deprecated = field.attrs.iter().any(|v| v.path().is_ident("deprecated")); + if sattrs.skip + || has_deprecated + || sattrs.any + || sattrs.duration + || sattrs.timestamp + || attrs.bytes.is_some() + { + return None; + } + + let field_ident = field.ident.as_ref()?; + let display_name = if let Some(ref rename) = sattrs.rename { + rename.clone() + } else { + field_ident.to_string() + }; + + let value_fmt = if attrs.oneof.is_some() { + quote! { + match &self.#field_ident { + Some(v) => { write!(__col, "{:?}", v).unwrap(); }, + None => __col.push_str("None"), + } + } + } else if attrs.map.is_some() { + quote! { + if self.#field_ident.is_empty() { + __col.push_str("{}"); + } else { + __col.push_str("{"); + let mut __map_first = true; + for (k, v) in &self.#field_ident { + if !__map_first { __col.push_str(","); } + __map_first = false; + __col.push_str("\n"); + for _ in 0..(__inner + 2) { __col.push(' '); } + write!(__col, "{:?}: {:?}", k, v).unwrap(); + } + __col.push_str("\n"); + for _ in 0..__inner { __col.push(' '); } + __col.push_str("}"); + } + } + } else if attrs.repeated && attrs.message { + quote! { + if self.#field_ident.is_empty() { + __col.push_str("[]"); + } else { + __col.push_str("["); + for (__i, __item) in self.#field_ident.iter().enumerate() { + if __i > 0 { __col.push_str(","); } + __col.push_str("\n"); + for _ in 0..(__inner + 2) { __col.push(' '); } + __item.__starbuf_pretty(__col, __inner + 2); + } + __col.push_str("\n"); + for _ in 0..__inner { __col.push(' '); } + __col.push_str("]"); + } + } + } else if attrs.repeated { + let item_fmt = if attrs.string { + quote! { write!(__col, "\"{}\"", __item).unwrap(); } + } else if attrs.bool { + quote! 
{ __col.push_str(if *__item { "True" } else { "False" }); } + } else { + quote! { write!(__col, "{}", __item).unwrap(); } + }; + quote! { + __col.push_str("["); + for (__i, __item) in self.#field_ident.iter().enumerate() { + if __i > 0 { __col.push_str(", "); } + #item_fmt + } + __col.push_str("]"); + } + } else if attrs.optional { + let is_real_message = attrs.message && { + let is_scalar = + extract_inner_type(&field.ty, "core::option::Option").map_or(false, |ty| { + let s = ty.to_token_stream().to_string().replace(' ', ""); + matches!( + s.as_str(), + "u32" | "i32" | "u64" | "i64" | "f32" | "f64" | "bool" + ) + }); + !is_scalar + }; + let some_fmt = if is_real_message { + quote! { v.__starbuf_pretty(__col, __inner); } + } else if attrs.string { + quote! { write!(__col, "\"{}\"", v).unwrap(); } + } else if attrs.bool { + quote! { __col.push_str(if *v { "True" } else { "False" }); } + } else { + quote! { write!(__col, "{}", v).unwrap(); } + }; + quote! { + match &self.#field_ident { + Some(v) => { #some_fmt }, + None => __col.push_str("None"), + } + } + } else if attrs.string { + quote! { write!(__col, "\"{}\"", &self.#field_ident).unwrap(); } + } else if attrs.bool { + quote! { __col.push_str(if self.#field_ident { "True" } else { "False" }); } + } else if attrs.int32 || attrs.uint32 || attrs.int64 || attrs.uint64 { + quote! { write!(__col, "{}", self.#field_ident).unwrap(); } + } else if attrs.enumeration.is_some() { + quote! { write!(__col, "{}", self.#field_ident).unwrap(); } + } else if attrs.message { + quote! { self.#field_ident.__starbuf_pretty(__col, __inner); } + } else { + return None; + }; + + Some(quote! { + if !__repr_first { __col.push_str(","); } + __repr_first = false; + __col.push_str("\n"); + for _ in 0..__inner { __col.push(' '); } + __col.push_str(#display_name); + __col.push_str("="); + #value_fmt + }) + }) + .collect(); + + let pretty_body = if repr_fields_pretty.is_empty() { + quote! 
{ + __col.push_str(#ident_str); + __col.push_str("()"); + } + } else { + quote! { + use ::std::fmt::Write; + let __inner = __indent + 2; + __col.push_str(#ident_str); + __col.push_str("("); + let mut __repr_first = true; + #(#repr_fields_pretty)* + __col.push_str("\n"); + for _ in 0..__indent { __col.push(' '); } + __col.push_str(")"); + } + }; let expanded = quote! { + impl #ident { + #[doc(hidden)] + pub fn __starbuf_pretty(&self, __col: &mut String, __indent: usize) { + #pretty_body + } + } + + impl ::std::fmt::Display for #ident { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + #display_body + } + } + impl<'v> ::starlark::values::AllocValue<'v> for #ident { fn alloc_value(self, heap: &'v ::starlark::values::Heap) -> ::starlark::values::Value<'v> { heap.alloc_simple(self) @@ -388,13 +922,35 @@ fn try_message(input: TokenStream) -> Result { ::starlark::environment::MethodsStatic::new(); RES.methods(#methods_ident) } - } + fn collect_repr(&self, collector: &mut String) { + self.__starbuf_pretty(collector, 0); + } + } #[::starlark::starlark_module] pub(crate) fn #methods_ident(registry: &mut ::starlark::environment::MethodsBuilder) { #(#starlark_attributes)* } + + #[::starlark::starlark_module] + pub fn #constructor_fn_ident(globals: &mut ::starlark::environment::GlobalsBuilder) { + fn #ident<'v>( + #[starlark(kwargs)] kwargs: ::starlark::collections::SmallMap<&str, ::starlark::values::Value<'v>>, + ) -> ::starlark::Result<#ident> { + let mut result = #ident::default(); + for (key, val) in kwargs.iter() { + let value = *val; + match *key { + #(#constructor_arms)* + other => return Err(::anyhow::anyhow!( + "unknown field '{}' for {}", other, stringify!(#ident) + ).into()), + } + } + Ok(result) + } + } }; Ok(expanded) @@ -491,6 +1047,28 @@ pub fn oneof(input: proc_macro::TokenStream) -> proc_macro::TokenStream { try_oneof(input.into()).unwrap().into() } +fn extract_inner_type(ty: &Type, expected_path: &str) -> Option { + if let 
Type::Path(p) = ty { + let tys = p + .path + .segments + .iter() + .map(|i| i.ident.to_string()) + .collect::>() + .join("::"); + if tys == expected_path { + if let Some(last_seg) = p.path.segments.last() { + if let syn::PathArguments::AngleBracketed(args) = &last_seg.arguments { + if let Some(syn::GenericArgument::Type(inner_ty)) = args.args.first() { + return Some(inner_ty.clone()); + } + } + } + } + } + None +} + fn snake(s: String) -> String { let mut result = String::new(); let mut chars = s.chars().peekable(); @@ -573,6 +1151,190 @@ pub fn enumeration(input: proc_macro::TokenStream) -> proc_macro::TokenStream { try_enumeration(input.into()).unwrap().into() } +fn try_service(attr: TokenStream, item: TokenStream) -> Result { + let (client_ty, methods) = parse_service_attr(attr)?; + + let input: ItemStruct = syn::parse2(item)?; + let ident = &input.ident; + + let handle_ident = Ident::new(&format!("{}ClientHandle", ident), ident.span()); + let ident_snake = snake(ident.to_string()); + let methods_ident = Ident::new(&format!("{}_client_methods", ident_snake), ident.span()); + let module_ident = Ident::new(&format!("{}_service", ident_snake), ident.span()); + let starlark_type = format!("{}_client", ident_snake); + + let rpc_methods = methods.iter().map(|rpc| { + let rpc_name = &rpc.name; + let rpc_method = &rpc.method; + let req = &rpc.request; + let resp = &rpc.response; + + quote! { + fn #rpc_name<'v>( + this: ::starlark::values::Value<'v>, + req: ::starlark::values::Value<'v>, + ) -> ::starlark::Result<#resp> { + use ::starlark::values::ValueLike; + let handle = this.downcast_ref_err::<#handle_ident>()?; + let req = req.downcast_ref_err::<#req>()?.clone(); + + let client = handle.client.get() + .ok_or_else(|| ::starlark::Error::from(::anyhow::anyhow!( + "service not connected; call .connect(ctx) first")))? + .clone(); + let rt = handle.rt.get() + .ok_or_else(|| ::starlark::Error::from(::anyhow::anyhow!( + "service not connected; call .connect(ctx) first")))? 
+ .clone(); + + let headers = handle.headers.clone(); + + let resp = rt.block_on(async move { + let mut c = client.as_ref().clone(); + let mut request = ::tonic::Request::new(req); + for (key, value) in &headers { + request.metadata_mut().insert( + key.parse::<::tonic::metadata::MetadataKey<::tonic::metadata::Ascii>>() + .map_err(|e| ::anyhow::anyhow!("invalid header key '{}': {}", key, e))?, + value.parse::<::tonic::metadata::MetadataValue<::tonic::metadata::Ascii>>() + .map_err(|e| ::anyhow::anyhow!("invalid header value: {}", e))?, + ); + } + let resp = c + .#rpc_method(request) + .await + .map_err(::anyhow::Error::new)? + .into_inner(); + Ok::<#resp, ::anyhow::Error>(resp) + }) + .map_err(|e| ::starlark::Error::from(::anyhow::anyhow!(e)))?; + + Ok(resp) + } + } + }); + + let expanded = quote! { + #[derive(Debug, ::allocative::Allocative, ::starlark::values::NoSerialize, ::starlark::values::ProvidesStaticType)] + pub struct #handle_ident { + uri: String, + headers: Vec<(String, String)>, + timeout: ::std::time::Duration, + #[allocative(skip)] + client: ::std::sync::OnceLock<::std::sync::Arc<#client_ty<::tonic::transport::Channel>>>, + #[allocative(skip)] + rt: ::std::sync::OnceLock<::tokio::runtime::Handle>, + } + + unsafe impl<'v> ::starlark::values::Trace<'v> for #handle_ident { + fn trace(&mut self, _tracer: &::starlark::values::Tracer<'v>) {} + } + + impl ::std::fmt::Display for #handle_ident { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + write!(f, stringify!(#handle_ident)) + } + } + + impl<'v> ::starlark::values::AllocValue<'v> for #handle_ident { + fn alloc_value(self, heap: &'v ::starlark::values::Heap) -> ::starlark::values::Value<'v> { + heap.alloc_simple(self) + } + } + + #[::starlark::values::starlark_value(type = #starlark_type)] + impl<'v> ::starlark::values::StarlarkValue<'v> for #handle_ident { + fn get_methods() -> ::core::option::Option<&'static ::starlark::environment::Methods> { + static RES: 
::starlark::environment::MethodsStatic = ::starlark::environment::MethodsStatic::new(); + RES.methods(#methods_ident) + } + } + + #[::starlark::starlark_module] + pub(crate) fn #methods_ident(registry: &mut ::starlark::environment::MethodsBuilder) { + fn connect<'v>( + this: ::starlark::values::Value<'v>, + ctx: ::starlark::values::Value<'v>, + ) -> ::starlark::Result<::starlark::values::none::NoneType> { + use ::starlark::values::ValueLike; + let handle = this.downcast_ref_err::<#handle_ident>()?; + + // Normalize grpcs:// to https:// + let uri = if handle.uri.starts_with("grpcs://") { + format!("https://{}", &handle.uri["grpcs://".len()..]) + } else { + handle.uri.clone() + }; + + let rt = ::tokio::runtime::Handle::current(); + + let ep = ::tonic::transport::Endpoint::from_shared(uri.clone()) + .map_err(|e| ::anyhow::anyhow!("invalid URI: {}", e))? + .connect_timeout(::std::time::Duration::from_secs(5)) + .timeout(handle.timeout); + + let ep = if uri.starts_with("https://") { + ep.tls_config(::tonic::transport::ClientTlsConfig::new().with_native_roots()) + .map_err(|e| ::anyhow::anyhow!("TLS config error: {}", e))? 
+ } else { + ep + }; + + let channel = ep.connect_lazy(); + let client = #client_ty::new(channel); + + handle.client.set(::std::sync::Arc::new(client)) + .map_err(|_| ::anyhow::anyhow!("service already connected"))?; + handle.rt.set(rt) + .map_err(|_| ::anyhow::anyhow!("service already connected"))?; + + Ok(::starlark::values::none::NoneType) + } + + #(#rpc_methods)* + } + + #[::starlark::starlark_module] + pub fn #module_ident(globals: &mut ::starlark::environment::GlobalsBuilder) { + fn #ident<'v>( + #[starlark(require = named)] uri: String, + #[starlark(require = named)] headers: ::starlark::values::Value<'v>, + #[starlark(require = named, default = 10000)] timeout: u64, + ) -> ::starlark::Result<#handle_ident> { + let mut h = Vec::new(); + if let Some(dict) = ::starlark::values::dict::DictRef::from_value(headers) { + for (k, v) in dict.iter() { + let key = k.unpack_str() + .ok_or_else(|| ::anyhow::anyhow!("header key must be a string"))?; + let val = v.unpack_str() + .ok_or_else(|| ::anyhow::anyhow!("header value must be a string"))?; + h.push((key.to_string(), val.to_string())); + } + } + Ok(#handle_ident { + uri, + headers: h, + timeout: ::std::time::Duration::from_millis(timeout), + client: ::std::sync::OnceLock::new(), + rt: ::std::sync::OnceLock::new(), + }) + } + } + + #input + }; + + Ok(expanded) +} + +#[proc_macro_attribute] +pub fn service( + attr: proc_macro::TokenStream, + item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + try_service(attr.into(), item.into()).unwrap().into() +} + #[cfg(test)] mod tests { use super::*; diff --git a/docs/lib.md b/docs/lib.md index 1fa18d059..c08d0667a 100644 --- a/docs/lib.md +++ b/docs/lib.md @@ -50,6 +50,8 @@ `module` [json](/lib/json) +`module` [remote](/lib/remote) + `module` [std](/lib/std) `module` [typing](/lib/typing) @@ -110,6 +112,30 @@ any([0, 0]) == False any([0, False]) == False ``` +`function` **attr** + +
def attr(
+    typ: typing.Any,
+    default: typing.Any = ...,
+    /
+) -> field
+ +Creates a field definition with a type and optional default value. + +Mutable defaults (lists, dicts) are deep-copied when a spec instance is +created, so each instance gets its own independent copy. + +Example: + +```starlark +MySpec = spec(host=str, port=attr(int, 80)) +r = MySpec(host="localhost") # port defaults to 80 + +# Mutable defaults are copied per instance: +attr(list[str], []) # each instance gets a fresh [] +attr(dict[str, int], {}) # each instance gets a fresh {} +``` + `function` **breakpoint**
def breakpoint() -> None
@@ -627,6 +653,36 @@ sorted(["two", "three", "four"], key=len) == ["two", "four", "thr sorted(["two", "three", "four"], key=len, reverse=True) == ["three", "four", "two"] # longest to shortest ``` +`function` **spec** + +
def spec(
+    **kwargs: typing.Any
+) -> spec
+ +Creates a spec type with the given fields. + +Each field can be a bare type (required, no default) or an `attr()` +definition (with type and optional default). + +Mutable defaults (lists, dicts) are deep-copied per instance, so each +instance gets its own independent copy. No `default_factory` needed. + +Example: + +```starlark +MySpec = spec(host=str, port=int) +r = MySpec(host="localhost", port=80) +print(r.host) # "localhost" +print(r.port) # 80 + +# Mutable defaults are safe: +ListSpec = spec(items=attr(list[str], [])) +a = ListSpec() +b = ListSpec() +a.items.append("x") +print(b.items) # [] — each instance has its own list +``` + `function` **task**
def task(
@@ -653,7 +709,7 @@ build = task(
     task_args = {
         "target": args.string(),
     },
-    config = None  # Optional user-defined config (e.g., a record); defaults to None if not provided
+    config = lambda: MyConfig(key = "value")  # Optional lambda that returns config; evaluated at task creation
 )
 ```
 
diff --git a/docs/lib/future.md b/docs/lib/future.md
index deb2c6424..d5584c011 100644
--- a/docs/lib/future.md
+++ b/docs/lib/future.md
@@ -3,3 +3,19 @@
 `function` **Future.block**
 
 
def Future.block() -> typing.Any
+ +`function` **Future.map\_err** + +
def Future.map_err(callable: typing.Any) -> Future
+ +`function` **Future.map\_ok** + +
def Future.map_ok(callable: typing.Any) -> Future
+ +`function` **Future.map\_ok\_or\_else** + +
def Future.map_ok_or_else(
+    *,
+    map_ok: typing.Any,
+    map_err: typing.Any
+) -> Future
diff --git a/docs/lib/http.md b/docs/lib/http.md index f1aa34c3d..0f18798d8 100644 --- a/docs/lib/http.md +++ b/docs/lib/http.md @@ -1,5 +1,14 @@ +`function` **Http.delete** + +
def Http.delete(
+    *,
+    url: str,
+    headers: dict[str, str] = ...,
+    unix_socket: None | str = None
+) -> Future
+ `function` **Http.download**
def Http.download(
@@ -22,7 +31,8 @@ The checksum is verified in a streaming fashion during download.
 
def Http.get(
     *,
     url: str,
-    headers: dict[str, str] = ...
+    headers: dict[str, str] = ...,
+    unix_socket: None | str = None
 ) -> Future
`function` **Http.post** @@ -31,5 +41,6 @@ The checksum is verified in a streaming fashion during download. url
: str, *, headers: dict[str, str] = ..., - data: str + data: str, + unix_socket: None | str = None ) -> Future
diff --git a/docs/lib/remote.md b/docs/lib/remote.md new file mode 100644 index 000000000..89c2255bf --- /dev/null +++ b/docs/lib/remote.md @@ -0,0 +1,3 @@ + + +`module` [execution](/lib/remote/execution) diff --git a/docs/lib/remote/execution.md b/docs/lib/remote/execution.md new file mode 100644 index 000000000..4c81b718f --- /dev/null +++ b/docs/lib/remote/execution.md @@ -0,0 +1,5 @@ + + +`function` **ActionCache** + +
def ActionCache(addr: str, timeout_ms: int = ...) -> action_cache_client
diff --git a/examples/large_bes/MODULE.bazel.lock b/examples/large_bes/MODULE.bazel.lock new file mode 100644 index 000000000..891271ffc --- /dev/null +++ b/examples/large_bes/MODULE.bazel.lock @@ -0,0 +1,191 @@ +{ + "lockFileVersion": 26, + "registryFileHashes": { + "https://bcr.bazel.build/bazel_registry.json": "8a28e4aff06ee60aed2a8c281907fb8bcbf3b753c91fb5a5c57da3215d5b3497", + "https://bcr.bazel.build/modules/abseil-cpp/20210324.2/MODULE.bazel": "7cd0312e064fde87c8d1cd79ba06c876bd23630c83466e9500321be55c96ace2", + "https://bcr.bazel.build/modules/abseil-cpp/20211102.0/MODULE.bazel": "70390338f7a5106231d20620712f7cccb659cd0e9d073d1991c038eb9fc57589", + "https://bcr.bazel.build/modules/abseil-cpp/20230125.1/MODULE.bazel": "89047429cb0207707b2dface14ba7f8df85273d484c2572755be4bab7ce9c3a0", + "https://bcr.bazel.build/modules/abseil-cpp/20230802.0.bcr.1/MODULE.bazel": "1c8cec495288dccd14fdae6e3f95f772c1c91857047a098fad772034264cc8cb", + "https://bcr.bazel.build/modules/abseil-cpp/20230802.0/MODULE.bazel": "d253ae36a8bd9ee3c5955384096ccb6baf16a1b1e93e858370da0a3b94f77c16", + "https://bcr.bazel.build/modules/abseil-cpp/20230802.1/MODULE.bazel": "fa92e2eb41a04df73cdabeec37107316f7e5272650f81d6cc096418fe647b915", + "https://bcr.bazel.build/modules/abseil-cpp/20240116.1/MODULE.bazel": "37bcdb4440fbb61df6a1c296ae01b327f19e9bb521f9b8e26ec854b6f97309ed", + "https://bcr.bazel.build/modules/abseil-cpp/20240116.2/MODULE.bazel": "73939767a4686cd9a520d16af5ab440071ed75cec1a876bf2fcfaf1f71987a16", + "https://bcr.bazel.build/modules/abseil-cpp/20250127.1/MODULE.bazel": "c4a89e7ceb9bf1e25cf84a9f830ff6b817b72874088bf5141b314726e46a57c1", + "https://bcr.bazel.build/modules/abseil-cpp/20250512.1/MODULE.bazel": "d209fdb6f36ffaf61c509fcc81b19e81b411a999a934a032e10cd009a0226215", + "https://bcr.bazel.build/modules/abseil-cpp/20250814.1/MODULE.bazel": "51f2312901470cdab0dbdf3b88c40cd21c62a7ed58a3de45b365ddc5b11bcab2", + "https://bcr.bazel.build/modules/abseil-cpp/20250814.1/source.json": 
"cea3901d7e299da7320700abbaafe57a65d039f10d0d7ea601c4a66938ea4b0c", + "https://bcr.bazel.build/modules/apple_support/1.11.1/MODULE.bazel": "1843d7cd8a58369a444fc6000e7304425fba600ff641592161d9f15b179fb896", + "https://bcr.bazel.build/modules/apple_support/1.15.1/MODULE.bazel": "a0556fefca0b1bb2de8567b8827518f94db6a6e7e7d632b4c48dc5f865bc7c85", + "https://bcr.bazel.build/modules/apple_support/1.21.0/MODULE.bazel": "ac1824ed5edf17dee2fdd4927ada30c9f8c3b520be1b5fd02a5da15bc10bff3e", + "https://bcr.bazel.build/modules/apple_support/1.21.1/MODULE.bazel": "5809fa3efab15d1f3c3c635af6974044bac8a4919c62238cce06acee8a8c11f1", + "https://bcr.bazel.build/modules/apple_support/1.24.2/MODULE.bazel": "0e62471818affb9f0b26f128831d5c40b074d32e6dda5a0d3852847215a41ca4", + "https://bcr.bazel.build/modules/apple_support/1.24.2/source.json": "2c22c9827093250406c5568da6c54e6fdf0ef06238def3d99c71b12feb057a8d", + "https://bcr.bazel.build/modules/bazel_features/1.1.1/MODULE.bazel": "27b8c79ef57efe08efccbd9dd6ef70d61b4798320b8d3c134fd571f78963dbcd", + "https://bcr.bazel.build/modules/bazel_features/1.10.0/MODULE.bazel": "f75e8807570484a99be90abcd52b5e1f390362c258bcb73106f4544957a48101", + "https://bcr.bazel.build/modules/bazel_features/1.11.0/MODULE.bazel": "f9382337dd5a474c3b7d334c2f83e50b6eaedc284253334cf823044a26de03e8", + "https://bcr.bazel.build/modules/bazel_features/1.15.0/MODULE.bazel": "d38ff6e517149dc509406aca0db3ad1efdd890a85e049585b7234d04238e2a4d", + "https://bcr.bazel.build/modules/bazel_features/1.17.0/MODULE.bazel": "039de32d21b816b47bd42c778e0454217e9c9caac4a3cf8e15c7231ee3ddee4d", + "https://bcr.bazel.build/modules/bazel_features/1.18.0/MODULE.bazel": "1be0ae2557ab3a72a57aeb31b29be347bcdc5d2b1eb1e70f39e3851a7e97041a", + "https://bcr.bazel.build/modules/bazel_features/1.19.0/MODULE.bazel": "59adcdf28230d220f0067b1f435b8537dd033bfff8db21335ef9217919c7fb58", + "https://bcr.bazel.build/modules/bazel_features/1.21.0/MODULE.bazel": 
"675642261665d8eea09989aa3b8afb5c37627f1be178382c320d1b46afba5e3b", + "https://bcr.bazel.build/modules/bazel_features/1.23.0/MODULE.bazel": "fd1ac84bc4e97a5a0816b7fd7d4d4f6d837b0047cf4cbd81652d616af3a6591a", + "https://bcr.bazel.build/modules/bazel_features/1.27.0/MODULE.bazel": "621eeee06c4458a9121d1f104efb80f39d34deff4984e778359c60eaf1a8cb65", + "https://bcr.bazel.build/modules/bazel_features/1.28.0/MODULE.bazel": "4b4200e6cbf8fa335b2c3f43e1d6ef3e240319c33d43d60cc0fbd4b87ece299d", + "https://bcr.bazel.build/modules/bazel_features/1.3.0/MODULE.bazel": "cdcafe83ec318cda34e02948e81d790aab8df7a929cec6f6969f13a489ccecd9", + "https://bcr.bazel.build/modules/bazel_features/1.30.0/MODULE.bazel": "a14b62d05969a293b80257e72e597c2da7f717e1e69fa8b339703ed6731bec87", + "https://bcr.bazel.build/modules/bazel_features/1.33.0/MODULE.bazel": "8b8dc9d2a4c88609409c3191165bccec0e4cb044cd7a72ccbe826583303459f6", + "https://bcr.bazel.build/modules/bazel_features/1.33.0/source.json": "13617db3930328c2cd2807a0f13d52ca870ac05f96db9668655113265147b2a6", + "https://bcr.bazel.build/modules/bazel_features/1.4.1/MODULE.bazel": "e45b6bb2350aff3e442ae1111c555e27eac1d915e77775f6fdc4b351b758b5d7", + "https://bcr.bazel.build/modules/bazel_features/1.9.1/MODULE.bazel": "8f679097876a9b609ad1f60249c49d68bfab783dd9be012faf9d82547b14815a", + "https://bcr.bazel.build/modules/bazel_skylib/1.0.3/MODULE.bazel": "bcb0fd896384802d1ad283b4e4eb4d718eebd8cb820b0a2c3a347fb971afd9d8", + "https://bcr.bazel.build/modules/bazel_skylib/1.1.1/MODULE.bazel": "1add3e7d93ff2e6998f9e118022c84d163917d912f5afafb3058e3d2f1545b5e", + "https://bcr.bazel.build/modules/bazel_skylib/1.2.0/MODULE.bazel": "44fe84260e454ed94ad326352a698422dbe372b21a1ac9f3eab76eb531223686", + "https://bcr.bazel.build/modules/bazel_skylib/1.2.1/MODULE.bazel": "f35baf9da0efe45fa3da1696ae906eea3d615ad41e2e3def4aeb4e8bc0ef9a7a", + "https://bcr.bazel.build/modules/bazel_skylib/1.3.0/MODULE.bazel": 
"20228b92868bf5cfc41bda7afc8a8ba2a543201851de39d990ec957b513579c5", + "https://bcr.bazel.build/modules/bazel_skylib/1.4.1/MODULE.bazel": "a0dcb779424be33100dcae821e9e27e4f2901d9dfd5333efe5ac6a8d7ab75e1d", + "https://bcr.bazel.build/modules/bazel_skylib/1.4.2/MODULE.bazel": "3bd40978e7a1fac911d5989e6b09d8f64921865a45822d8b09e815eaa726a651", + "https://bcr.bazel.build/modules/bazel_skylib/1.5.0/MODULE.bazel": "32880f5e2945ce6a03d1fbd588e9198c0a959bb42297b2cfaf1685b7bc32e138", + "https://bcr.bazel.build/modules/bazel_skylib/1.6.1/MODULE.bazel": "8fdee2dbaace6c252131c00e1de4b165dc65af02ea278476187765e1a617b917", + "https://bcr.bazel.build/modules/bazel_skylib/1.7.0/MODULE.bazel": "0db596f4563de7938de764cc8deeabec291f55e8ec15299718b93c4423e9796d", + "https://bcr.bazel.build/modules/bazel_skylib/1.7.1/MODULE.bazel": "3120d80c5861aa616222ec015332e5f8d3171e062e3e804a2a0253e1be26e59b", + "https://bcr.bazel.build/modules/bazel_skylib/1.8.1/MODULE.bazel": "88ade7293becda963e0e3ea33e7d54d3425127e0a326e0d17da085a5f1f03ff6", + "https://bcr.bazel.build/modules/bazel_skylib/1.8.2/MODULE.bazel": "69ad6927098316848b34a9142bcc975e018ba27f08c4ff403f50c1b6e646ca67", + "https://bcr.bazel.build/modules/bazel_skylib/1.8.2/source.json": "34a3c8bcf233b835eb74be9d628899bb32999d3e0eadef1947a0a562a2b16ffb", + "https://bcr.bazel.build/modules/buildozer/8.2.1/MODULE.bazel": "61e9433c574c2bd9519cad7fa66b9c1d2b8e8d5f3ae5d6528a2c2d26e68d874d", + "https://bcr.bazel.build/modules/buildozer/8.2.1/source.json": "7c33f6a26ee0216f85544b4bca5e9044579e0219b6898dd653f5fb449cf2e484", + "https://bcr.bazel.build/modules/google_benchmark/1.8.2/MODULE.bazel": "a70cf1bba851000ba93b58ae2f6d76490a9feb74192e57ab8e8ff13c34ec50cb", + "https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4", + "https://bcr.bazel.build/modules/googletest/1.14.0.bcr.1/MODULE.bazel": "22c31a561553727960057361aa33bf20fb2e98584bc4fec007906e27053f80c6", + 
"https://bcr.bazel.build/modules/googletest/1.14.0/MODULE.bazel": "cfbcbf3e6eac06ef9d85900f64424708cc08687d1b527f0ef65aa7517af8118f", + "https://bcr.bazel.build/modules/googletest/1.15.2/MODULE.bazel": "6de1edc1d26cafb0ea1a6ab3f4d4192d91a312fd2d360b63adaa213cd00b2108", + "https://bcr.bazel.build/modules/googletest/1.17.0/MODULE.bazel": "dbec758171594a705933a29fcf69293d2468c49ec1f2ebca65c36f504d72df46", + "https://bcr.bazel.build/modules/googletest/1.17.0/source.json": "38e4454b25fc30f15439c0378e57909ab1fd0a443158aa35aec685da727cd713", + "https://bcr.bazel.build/modules/jsoncpp/1.9.5/MODULE.bazel": "31271aedc59e815656f5736f282bb7509a97c7ecb43e927ac1a37966e0578075", + "https://bcr.bazel.build/modules/jsoncpp/1.9.6/MODULE.bazel": "2f8d20d3b7d54143213c4dfc3d98225c42de7d666011528dc8fe91591e2e17b0", + "https://bcr.bazel.build/modules/jsoncpp/1.9.6/source.json": "a04756d367a2126c3541682864ecec52f92cdee80a35735a3cb249ce015ca000", + "https://bcr.bazel.build/modules/libpfm/4.11.0/MODULE.bazel": "45061ff025b301940f1e30d2c16bea596c25b176c8b6b3087e92615adbd52902", + "https://bcr.bazel.build/modules/nlohmann_json/3.6.1/MODULE.bazel": "6f7b417dcc794d9add9e556673ad25cb3ba835224290f4f848f8e2db1e1fca74", + "https://bcr.bazel.build/modules/nlohmann_json/3.6.1/source.json": "f448c6e8963fdfa7eb831457df83ad63d3d6355018f6574fb017e8169deb43a9", + "https://bcr.bazel.build/modules/platforms/0.0.10/MODULE.bazel": "8cb8efaf200bdeb2150d93e162c40f388529a25852b332cec879373771e48ed5", + "https://bcr.bazel.build/modules/platforms/0.0.11/MODULE.bazel": "0daefc49732e227caa8bfa834d65dc52e8cc18a2faf80df25e8caea151a9413f", + "https://bcr.bazel.build/modules/platforms/0.0.4/MODULE.bazel": "9b328e31ee156f53f3c416a64f8491f7eb731742655a47c9eec4703a71644aee", + "https://bcr.bazel.build/modules/platforms/0.0.5/MODULE.bazel": "5733b54ea419d5eaf7997054bb55f6a1d0b5ff8aedf0176fef9eea44f3acda37", + "https://bcr.bazel.build/modules/platforms/0.0.6/MODULE.bazel": 
"ad6eeef431dc52aefd2d77ed20a4b353f8ebf0f4ecdd26a807d2da5aa8cd0615", + "https://bcr.bazel.build/modules/platforms/0.0.7/MODULE.bazel": "72fd4a0ede9ee5c021f6a8dd92b503e089f46c227ba2813ff183b71616034814", + "https://bcr.bazel.build/modules/platforms/0.0.8/MODULE.bazel": "9f142c03e348f6d263719f5074b21ef3adf0b139ee4c5133e2aa35664da9eb2d", + "https://bcr.bazel.build/modules/platforms/0.0.9/MODULE.bazel": "4a87a60c927b56ddd67db50c89acaa62f4ce2a1d2149ccb63ffd871d5ce29ebc", + "https://bcr.bazel.build/modules/platforms/1.0.0/MODULE.bazel": "f05feb42b48f1b3c225e4ccf351f367be0371411a803198ec34a389fb22aa580", + "https://bcr.bazel.build/modules/platforms/1.0.0/source.json": "f4ff1fd412e0246fd38c82328eb209130ead81d62dcd5a9e40910f867f733d96", + "https://bcr.bazel.build/modules/protobuf/21.7/MODULE.bazel": "a5a29bb89544f9b97edce05642fac225a808b5b7be74038ea3640fae2f8e66a7", + "https://bcr.bazel.build/modules/protobuf/27.0/MODULE.bazel": "7873b60be88844a0a1d8f80b9d5d20cfbd8495a689b8763e76c6372998d3f64c", + "https://bcr.bazel.build/modules/protobuf/29.0-rc2/MODULE.bazel": "6241d35983510143049943fc0d57937937122baf1b287862f9dc8590fc4c37df", + "https://bcr.bazel.build/modules/protobuf/29.0-rc3/MODULE.bazel": "33c2dfa286578573afc55a7acaea3cada4122b9631007c594bf0729f41c8de92", + "https://bcr.bazel.build/modules/protobuf/29.1/MODULE.bazel": "557c3457560ff49e122ed76c0bc3397a64af9574691cb8201b4e46d4ab2ecb95", + "https://bcr.bazel.build/modules/protobuf/3.19.0/MODULE.bazel": "6b5fbb433f760a99a22b18b6850ed5784ef0e9928a72668b66e4d7ccd47db9b0", + "https://bcr.bazel.build/modules/protobuf/32.1/MODULE.bazel": "89cd2866a9cb07fee9ff74c41ceace11554f32e0d849de4e23ac55515cfada4d", + "https://bcr.bazel.build/modules/protobuf/33.4/MODULE.bazel": "114775b816b38b6d0ca620450d6b02550c60ceedfdc8d9a229833b34a223dc42", + "https://bcr.bazel.build/modules/protobuf/33.4/source.json": "555f8686b4c7d6b5ba731fbea13bf656b4bfd9a7ff629c1d9d3f6e1d6155de79", + 
"https://bcr.bazel.build/modules/pybind11_bazel/2.11.1/MODULE.bazel": "88af1c246226d87e65be78ed49ecd1e6f5e98648558c14ce99176da041dc378e", + "https://bcr.bazel.build/modules/pybind11_bazel/2.12.0/MODULE.bazel": "e6f4c20442eaa7c90d7190d8dc539d0ab422f95c65a57cc59562170c58ae3d34", + "https://bcr.bazel.build/modules/pybind11_bazel/2.12.0/source.json": "6900fdc8a9e95866b8c0d4ad4aba4d4236317b5c1cd04c502df3f0d33afed680", + "https://bcr.bazel.build/modules/re2/2023-09-01/MODULE.bazel": "cb3d511531b16cfc78a225a9e2136007a48cf8a677e4264baeab57fe78a80206", + "https://bcr.bazel.build/modules/re2/2024-07-02.bcr.1/MODULE.bazel": "b4963dda9b31080be1905ef085ecd7dd6cd47c05c79b9cdf83ade83ab2ab271a", + "https://bcr.bazel.build/modules/re2/2024-07-02.bcr.1/source.json": "2ff292be6ef3340325ce8a045ecc326e92cbfab47c7cbab4bd85d28971b97ac4", + "https://bcr.bazel.build/modules/re2/2024-07-02/MODULE.bazel": "0eadc4395959969297cbcf31a249ff457f2f1d456228c67719480205aa306daa", + "https://bcr.bazel.build/modules/rules_android/0.1.1/MODULE.bazel": "48809ab0091b07ad0182defb787c4c5328bd3a278938415c00a7b69b50c4d3a8", + "https://bcr.bazel.build/modules/rules_android/0.1.1/source.json": "e6986b41626ee10bdc864937ffb6d6bf275bb5b9c65120e6137d56e6331f089e", + "https://bcr.bazel.build/modules/rules_apple/3.16.0/MODULE.bazel": "0d1caf0b8375942ce98ea944be754a18874041e4e0459401d925577624d3a54a", + "https://bcr.bazel.build/modules/rules_apple/4.1.0/MODULE.bazel": "76e10fd4a48038d3fc7c5dc6e63b7063bbf5304a2e3bd42edda6ec660eebea68", + "https://bcr.bazel.build/modules/rules_apple/4.1.0/source.json": "8ee81e1708756f81b343a5eb2b2f0b953f1d25c4ab3d4a68dc02754872e80715", + "https://bcr.bazel.build/modules/rules_cc/0.0.1/MODULE.bazel": "cb2aa0747f84c6c3a78dad4e2049c154f08ab9d166b1273835a8174940365647", + "https://bcr.bazel.build/modules/rules_cc/0.0.10/MODULE.bazel": "ec1705118f7eaedd6e118508d3d26deba2a4e76476ada7e0e3965211be012002", + "https://bcr.bazel.build/modules/rules_cc/0.0.13/MODULE.bazel": 
"0e8529ed7b323dad0775ff924d2ae5af7640b23553dfcd4d34344c7e7a867191", + "https://bcr.bazel.build/modules/rules_cc/0.0.15/MODULE.bazel": "6704c35f7b4a72502ee81f61bf88706b54f06b3cbe5558ac17e2e14666cd5dcc", + "https://bcr.bazel.build/modules/rules_cc/0.0.16/MODULE.bazel": "7661303b8fc1b4d7f532e54e9d6565771fea666fbdf839e0a86affcd02defe87", + "https://bcr.bazel.build/modules/rules_cc/0.0.17/MODULE.bazel": "2ae1d8f4238ec67d7185d8861cb0a2cdf4bc608697c331b95bf990e69b62e64a", + "https://bcr.bazel.build/modules/rules_cc/0.0.2/MODULE.bazel": "6915987c90970493ab97393024c156ea8fb9f3bea953b2f3ec05c34f19b5695c", + "https://bcr.bazel.build/modules/rules_cc/0.0.6/MODULE.bazel": "abf360251023dfe3efcef65ab9d56beefa8394d4176dd29529750e1c57eaa33f", + "https://bcr.bazel.build/modules/rules_cc/0.0.8/MODULE.bazel": "964c85c82cfeb6f3855e6a07054fdb159aced38e99a5eecf7bce9d53990afa3e", + "https://bcr.bazel.build/modules/rules_cc/0.0.9/MODULE.bazel": "836e76439f354b89afe6a911a7adf59a6b2518fafb174483ad78a2a2fde7b1c5", + "https://bcr.bazel.build/modules/rules_cc/0.1.1/MODULE.bazel": "2f0222a6f229f0bf44cd711dc13c858dad98c62d52bd51d8fc3a764a83125513", + "https://bcr.bazel.build/modules/rules_cc/0.1.2/MODULE.bazel": "557ddc3a96858ec0d465a87c0a931054d7dcfd6583af2c7ed3baf494407fd8d0", + "https://bcr.bazel.build/modules/rules_cc/0.1.5/MODULE.bazel": "88dfc9361e8b5ae1008ac38f7cdfd45ad738e4fa676a3ad67d19204f045a1fd8", + "https://bcr.bazel.build/modules/rules_cc/0.2.0/MODULE.bazel": "b5c17f90458caae90d2ccd114c81970062946f49f355610ed89bebf954f5783c", + "https://bcr.bazel.build/modules/rules_cc/0.2.13/MODULE.bazel": "eecdd666eda6be16a8d9dc15e44b5c75133405e820f620a234acc4b1fdc5aa37", + "https://bcr.bazel.build/modules/rules_cc/0.2.14/MODULE.bazel": "353c99ed148887ee89c54a17d4100ae7e7e436593d104b668476019023b58df8", + "https://bcr.bazel.build/modules/rules_cc/0.2.14/source.json": "55d0a4587c5592fad350f6e698530f4faf0e7dd15e69d43f8d87e220c78bea54", + "https://bcr.bazel.build/modules/rules_cc/0.2.8/MODULE.bazel": 
"f1df20f0bf22c28192a794f29b501ee2018fa37a3862a1a2132ae2940a23a642", + "https://bcr.bazel.build/modules/rules_foreign_cc/0.9.0/MODULE.bazel": "c9e8c682bf75b0e7c704166d79b599f93b72cfca5ad7477df596947891feeef6", + "https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/MODULE.bazel": "40c97d1144356f52905566c55811f13b299453a14ac7769dfba2ac38192337a8", + "https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74", + "https://bcr.bazel.build/modules/rules_java/5.3.5/MODULE.bazel": "a4ec4f2db570171e3e5eb753276ee4b389bae16b96207e9d3230895c99644b86", + "https://bcr.bazel.build/modules/rules_java/6.5.2/MODULE.bazel": "1d440d262d0e08453fa0c4d8f699ba81609ed0e9a9a0f02cd10b3e7942e61e31", + "https://bcr.bazel.build/modules/rules_java/7.10.0/MODULE.bazel": "530c3beb3067e870561739f1144329a21c851ff771cd752a49e06e3dc9c2e71a", + "https://bcr.bazel.build/modules/rules_java/7.12.2/MODULE.bazel": "579c505165ee757a4280ef83cda0150eea193eed3bef50b1004ba88b99da6de6", + "https://bcr.bazel.build/modules/rules_java/7.2.0/MODULE.bazel": "06c0334c9be61e6cef2c8c84a7800cef502063269a5af25ceb100b192453d4ab", + "https://bcr.bazel.build/modules/rules_java/7.6.1/MODULE.bazel": "2f14b7e8a1aa2f67ae92bc69d1ec0fa8d9f827c4e17ff5e5f02e91caa3b2d0fe", + "https://bcr.bazel.build/modules/rules_java/8.3.2/MODULE.bazel": "7336d5511ad5af0b8615fdc7477535a2e4e723a357b6713af439fe8cf0195017", + "https://bcr.bazel.build/modules/rules_java/8.5.1/MODULE.bazel": "d8a9e38cc5228881f7055a6079f6f7821a073df3744d441978e7a43e20226939", + "https://bcr.bazel.build/modules/rules_java/8.6.1/MODULE.bazel": "f4808e2ab5b0197f094cabce9f4b006a27766beb6a9975931da07099560ca9c2", + "https://bcr.bazel.build/modules/rules_java/9.0.3/MODULE.bazel": "1f98ed015f7e744a745e0df6e898a7c5e83562d6b759dfd475c76456dda5ccea", + "https://bcr.bazel.build/modules/rules_java/9.0.3/source.json": "b038c0c07e12e658135bbc32cc1a2ded6e33785105c9d41958014c592de4593e", + 
"https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel": "a56b85e418c83eb1839819f0b515c431010160383306d13ec21959ac412d2fe7", + "https://bcr.bazel.build/modules/rules_jvm_external/5.1/MODULE.bazel": "33f6f999e03183f7d088c9be518a63467dfd0be94a11d0055fe2d210f89aa909", + "https://bcr.bazel.build/modules/rules_jvm_external/5.2/MODULE.bazel": "d9351ba35217ad0de03816ef3ed63f89d411349353077348a45348b096615036", + "https://bcr.bazel.build/modules/rules_jvm_external/6.3/MODULE.bazel": "c998e060b85f71e00de5ec552019347c8bca255062c990ac02d051bb80a38df0", + "https://bcr.bazel.build/modules/rules_jvm_external/6.7/MODULE.bazel": "e717beabc4d091ecb2c803c2d341b88590e9116b8bf7947915eeb33aab4f96dd", + "https://bcr.bazel.build/modules/rules_jvm_external/6.7/source.json": "5426f412d0a7fc6b611643376c7e4a82dec991491b9ce5cb1cfdd25fe2e92be4", + "https://bcr.bazel.build/modules/rules_kotlin/1.9.6/MODULE.bazel": "d269a01a18ee74d0335450b10f62c9ed81f2321d7958a2934e44272fe82dcef3", + "https://bcr.bazel.build/modules/rules_kotlin/1.9.6/source.json": "2faa4794364282db7c06600b7e5e34867a564ae91bda7cae7c29c64e9466b7d5", + "https://bcr.bazel.build/modules/rules_license/0.0.3/MODULE.bazel": "627e9ab0247f7d1e05736b59dbb1b6871373de5ad31c3011880b4133cafd4bd0", + "https://bcr.bazel.build/modules/rules_license/0.0.7/MODULE.bazel": "088fbeb0b6a419005b89cf93fe62d9517c0a2b8bb56af3244af65ecfe37e7d5d", + "https://bcr.bazel.build/modules/rules_license/1.0.0/MODULE.bazel": "a7fda60eefdf3d8c827262ba499957e4df06f659330bbe6cdbdb975b768bb65c", + "https://bcr.bazel.build/modules/rules_license/1.0.0/source.json": "a52c89e54cc311196e478f8382df91c15f7a2bfdf4c6cd0e2675cc2ff0b56efb", + "https://bcr.bazel.build/modules/rules_pkg/0.7.0/MODULE.bazel": "df99f03fc7934a4737122518bb87e667e62d780b610910f0447665a7e2be62dc", + "https://bcr.bazel.build/modules/rules_pkg/1.0.1/MODULE.bazel": "5b1df97dbc29623bccdf2b0dcd0f5cb08e2f2c9050aab1092fd39a41e82686ff", + 
"https://bcr.bazel.build/modules/rules_pkg/1.0.1/source.json": "bd82e5d7b9ce2d31e380dd9f50c111d678c3bdaca190cb76b0e1c71b05e1ba8a", + "https://bcr.bazel.build/modules/rules_proto/4.0.0/MODULE.bazel": "a7a7b6ce9bee418c1a760b3d84f83a299ad6952f9903c67f19e4edd964894e06", + "https://bcr.bazel.build/modules/rules_proto/5.3.0-21.7/MODULE.bazel": "e8dff86b0971688790ae75528fe1813f71809b5afd57facb44dad9e8eca631b7", + "https://bcr.bazel.build/modules/rules_proto/6.0.0-rc1/MODULE.bazel": "1e5b502e2e1a9e825eef74476a5a1ee524a92297085015a052510b09a1a09483", + "https://bcr.bazel.build/modules/rules_proto/6.0.2/MODULE.bazel": "ce916b775a62b90b61888052a416ccdda405212b6aaeb39522f7dc53431a5e73", + "https://bcr.bazel.build/modules/rules_proto/7.1.0/MODULE.bazel": "002d62d9108f75bb807cd56245d45648f38275cb3a99dcd45dfb864c5d74cb96", + "https://bcr.bazel.build/modules/rules_proto/7.1.0/source.json": "39f89066c12c24097854e8f57ab8558929f9c8d474d34b2c00ac04630ad8940e", + "https://bcr.bazel.build/modules/rules_python/0.10.2/MODULE.bazel": "cc82bc96f2997baa545ab3ce73f196d040ffb8756fd2d66125a530031cd90e5f", + "https://bcr.bazel.build/modules/rules_python/0.23.1/MODULE.bazel": "49ffccf0511cb8414de28321f5fcf2a31312b47c40cc21577144b7447f2bf300", + "https://bcr.bazel.build/modules/rules_python/0.25.0/MODULE.bazel": "72f1506841c920a1afec76975b35312410eea3aa7b63267436bfb1dd91d2d382", + "https://bcr.bazel.build/modules/rules_python/0.28.0/MODULE.bazel": "cba2573d870babc976664a912539b320cbaa7114cd3e8f053c720171cde331ed", + "https://bcr.bazel.build/modules/rules_python/0.31.0/MODULE.bazel": "93a43dc47ee570e6ec9f5779b2e64c1476a6ce921c48cc9a1678a91dd5f8fd58", + "https://bcr.bazel.build/modules/rules_python/0.33.2/MODULE.bazel": "3e036c4ad8d804a4dad897d333d8dce200d943df4827cb849840055be8d2e937", + "https://bcr.bazel.build/modules/rules_python/0.4.0/MODULE.bazel": "9208ee05fd48bf09ac60ed269791cf17fb343db56c8226a720fbb1cdf467166c", + "https://bcr.bazel.build/modules/rules_python/1.3.0/MODULE.bazel": 
"8361d57eafb67c09b75bf4bbe6be360e1b8f4f18118ab48037f2bd50aa2ccb13", + "https://bcr.bazel.build/modules/rules_python/1.4.1/MODULE.bazel": "8991ad45bdc25018301d6b7e1d3626afc3c8af8aaf4bc04f23d0b99c938b73a6", + "https://bcr.bazel.build/modules/rules_python/1.6.0/MODULE.bazel": "7e04ad8f8d5bea40451cf80b1bd8262552aa73f841415d20db96b7241bd027d8", + "https://bcr.bazel.build/modules/rules_python/1.7.0/MODULE.bazel": "d01f995ecd137abf30238ad9ce97f8fc3ac57289c8b24bd0bf53324d937a14f8", + "https://bcr.bazel.build/modules/rules_python/1.7.0/source.json": "028a084b65dcf8f4dc4f82f8778dbe65df133f234b316828a82e060d81bdce32", + "https://bcr.bazel.build/modules/rules_shell/0.2.0/MODULE.bazel": "fda8a652ab3c7d8fee214de05e7a9916d8b28082234e8d2c0094505c5268ed3c", + "https://bcr.bazel.build/modules/rules_shell/0.3.0/MODULE.bazel": "de4402cd12f4cc8fda2354fce179fdb068c0b9ca1ec2d2b17b3e21b24c1a937b", + "https://bcr.bazel.build/modules/rules_shell/0.6.1/MODULE.bazel": "72e76b0eea4e81611ef5452aa82b3da34caca0c8b7b5c0c9584338aa93bae26b", + "https://bcr.bazel.build/modules/rules_shell/0.6.1/source.json": "20ec05cd5e592055e214b2da8ccb283c7f2a421ea0dc2acbf1aa792e11c03d0c", + "https://bcr.bazel.build/modules/rules_swift/1.16.0/MODULE.bazel": "4a09f199545a60d09895e8281362b1ff3bb08bbde69c6fc87aff5b92fcc916ca", + "https://bcr.bazel.build/modules/rules_swift/2.1.1/MODULE.bazel": "494900a80f944fc7aa61500c2073d9729dff0b764f0e89b824eb746959bc1046", + "https://bcr.bazel.build/modules/rules_swift/2.4.0/MODULE.bazel": "1639617eb1ede28d774d967a738b4a68b0accb40650beadb57c21846beab5efd", + "https://bcr.bazel.build/modules/rules_swift/3.1.2/MODULE.bazel": "72c8f5cf9d26427cee6c76c8e3853eb46ce6b0412a081b2b6db6e8ad56267400", + "https://bcr.bazel.build/modules/rules_swift/3.1.2/source.json": "e85761f3098a6faf40b8187695e3de6d97944e98abd0d8ce579cb2daf6319a66", + "https://bcr.bazel.build/modules/stardoc/0.5.1/MODULE.bazel": "1a05d92974d0c122f5ccf09291442580317cdd859f07a8655f1db9a60374f9f8", + 
"https://bcr.bazel.build/modules/stardoc/0.5.3/MODULE.bazel": "c7f6948dae6999bf0db32c1858ae345f112cacf98f174c7a8bb707e41b974f1c", + "https://bcr.bazel.build/modules/stardoc/0.7.0/MODULE.bazel": "05e3d6d30c099b6770e97da986c53bd31844d7f13d41412480ea265ac9e8079c", + "https://bcr.bazel.build/modules/stardoc/0.7.2/MODULE.bazel": "fc152419aa2ea0f51c29583fab1e8c99ddefd5b3778421845606ee628629e0e5", + "https://bcr.bazel.build/modules/stardoc/0.7.2/source.json": "58b029e5e901d6802967754adf0a9056747e8176f017cfe3607c0851f4d42216", + "https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.1/MODULE.bazel": "5e463fbfba7b1701d957555ed45097d7f984211330106ccd1352c6e0af0dcf91", + "https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.2/MODULE.bazel": "75aab2373a4bbe2a1260b9bf2a1ebbdbf872d3bd36f80bff058dccd82e89422f", + "https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.2/source.json": "5fba48bbe0ba48761f9e9f75f92876cafb5d07c0ce059cc7a8027416de94a05b", + "https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43", + "https://bcr.bazel.build/modules/zlib/1.2.11/MODULE.bazel": "07b389abc85fdbca459b69e2ec656ae5622873af3f845e1c9d80fe179f3effa0", + "https://bcr.bazel.build/modules/zlib/1.3.1.bcr.5/MODULE.bazel": "eec517b5bbe5492629466e11dae908d043364302283de25581e3eb944326c4ca", + "https://bcr.bazel.build/modules/zlib/1.3.1.bcr.5/source.json": "22bc55c47af97246cfc093d0acf683a7869377de362b5d1c552c2c2e16b7a806", + "https://bcr.bazel.build/modules/zlib/1.3.1/MODULE.bazel": "751c9940dcfe869f5f7274e1295422a34623555916eb98c174c1e945594bf198" + }, + "selectedYankedVersions": {}, + "moduleExtensions": {}, + "facts": {} +}