From dca4a3088a3dd1d9d07a31bea00137f9b5f8d4af Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Sun, 22 Mar 2026 23:00:45 +0200 Subject: [PATCH 01/20] chore(husky): prefer local node_modules bins and optional pre-commit on push --- .husky/commit-msg | 18 +++++++++++++++ .husky/pre-commit | 26 +++++++++++++++++++++ .husky/pre-push | 59 +++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 103 insertions(+) create mode 100644 .husky/commit-msg create mode 100644 .husky/pre-commit create mode 100644 .husky/pre-push diff --git a/.husky/commit-msg b/.husky/commit-msg new file mode 100644 index 00000000..8cb681e6 --- /dev/null +++ b/.husky/commit-msg @@ -0,0 +1,18 @@ +#!/bin/sh + +REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" +cd "$REPO_ROOT" || exit 1 + +if [ -z "${NVM_DIR:-}" ]; then + NVM_DIR="$HOME/.nvm" +fi +if [ -s "$NVM_DIR/nvm.sh" ]; then + # shellcheck source=/dev/null + . "$NVM_DIR/nvm.sh" +fi + +if [ -x ./node_modules/.bin/commitlint ]; then + ./node_modules/.bin/commitlint --edit "$1" +else + npx --no -- commitlint --edit "$1" +fi diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 00000000..ab547f90 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,26 @@ +#!/bin/sh +set -e + +REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" +cd "$REPO_ROOT" || exit 1 + +if [ -z "${NVM_DIR:-}" ]; then + NVM_DIR="$HOME/.nvm" +fi +if [ -s "$NVM_DIR/nvm.sh" ]; then + # shellcheck source=/dev/null + . "$NVM_DIR/nvm.sh" +fi + +if [ -x ./node_modules/.bin/lint-staged ]; then + ./node_modules/.bin/lint-staged +else + npx --no -- lint-staged +fi + +cd nemoclaw +if [ -x ./node_modules/.bin/vitest ]; then + ./node_modules/.bin/vitest run +else + npx vitest run +fi diff --git a/.husky/pre-push b/.husky/pre-push new file mode 100644 index 00000000..ebfcefa9 --- /dev/null +++ b/.husky/pre-push @@ -0,0 +1,59 @@ +#!/bin/sh +# Type checking and shared hooks before push — avoids a wasted CI round-trip. +set -e + +REPO_ROOT="$(cd "$(dirname "$0")/.." 
&& pwd)" +cd "$REPO_ROOT" || exit 1 + +# GUI Git / minimal PATH often lacks nvm/npm — load nvm when present so npx works as fallback. +if [ -z "${NVM_DIR:-}" ]; then + NVM_DIR="$HOME/.nvm" +fi +if [ -s "$NVM_DIR/nvm.sh" ]; then + # shellcheck source=/dev/null + . "$NVM_DIR/nvm.sh" +fi + +echo "pre-push: checking TypeScript types..." +cd nemoclaw +if [ -x ./node_modules/.bin/tsc ]; then + ./node_modules/.bin/tsc --noEmit +elif command -v npx >/dev/null 2>&1; then + npx tsc --noEmit +else + echo "pre-push: tsc not found — run: cd nemoclaw && npm install" >&2 + exit 1 +fi +cd "$REPO_ROOT" + +# Pyright (Python) — skip if uv is not installed +if command -v uv >/dev/null 2>&1; then + echo "pre-push: checking Python types..." + (cd nemoclaw-blueprint && uv run --with pyright pyright .) || exit 1 +fi + +# pre-commit: same hooks as .pre-commit-config.yaml — optional if not installed locally. +# Falls back to prek if present (same config file). +if command -v pre-commit >/dev/null 2>&1; then + echo "pre-push: running pre-commit on outgoing commits..." + if git rev-parse @{u} >/dev/null 2>&1; then + FROM=$(git merge-base HEAD @{u}) + pre-commit run --from-ref "$FROM" --to-ref HEAD + elif git rev-parse HEAD~1 >/dev/null 2>&1; then + pre-commit run --from-ref HEAD~1 --to-ref HEAD + else + pre-commit run --all-files + fi +elif command -v prek >/dev/null 2>&1; then + echo "pre-push: running prek on outgoing commits..." + if git rev-parse @{u} >/dev/null 2>&1; then + FROM=$(git merge-base HEAD @{u}) + prek run --from-ref "$FROM" --to-ref HEAD + elif git rev-parse HEAD~1 >/dev/null 2>&1; then + prek run --from-ref HEAD~1 --to-ref HEAD + else + prek run --all-files + fi +else + echo "pre-push: pre-commit not in PATH — skipping shared hooks (pip install pre-commit && pre-commit install). Optional: install prek for the same config without Python." 
>&2 +fi From 2c0e9e1af77f1babba8b68c18cf6e179549e92ea Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Sun, 22 Mar 2026 23:02:51 +0200 Subject: [PATCH 02/20] fix(husky): ensure Node is on PATH for git hook environments --- .husky/commit-msg | 10 ++-------- .husky/pre-commit | 10 ++-------- .husky/pre-push | 11 ++--------- scripts/husky-env.sh | 20 ++++++++++++++++++++ 4 files changed, 26 insertions(+), 25 deletions(-) create mode 100644 scripts/husky-env.sh diff --git a/.husky/commit-msg b/.husky/commit-msg index 8cb681e6..d1fb2c0c 100644 --- a/.husky/commit-msg +++ b/.husky/commit-msg @@ -2,14 +2,8 @@ REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" cd "$REPO_ROOT" || exit 1 - -if [ -z "${NVM_DIR:-}" ]; then - NVM_DIR="$HOME/.nvm" -fi -if [ -s "$NVM_DIR/nvm.sh" ]; then - # shellcheck source=/dev/null - . "$NVM_DIR/nvm.sh" -fi +# shellcheck source=scripts/husky-env.sh +. "$REPO_ROOT/scripts/husky-env.sh" if [ -x ./node_modules/.bin/commitlint ]; then ./node_modules/.bin/commitlint --edit "$1" diff --git a/.husky/pre-commit b/.husky/pre-commit index ab547f90..e8e5cf5b 100644 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -3,14 +3,8 @@ set -e REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" cd "$REPO_ROOT" || exit 1 - -if [ -z "${NVM_DIR:-}" ]; then - NVM_DIR="$HOME/.nvm" -fi -if [ -s "$NVM_DIR/nvm.sh" ]; then - # shellcheck source=/dev/null - . "$NVM_DIR/nvm.sh" -fi +# shellcheck source=scripts/husky-env.sh +. "$REPO_ROOT/scripts/husky-env.sh" if [ -x ./node_modules/.bin/lint-staged ]; then ./node_modules/.bin/lint-staged diff --git a/.husky/pre-push b/.husky/pre-push index ebfcefa9..2d1ff29f 100644 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -4,15 +4,8 @@ set -e REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" cd "$REPO_ROOT" || exit 1 - -# GUI Git / minimal PATH often lacks nvm/npm — load nvm when present so npx works as fallback. -if [ -z "${NVM_DIR:-}" ]; then - NVM_DIR="$HOME/.nvm" -fi -if [ -s "$NVM_DIR/nvm.sh" ]; then - # shellcheck source=/dev/null - . 
"$NVM_DIR/nvm.sh" -fi +# shellcheck source=scripts/husky-env.sh +. "$REPO_ROOT/scripts/husky-env.sh" echo "pre-push: checking TypeScript types..." cd nemoclaw diff --git a/scripts/husky-env.sh b/scripts/husky-env.sh new file mode 100644 index 00000000..27c001f7 --- /dev/null +++ b/scripts/husky-env.sh @@ -0,0 +1,20 @@ +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Sourced by .husky/* hooks. Git GUI / non-login shells often omit PATH entries where Node lives, +# which breaks bin stubs that use `#!/usr/bin/env node` (tsc, vitest, lint-staged, etc.). + +PATH="/opt/homebrew/bin:/usr/local/bin:/home/linuxbrew/.linuxbrew/bin:$PATH" +export PATH + +if [ -z "${NVM_DIR:-}" ]; then + NVM_DIR="$HOME/.nvm" +fi +if [ -s "$NVM_DIR/nvm.sh" ]; then + # shellcheck source=/dev/null + . "$NVM_DIR/nvm.sh" +fi + +if command -v fnm >/dev/null 2>&1; then + eval "$(fnm env)" || true +fi From af72b667f404cd0d47a7e0ae3bf3449758647ea0 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Sun, 22 Mar 2026 23:06:44 +0200 Subject: [PATCH 03/20] chore(husky): update hook scripts to executable permissions --- .husky/commit-msg | 4 +++- .husky/pre-commit | 4 +++- .husky/pre-push | 36 +++++++++++++++++++----------------- 3 files changed, 25 insertions(+), 19 deletions(-) mode change 100644 => 100755 .husky/commit-msg mode change 100644 => 100755 .husky/pre-commit mode change 100644 => 100755 .husky/pre-push diff --git a/.husky/commit-msg b/.husky/commit-msg old mode 100644 new mode 100755 index d1fb2c0c..d967e943 --- a/.husky/commit-msg +++ b/.husky/commit-msg @@ -1,8 +1,10 @@ #!/bin/sh +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" cd "$REPO_ROOT" || exit 1 -# shellcheck source=scripts/husky-env.sh +# shellcheck disable=SC1091 . 
"$REPO_ROOT/scripts/husky-env.sh" if [ -x ./node_modules/.bin/commitlint ]; then diff --git a/.husky/pre-commit b/.husky/pre-commit old mode 100644 new mode 100755 index e8e5cf5b..314a03a0 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,9 +1,11 @@ #!/bin/sh +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 set -e REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" cd "$REPO_ROOT" || exit 1 -# shellcheck source=scripts/husky-env.sh +# shellcheck disable=SC1091 . "$REPO_ROOT/scripts/husky-env.sh" if [ -x ./node_modules/.bin/lint-staged ]; then diff --git a/.husky/pre-push b/.husky/pre-push old mode 100644 new mode 100755 index 2d1ff29f..161d17a0 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -1,10 +1,13 @@ #!/bin/sh +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# # Type checking and shared hooks before push — avoids a wasted CI round-trip. set -e REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" cd "$REPO_ROOT" || exit 1 -# shellcheck source=scripts/husky-env.sh +# shellcheck disable=SC1091 . "$REPO_ROOT/scripts/husky-env.sh" echo "pre-push: checking TypeScript types..." @@ -25,28 +28,27 @@ if command -v uv >/dev/null 2>&1; then (cd nemoclaw-blueprint && uv run --with pyright pyright .) || exit 1 fi -# pre-commit: same hooks as .pre-commit-config.yaml — optional if not installed locally. -# Falls back to prek if present (same config file). -if command -v pre-commit >/dev/null 2>&1; then - echo "pre-push: running pre-commit on outgoing commits..." 
- if git rev-parse @{u} >/dev/null 2>&1; then - FROM=$(git merge-base HEAD @{u}) - pre-commit run --from-ref "$FROM" --to-ref HEAD - elif git rev-parse HEAD~1 >/dev/null 2>&1; then - pre-commit run --from-ref HEAD~1 --to-ref HEAD - else - pre-commit run --all-files - fi -elif command -v prek >/dev/null 2>&1; then +# prek or pre-commit: same hooks as .pre-commit-config.yaml — optional if not installed locally. +if command -v prek >/dev/null 2>&1; then echo "pre-push: running prek on outgoing commits..." - if git rev-parse @{u} >/dev/null 2>&1; then - FROM=$(git merge-base HEAD @{u}) + if git rev-parse '@{u}' >/dev/null 2>&1; then + FROM=$(git merge-base HEAD '@{u}') prek run --from-ref "$FROM" --to-ref HEAD elif git rev-parse HEAD~1 >/dev/null 2>&1; then prek run --from-ref HEAD~1 --to-ref HEAD else prek run --all-files fi +elif command -v pre-commit >/dev/null 2>&1; then + echo "pre-push: running pre-commit on outgoing commits..." + if git rev-parse '@{u}' >/dev/null 2>&1; then + FROM=$(git merge-base HEAD '@{u}') + pre-commit run --from-ref "$FROM" --to-ref HEAD + elif git rev-parse HEAD~1 >/dev/null 2>&1; then + pre-commit run --from-ref HEAD~1 --to-ref HEAD + else + pre-commit run --all-files + fi else - echo "pre-push: pre-commit not in PATH — skipping shared hooks (pip install pre-commit && pre-commit install). Optional: install prek for the same config without Python." >&2 + echo "pre-push: prek/pre-commit not in PATH — skipping shared hooks (install prek or: pip install pre-commit)." >&2 fi From 7219160ac4b779ba22a28ef271a5b86be9a53346 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:06:18 +0200 Subject: [PATCH 04/20] fix(husky): run pyright with dev extras for pytest types uv run --with pyright omitted optional dev deps; pytest was unresolved in orchestrator/test_endpoint_validation.py. Align pre-push and pre-commit with nemoclaw-blueprint Makefile (uv run --extra dev --with pyright). 
--- .husky/pre-push | 2 +- .pre-commit-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.husky/pre-push b/.husky/pre-push index 161d17a0..00927202 100755 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -25,7 +25,7 @@ cd "$REPO_ROOT" # Pyright (Python) — skip if uv is not installed if command -v uv >/dev/null 2>&1; then echo "pre-push: checking Python types..." - (cd nemoclaw-blueprint && uv run --with pyright pyright .) || exit 1 + (cd nemoclaw-blueprint && uv run --extra dev --with pyright pyright .) || exit 1 fi # prek or pre-commit: same hooks as .pre-commit-config.yaml — optional if not installed locally. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cac42595..cf5c279b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -181,7 +181,7 @@ repos: - id: pyright-check name: Pyright (nemoclaw-blueprint) - entry: bash -c 'cd nemoclaw-blueprint && uv run --with pyright pyright .' + entry: bash -c 'cd nemoclaw-blueprint && uv run --extra dev --with pyright pyright .' 
language: system pass_filenames: false always_run: true From 22a35fad7772a5570400fedec004536cc11ade0b Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:09:49 +0200 Subject: [PATCH 05/20] fix(tooling): unblock prek shellcheck, hadolint, and vitest hooks - husky-env: shellcheck shell=bash for SC2148 - Dockerfile: hadolint pragmas and merged pip install with pyyaml pin - vitest pre-commit: use repo-root node_modules vitest (avoid broken npx) --- .pre-commit-config.yaml | 2 +- Dockerfile | 9 ++++----- scripts/husky-env.sh | 1 + 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cf5c279b..cbc59e15 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -152,7 +152,7 @@ repos: hooks: - id: vitest-plugin name: Vitest (plugin project) - entry: npx vitest run --project plugin + entry: bash -c 'root="$(git rev-parse --show-toplevel)" && cd "$root" && exec ./node_modules/.bin/vitest run --project plugin' language: system pass_filenames: false files: ^nemoclaw/ diff --git a/Dockerfile b/Dockerfile index 471a806b..fe85ed2e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,6 +12,7 @@ FROM node:22-slim@sha256:4f77a690f2f8946ab16fe1e791a3ac0667ae1c3575c3e4d0d4589e9 ENV DEBIAN_FRONTEND=noninteractive +# hadolint ignore=DL3008 RUN apt-get update && apt-get install -y --no-install-recommends \ python3 python3-pip python3-venv \ curl git ca-certificates \ @@ -51,11 +52,9 @@ RUN mkdir -p /sandbox/.openclaw-data/agents/main/agent \ && ln -s /sandbox/.openclaw-data/update-check.json /sandbox/.openclaw/update-check.json \ && chown -R sandbox:sandbox /sandbox/.openclaw /sandbox/.openclaw-data -# Install OpenClaw CLI -RUN npm install -g openclaw@2026.3.11 - -# Install PyYAML for blueprint runner -RUN pip3 install --break-system-packages pyyaml +# Install OpenClaw CLI and PyYAML for blueprint runner (single layer) +RUN npm install -g openclaw@2026.3.11 \ + && pip3 install --no-cache-dir 
--break-system-packages "pyyaml==6.0.2" # Copy built plugin and blueprint into the sandbox COPY --from=builder /opt/nemoclaw/dist/ /opt/nemoclaw/dist/ diff --git a/scripts/husky-env.sh b/scripts/husky-env.sh index 27c001f7..86a8b8bd 100644 --- a/scripts/husky-env.sh +++ b/scripts/husky-env.sh @@ -1,3 +1,4 @@ +# shellcheck shell=bash # SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # From d257ad110ec5a6e810be4e0ebee5d97a11a86694 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:14:21 +0200 Subject: [PATCH 06/20] fix(npm): skip prek install when core.hooksPath is set Husky sets core.hooksPath; prek refuses to install in that case. Still run prek install for clones without hooksPath. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 504f8899..bf29661d 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ }, "scripts": { "test": "vitest run", - "prepare": "if [ -d .git ]; then prek install --hook-type pre-commit --hook-type commit-msg --hook-type pre-push; fi", + "prepare": "if [ -d .git ] && ! 
git config --get core.hooksPath >/dev/null 2>&1; then prek install --hook-type pre-commit --hook-type commit-msg --hook-type pre-push; fi", "prepublishOnly": "cd nemoclaw && env -u npm_config_global -u npm_config_prefix -u npm_config_omit npm install --ignore-scripts && ./node_modules/.bin/tsc" }, "dependencies": { From f84ed49072ff04d673a7da4526ececc30196be08 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:19:22 +0200 Subject: [PATCH 07/20] chore: centralize prek hook types and format shell scripts - Add default_install_hook_types (pre-commit, commit-msg, pre-push) to .pre-commit-config.yaml - Simplify npm prepare to run prek install without repeated --hook-type flags - Align backup-workspace, brev-setup, and install-openshell with shfmt/shellcheck style --- .pre-commit-config.yaml | 7 ++ package.json | 2 +- scripts/backup-workspace.sh | 155 ++++++++++++++++++----------------- scripts/brev-setup.sh | 59 ++++++------- scripts/install-openshell.sh | 4 +- 5 files changed, 120 insertions(+), 107 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cbc59e15..b3bafd03 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,6 +20,13 @@ exclude: ^(nemoclaw/dist/|nemoclaw/node_modules/|docs/_build/|\.venv/|uv\.lock$) +# Which git hook shims `prek install` writes (separate from each hook's `stages:`). +# https://prek.j178.dev/configuration/#default_install_hook_types +default_install_hook_types: + - pre-commit + - commit-msg + - pre-push + repos: # ── Priority 0: general file fixers ─────────────────────────────────────── - repo: https://github.com/pre-commit/pre-commit-hooks diff --git a/package.json b/package.json index bf29661d..0b4bbb07 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ }, "scripts": { "test": "vitest run", - "prepare": "if [ -d .git ] && ! 
git config --get core.hooksPath >/dev/null 2>&1; then prek install --hook-type pre-commit --hook-type commit-msg --hook-type pre-push; fi", + "prepare": "if [ -d .git ] && ! git config --get core.hooksPath >/dev/null 2>&1; then prek install; fi", "prepublishOnly": "cd nemoclaw && env -u npm_config_global -u npm_config_prefix -u npm_config_omit npm install --ignore-scripts && ./node_modules/.bin/tsc" }, "dependencies": { diff --git a/scripts/backup-workspace.sh b/scripts/backup-workspace.sh index 0e83ca3f..57382344 100755 --- a/scripts/backup-workspace.sh +++ b/scripts/backup-workspace.sh @@ -16,10 +16,13 @@ NC='\033[0m' info() { echo -e "${GREEN}[backup]${NC} $1"; } warn() { echo -e "${YELLOW}[backup]${NC} $1"; } -fail() { echo -e "${RED}[backup]${NC} $1" >&2; exit 1; } +fail() { + echo -e "${RED}[backup]${NC} $1" >&2 + exit 1 +} usage() { - cat < $(basename "$0") restore [timestamp] @@ -31,88 +34,88 @@ Commands: Backup location: ${BACKUP_BASE}// EOF - exit 1 + exit 1 } do_backup() { - local sandbox="$1" - local ts - ts="$(date +%Y%m%d-%H%M%S)" - local dest="${BACKUP_BASE}/${ts}" - - mkdir -p "$BACKUP_BASE" - chmod 0700 "${HOME}/.nemoclaw" "$BACKUP_BASE" || \ - fail "Failed to set secure permissions on ${HOME}/.nemoclaw — check directory ownership." - mkdir -p "$dest" - chmod 0700 "$dest" - - info "Backing up workspace from sandbox '${sandbox}'..." - - local count=0 - for f in "${FILES[@]}"; do - if openshell sandbox download "$sandbox" "${WORKSPACE_PATH}/${f}" "${dest}/"; then - count=$((count + 1)) - else - warn "Skipped ${f} (not found or download failed)" - fi - done - - for d in "${DIRS[@]}"; do - if openshell sandbox download "$sandbox" "${WORKSPACE_PATH}/${d}/" "${dest}/${d}/"; then - count=$((count + 1)) - else - warn "Skipped ${d}/ (not found or download failed)" - fi - done - - if [ "$count" -eq 0 ]; then - fail "No files were backed up. Check that the sandbox '${sandbox}' exists and has workspace files." 
+ local sandbox="$1" + local ts + ts="$(date +%Y%m%d-%H%M%S)" + local dest="${BACKUP_BASE}/${ts}" + + mkdir -p "$BACKUP_BASE" + chmod 0700 "${HOME}/.nemoclaw" "$BACKUP_BASE" \ + || fail "Failed to set secure permissions on ${HOME}/.nemoclaw — check directory ownership." + mkdir -p "$dest" + chmod 0700 "$dest" + + info "Backing up workspace from sandbox '${sandbox}'..." + + local count=0 + for f in "${FILES[@]}"; do + if openshell sandbox download "$sandbox" "${WORKSPACE_PATH}/${f}" "${dest}/"; then + count=$((count + 1)) + else + warn "Skipped ${f} (not found or download failed)" + fi + done + + for d in "${DIRS[@]}"; do + if openshell sandbox download "$sandbox" "${WORKSPACE_PATH}/${d}/" "${dest}/${d}/"; then + count=$((count + 1)) + else + warn "Skipped ${d}/ (not found or download failed)" fi + done + + if [ "$count" -eq 0 ]; then + fail "No files were backed up. Check that the sandbox '${sandbox}' exists and has workspace files." + fi - info "Backup saved to ${dest}/ (${count} items)" + info "Backup saved to ${dest}/ (${count} items)" } do_restore() { - local sandbox="$1" - local ts="${2:-}" - - if [ -z "$ts" ]; then - ts="$(ls -1 "$BACKUP_BASE" 2>/dev/null | sort -r | head -n1)" - [ -n "$ts" ] || fail "No backups found in ${BACKUP_BASE}/" - info "Using most recent backup: ${ts}" + local sandbox="$1" + local ts="${2:-}" + + if [ -z "$ts" ]; then + ts="$(ls -1 "$BACKUP_BASE" 2>/dev/null | sort -r | head -n1)" + [ -n "$ts" ] || fail "No backups found in ${BACKUP_BASE}/" + info "Using most recent backup: ${ts}" + fi + + local src="${BACKUP_BASE}/${ts}" + [ -d "$src" ] || fail "Backup directory not found: ${src}" + + info "Restoring workspace to sandbox '${sandbox}' from ${src}..." 
+ + local count=0 + for f in "${FILES[@]}"; do + if [ -f "${src}/${f}" ]; then + if openshell sandbox upload "$sandbox" "${src}/${f}" "${WORKSPACE_PATH}/"; then + count=$((count + 1)) + else + warn "Failed to restore ${f}" + fi fi - - local src="${BACKUP_BASE}/${ts}" - [ -d "$src" ] || fail "Backup directory not found: ${src}" - - info "Restoring workspace to sandbox '${sandbox}' from ${src}..." - - local count=0 - for f in "${FILES[@]}"; do - if [ -f "${src}/${f}" ]; then - if openshell sandbox upload "$sandbox" "${src}/${f}" "${WORKSPACE_PATH}/"; then - count=$((count + 1)) - else - warn "Failed to restore ${f}" - fi - fi - done - - for d in "${DIRS[@]}"; do - if [ -d "${src}/${d}" ]; then - if openshell sandbox upload "$sandbox" "${src}/${d}/" "${WORKSPACE_PATH}/${d}/"; then - count=$((count + 1)) - else - warn "Failed to restore ${d}/" - fi - fi - done - - if [ "$count" -eq 0 ]; then - fail "No files were restored. Check that the sandbox '${sandbox}' is running." + done + + for d in "${DIRS[@]}"; do + if [ -d "${src}/${d}" ]; then + if openshell sandbox upload "$sandbox" "${src}/${d}/" "${WORKSPACE_PATH}/${d}/"; then + count=$((count + 1)) + else + warn "Failed to restore ${d}/" + fi fi + done + + if [ "$count" -eq 0 ]; then + fail "No files were restored. Check that the sandbox '${sandbox}' is running." + fi - info "Restored ${count} items to sandbox '${sandbox}'." + info "Restored ${count} items to sandbox '${sandbox}'." 
} # --- Main --- @@ -125,7 +128,7 @@ sandbox="$2" shift 2 case "$action" in - backup) do_backup "$sandbox" ;; - restore) do_restore "$sandbox" "$@" ;; - *) usage ;; + backup) do_backup "$sandbox" ;; + restore) do_restore "$sandbox" "$@" ;; + *) usage ;; esac diff --git a/scripts/brev-setup.sh b/scripts/brev-setup.sh index a4b42157..cc8701ba 100755 --- a/scripts/brev-setup.sh +++ b/scripts/brev-setup.sh @@ -23,7 +23,10 @@ NC='\033[0m' info() { echo -e "${GREEN}[brev]${NC} $1"; } warn() { echo -e "${YELLOW}[brev]${NC} $1"; } -fail() { echo -e "${RED}[brev]${NC} $1"; exit 1; } +fail() { + echo -e "${RED}[brev]${NC} $1" + exit 1 +} SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" @@ -34,20 +37,20 @@ export NEEDRESTART_MODE=a export DEBIAN_FRONTEND=noninteractive # --- 0. Node.js (needed for services) --- -if ! command -v node > /dev/null 2>&1; then +if ! command -v node >/dev/null 2>&1; then info "Installing Node.js..." - curl -fsSL https://deb.nodesource.com/setup_22.x | sudo -E bash - > /dev/null 2>&1 - sudo apt-get install -y -qq nodejs > /dev/null 2>&1 + curl -fsSL https://deb.nodesource.com/setup_22.x | sudo -E bash - >/dev/null 2>&1 + sudo apt-get install -y -qq nodejs >/dev/null 2>&1 info "Node.js $(node --version) installed" else info "Node.js already installed: $(node --version)" fi # --- 1. Docker --- -if ! command -v docker > /dev/null 2>&1; then +if ! command -v docker >/dev/null 2>&1; then info "Installing Docker..." - sudo apt-get update -qq > /dev/null 2>&1 - sudo apt-get install -y -qq docker.io > /dev/null 2>&1 + sudo apt-get update -qq >/dev/null 2>&1 + sudo apt-get install -y -qq docker.io >/dev/null 2>&1 sudo usermod -aG docker "$(whoami)" info "Docker installed" else @@ -55,17 +58,17 @@ else fi # --- 2. NVIDIA Container Toolkit (if GPU present) --- -if command -v nvidia-smi > /dev/null 2>&1; then - if ! dpkg -s nvidia-container-toolkit > /dev/null 2>&1; then +if command -v nvidia-smi >/dev/null 2>&1; then + if ! 
dpkg -s nvidia-container-toolkit >/dev/null 2>&1; then info "Installing NVIDIA Container Toolkit..." curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey \ | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list \ | sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' \ - | sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list > /dev/null - sudo apt-get update -qq > /dev/null 2>&1 - sudo apt-get install -y -qq nvidia-container-toolkit > /dev/null 2>&1 - sudo nvidia-ctk runtime configure --runtime=docker > /dev/null 2>&1 + | sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list >/dev/null + sudo apt-get update -qq >/dev/null 2>&1 + sudo apt-get install -y -qq nvidia-container-toolkit >/dev/null 2>&1 + sudo nvidia-ctk runtime configure --runtime=docker >/dev/null 2>&1 sudo systemctl restart docker info "NVIDIA Container Toolkit installed" else @@ -74,16 +77,16 @@ if command -v nvidia-smi > /dev/null 2>&1; then fi # --- 3. openshell CLI (binary release, not pip) --- -if ! command -v openshell > /dev/null 2>&1; then +if ! command -v openshell >/dev/null 2>&1; then info "Installing openshell CLI from GitHub release..." - if ! command -v gh > /dev/null 2>&1; then - sudo apt-get update -qq > /dev/null 2>&1 - sudo apt-get install -y -qq gh > /dev/null 2>&1 + if ! 
command -v gh >/dev/null 2>&1; then + sudo apt-get update -qq >/dev/null 2>&1 + sudo apt-get install -y -qq gh >/dev/null 2>&1 fi ARCH="$(uname -m)" case "$ARCH" in - x86_64|amd64) ASSET="openshell-x86_64-unknown-linux-musl.tar.gz" ;; - aarch64|arm64) ASSET="openshell-aarch64-unknown-linux-musl.tar.gz" ;; + x86_64 | amd64) ASSET="openshell-x86_64-unknown-linux-musl.tar.gz" ;; + aarch64 | arm64) ASSET="openshell-aarch64-unknown-linux-musl.tar.gz" ;; *) fail "Unsupported architecture: $ARCH" ;; esac tmpdir="$(mktemp -d)" @@ -98,12 +101,12 @@ else fi # --- 3b. cloudflared (for public tunnel) --- -if ! command -v cloudflared > /dev/null 2>&1; then +if ! command -v cloudflared >/dev/null 2>&1; then info "Installing cloudflared..." CF_ARCH="$(uname -m)" case "$CF_ARCH" in - x86_64|amd64) CF_ARCH="amd64" ;; - aarch64|arm64) CF_ARCH="arm64" ;; + x86_64 | amd64) CF_ARCH="amd64" ;; + aarch64 | arm64) CF_ARCH="arm64" ;; *) fail "Unsupported architecture for cloudflared: $CF_ARCH" ;; esac tmpdir=$(mktemp -d) @@ -117,11 +120,11 @@ fi # --- 4. vLLM (local inference, if GPU present) --- VLLM_MODEL="nvidia/nemotron-3-nano-30b-a3b" -if command -v nvidia-smi > /dev/null 2>&1; then +if command -v nvidia-smi >/dev/null 2>&1; then if ! python3 -c "import vllm" 2>/dev/null; then info "Installing vLLM..." - if ! command -v pip3 > /dev/null 2>&1; then - sudo apt-get install -y -qq python3-pip > /dev/null 2>&1 + if ! command -v pip3 >/dev/null 2>&1; then + sudo apt-get install -y -qq python3-pip >/dev/null 2>&1 fi pip3 install --break-system-packages vllm 2>/dev/null || pip3 install vllm info "vLLM installed" @@ -130,7 +133,7 @@ if command -v nvidia-smi > /dev/null 2>&1; then fi # Start vLLM if not already running - if curl -s http://localhost:8000/v1/models > /dev/null 2>&1; then + if curl -s http://localhost:8000/v1/models >/dev/null 2>&1; then info "vLLM already running on :8000" elif python3 -c "import vllm" 2>/dev/null; then info "Starting vLLM with $VLLM_MODEL..." 
@@ -138,11 +141,11 @@ if command -v nvidia-smi > /dev/null 2>&1; then --model "$VLLM_MODEL" \ --port 8000 \ --host 0.0.0.0 \ - > /tmp/vllm-server.log 2>&1 & + >/tmp/vllm-server.log 2>&1 & VLLM_PID=$! info "Waiting for vLLM to load model (this can take a few minutes)..." for _ in $(seq 1 120); do - if curl -s http://localhost:8000/v1/models > /dev/null 2>&1; then + if curl -s http://localhost:8000/v1/models >/dev/null 2>&1; then info "vLLM ready (PID $VLLM_PID)" break fi diff --git a/scripts/install-openshell.sh b/scripts/install-openshell.sh index 1eeec7d2..dbbeed40 100755 --- a/scripts/install-openshell.sh +++ b/scripts/install-openshell.sh @@ -40,8 +40,8 @@ version_gte() { # Returns 0 (true) if $1 >= $2 — portable, no sort -V (BSD compat) local IFS=. local -a a b - read -r -a a <<< "$1" - read -r -a b <<< "$2" + read -r -a a <<<"$1" + read -r -a b <<<"$2" for i in 0 1 2; do local ai=${a[$i]:-0} bi=${b[$i]:-0} if ((ai > bi)); then return 0; fi From 6e372e682ba880b381732ba8695dc47a4b7fadbf Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:29:58 +0200 Subject: [PATCH 08/20] refactor: replace Husky hooks with prek-only equivalents Remove .husky/ directory, scripts/husky-env.sh, and the lint-staged dependency. All hook logic now lives in .pre-commit-config.yaml: - pre-commit: formatters, linters, Vitest (unchanged) - commit-msg: commitlint (unchanged) - pre-push: tsc, pyright, plus prek-push-range which re-runs pre-commit hooks on the outgoing commit range The prepare script unconditionally runs `prek install` (no more core.hooksPath guard). 
Contributors with a stale Husky hooksPath should run: git config --unset core.hooksPath --- .husky/commit-msg | 14 ----------- .husky/pre-commit | 22 ----------------- .husky/pre-push | 54 ----------------------------------------- .pre-commit-config.yaml | 14 +++++++++-- CONTRIBUTING.md | 2 ++ package.json | 2 +- scripts/husky-env.sh | 21 ---------------- 7 files changed, 15 insertions(+), 114 deletions(-) delete mode 100755 .husky/commit-msg delete mode 100755 .husky/pre-commit delete mode 100755 .husky/pre-push delete mode 100644 scripts/husky-env.sh diff --git a/.husky/commit-msg b/.husky/commit-msg deleted file mode 100755 index d967e943..00000000 --- a/.husky/commit-msg +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 - -REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" -cd "$REPO_ROOT" || exit 1 -# shellcheck disable=SC1091 -. "$REPO_ROOT/scripts/husky-env.sh" - -if [ -x ./node_modules/.bin/commitlint ]; then - ./node_modules/.bin/commitlint --edit "$1" -else - npx --no -- commitlint --edit "$1" -fi diff --git a/.husky/pre-commit b/.husky/pre-commit deleted file mode 100755 index 314a03a0..00000000 --- a/.husky/pre-commit +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/sh -# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -set -e - -REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" -cd "$REPO_ROOT" || exit 1 -# shellcheck disable=SC1091 -. 
"$REPO_ROOT/scripts/husky-env.sh" - -if [ -x ./node_modules/.bin/lint-staged ]; then - ./node_modules/.bin/lint-staged -else - npx --no -- lint-staged -fi - -cd nemoclaw -if [ -x ./node_modules/.bin/vitest ]; then - ./node_modules/.bin/vitest run -else - npx vitest run -fi diff --git a/.husky/pre-push b/.husky/pre-push deleted file mode 100755 index 00927202..00000000 --- a/.husky/pre-push +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/sh -# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -# Type checking and shared hooks before push — avoids a wasted CI round-trip. -set -e - -REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" -cd "$REPO_ROOT" || exit 1 -# shellcheck disable=SC1091 -. "$REPO_ROOT/scripts/husky-env.sh" - -echo "pre-push: checking TypeScript types..." -cd nemoclaw -if [ -x ./node_modules/.bin/tsc ]; then - ./node_modules/.bin/tsc --noEmit -elif command -v npx >/dev/null 2>&1; then - npx tsc --noEmit -else - echo "pre-push: tsc not found — run: cd nemoclaw && npm install" >&2 - exit 1 -fi -cd "$REPO_ROOT" - -# Pyright (Python) — skip if uv is not installed -if command -v uv >/dev/null 2>&1; then - echo "pre-push: checking Python types..." - (cd nemoclaw-blueprint && uv run --extra dev --with pyright pyright .) || exit 1 -fi - -# prek or pre-commit: same hooks as .pre-commit-config.yaml — optional if not installed locally. -if command -v prek >/dev/null 2>&1; then - echo "pre-push: running prek on outgoing commits..." - if git rev-parse '@{u}' >/dev/null 2>&1; then - FROM=$(git merge-base HEAD '@{u}') - prek run --from-ref "$FROM" --to-ref HEAD - elif git rev-parse HEAD~1 >/dev/null 2>&1; then - prek run --from-ref HEAD~1 --to-ref HEAD - else - prek run --all-files - fi -elif command -v pre-commit >/dev/null 2>&1; then - echo "pre-push: running pre-commit on outgoing commits..." 
- if git rev-parse '@{u}' >/dev/null 2>&1; then - FROM=$(git merge-base HEAD '@{u}') - pre-commit run --from-ref "$FROM" --to-ref HEAD - elif git rev-parse HEAD~1 >/dev/null 2>&1; then - pre-commit run --from-ref HEAD~1 --to-ref HEAD - else - pre-commit run --all-files - fi -else - echo "pre-push: prek/pre-commit not in PATH — skipping shared hooks (install prek or: pip install pre-commit)." >&2 -fi diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b3bafd03..178b8557 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,10 @@ # NemoClaw — prek hook configuration # prek: https://github.com/j178/prek — single binary, no Python required for the runner # Installed as an npm devDependency (@j178/prek) — available after `npm install`. -# All git hooks (pre-commit, commit-msg, pre-push) are managed by prek via this file. -# The "prepare" script in package.json runs `prek install` to wire them up. +# All git hooks (pre-commit, commit-msg, pre-push) are managed by prek via this file only. +# The "prepare" script in package.json runs `prek install` (writes `.git/hooks/*`). +# If you previously used Husky, run: git config --unset core.hooksPath +# then `npm install` again so Git uses the hooks prek installs. 
# # Usage: # npx prek install @@ -194,6 +196,14 @@ repos: always_run: true stages: [pre-push] + - id: prek-push-range + name: Re-run pre-commit hooks on outgoing commits + entry: bash -c 'if git rev-parse "@{u}" >/dev/null 2>&1; then FROM=$(git merge-base HEAD "@{u}"); prek run --from-ref "$FROM" --to-ref HEAD; elif git rev-parse HEAD~1 >/dev/null 2>&1; then prek run --from-ref HEAD~1 --to-ref HEAD; else prek run --all-files; fi' + language: system + pass_filenames: false + always_run: true + stages: [pre-push] + default_language_version: python: python3 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f8f8405d..58ca61f7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -71,6 +71,8 @@ All git hooks are managed by [prek](https://prek.j178.dev/), a fast, single-bina For a full manual check: `npx prek run --all-files`. For scoped runs: `npx prek run --from-ref --to-ref HEAD`. +If you still have `core.hooksPath` set from an old Husky setup, Git will ignore `.git/hooks`. Run `git config --unset core.hooksPath` in this repo, then `npm install` so `prek install` (via `prepare`) can register the hooks. + `make check` remains the primary documented linter entry point. ## Project Structure diff --git a/package.json b/package.json index 0b4bbb07..cbbe0bc3 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ }, "scripts": { "test": "vitest run", - "prepare": "if [ -d .git ] && ! git config --get core.hooksPath >/dev/null 2>&1; then prek install; fi", + "prepare": "if [ -d .git ]; then prek install; fi", "prepublishOnly": "cd nemoclaw && env -u npm_config_global -u npm_config_prefix -u npm_config_omit npm install --ignore-scripts && ./node_modules/.bin/tsc" }, "dependencies": { diff --git a/scripts/husky-env.sh b/scripts/husky-env.sh deleted file mode 100644 index 86a8b8bd..00000000 --- a/scripts/husky-env.sh +++ /dev/null @@ -1,21 +0,0 @@ -# shellcheck shell=bash -# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
-# SPDX-License-Identifier: Apache-2.0 -# -# Sourced by .husky/* hooks. Git GUI / non-login shells often omit PATH entries where Node lives, -# which breaks bin stubs that use `#!/usr/bin/env node` (tsc, vitest, lint-staged, etc.). - -PATH="/opt/homebrew/bin:/usr/local/bin:/home/linuxbrew/.linuxbrew/bin:$PATH" -export PATH - -if [ -z "${NVM_DIR:-}" ]; then - NVM_DIR="$HOME/.nvm" -fi -if [ -s "$NVM_DIR/nvm.sh" ]; then - # shellcheck source=/dev/null - . "$NVM_DIR/nvm.sh" -fi - -if command -v fnm >/dev/null 2>&1; then - eval "$(fnm env)" || true -fi From d92dc47daef952431a22b652801a14ef226200dc Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:34:52 +0200 Subject: [PATCH 09/20] ci: run prek in PR workflow instead of make check Replace the ad-hoc make check (eslint + prettier + tsc + ruff) with npx prek run --all-files, which covers every hook in .pre-commit-config.yaml (shfmt, shellcheck, hadolint, gitleaks, SPDX headers, etc.) in addition to the existing linters. Pre-push stage hooks (tsc --noEmit, pyright) run in a second step with --skip prek-push-range to avoid recursion. make check now delegates to prek as well. --- .github/PULL_REQUEST_TEMPLATE.md | 4 +- .github/workflows/pr.yaml | 23 +++- .pre-commit-config.yaml | 2 +- Makefile | 5 +- scripts/docs-to-skills.py | 223 +++++++++++++++++++++++-------- 5 files changed, 192 insertions(+), 65 deletions(-) mode change 100644 => 100755 scripts/docs-to-skills.py diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 9e84b051..d1edcbea 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -16,7 +16,7 @@ ## Testing -- [ ] `make check` passes. +- [ ] `npx prek run --all-files` passes (or equivalently `make check`). - [ ] `npm test` passes. - [ ] `make docs` builds without warnings. (for doc-only changes) @@ -28,7 +28,7 @@ ### Code Changes -- [ ] `make format` applied (TypeScript and Python). 
+- [ ] Formatters applied — `npx prek run --all-files` auto-fixes formatting (or `make format` for targeted runs). - [ ] Tests added or updated for new or changed behavior. - [ ] No secrets, API keys, or credentials committed. - [ ] Doc pages updated for any user-facing behavior changes (new commands, changed defaults, new features, bug fixes that contradict existing docs). diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 372aa074..230eea74 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -17,7 +17,7 @@ concurrency: jobs: lint: runs-on: ubuntu-latest - timeout-minutes: 5 + timeout-minutes: 10 steps: - name: Checkout uses: actions/checkout@v4 @@ -36,15 +36,24 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v4 - - name: Install ruff - run: uv tool install ruff + - name: Install root dependencies + run: npm install --ignore-scripts - - name: Install Node dependencies + - name: Install and build TypeScript plugin working-directory: nemoclaw - run: npm install + run: | + npm install + npm run build + + - name: Install Python dependencies + working-directory: nemoclaw-blueprint + run: uv sync --extra dev + + - name: Run all pre-commit hooks + run: npx prek run --all-files - - name: Run all linters - run: make check + - name: Run pre-push hooks (tsc + pyright) + run: npx prek run --all-files --stage pre-push --skip prek-push-range test-unit: runs-on: ubuntu-latest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 178b8557..74c1709e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -136,7 +136,7 @@ repos: - repo: https://github.com/hadolint/hadolint rev: v2.14.0 hooks: - - id: hadolint + - id: hadolint-docker priority: 10 - repo: local diff --git a/Makefile b/Makefile index 7eaf3c64..89330130 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,10 @@ .PHONY: check lint format lint-ts lint-py format-ts format-py docs docs-strict docs-live docs-clean -check: lint-ts lint-py +check: + npx 
prek run --all-files @echo "All checks passed." -lint: lint-ts lint-py +lint: check lint-ts: cd nemoclaw && npm run check diff --git a/scripts/docs-to-skills.py b/scripts/docs-to-skills.py old mode 100644 new mode 100755 index 805b2079..5e8fed79 --- a/scripts/docs-to-skills.py +++ b/scripts/docs-to-skills.py @@ -61,6 +61,7 @@ # Frontmatter / doc parsing # --------------------------------------------------------------------------- + @dataclass class DocPage: """A single documentation page with parsed metadata and content.""" @@ -96,7 +97,7 @@ def parse_yaml_frontmatter(text: str) -> tuple[dict, str]: return {}, text fm_text = text[4:end].strip() - body = text[end + 4:].strip() + body = text[end + 4 :].strip() fm = _parse_simple_yaml(fm_text) return fm, body @@ -105,7 +106,6 @@ def _parse_simple_yaml(text: str) -> dict: """Minimal YAML parser for doc frontmatter. Handles nested keys, lists.""" result: dict = {} current_key: str | None = None - current_indent = 0 parent_stack: list[tuple[str, dict, int]] = [] for line in text.split("\n"): @@ -138,8 +138,11 @@ def _parse_simple_yaml(text: str) -> dict: target = _current_dict(result, parent_stack) if val.startswith("[") and val.endswith("]"): - items = [v.strip().strip('"').strip("'") - for v in val[1:-1].split(",") if v.strip()] + items = [ + v.strip().strip('"').strip("'") + for v in val[1:-1].split(",") + if v.strip() + ] target[key] = items current_key = key elif val: @@ -150,8 +153,6 @@ def _parse_simple_yaml(text: str) -> dict: parent_stack.append((key, target, indent)) current_key = None - current_indent = indent - return result @@ -218,6 +219,7 @@ def _extract_sections(body: str) -> list[tuple[str, str]]: # Content transformation # --------------------------------------------------------------------------- + def clean_myst_directives(text: str) -> str: """Convert MyST/Sphinx directives to standard markdown equivalents.""" # Multi-line {include} directives with :start-after: etc. 
@@ -250,8 +252,11 @@ def clean_myst_directives(text: str) -> str: def _format_admonition(title: str, body: str) -> str: """Format an admonition as a blockquote, stripping directive lines.""" - lines = [l for l in body.strip().split("\n") - if not re.match(r"^\s*:[a-z_-]+:", l)] + lines = [ + line + for line in body.strip().split("\n") + if not re.match(r"^\s*:[a-z_-]+:", line) + ] while lines and not lines[0].strip(): lines.pop(0) while lines and not lines[-1].strip(): @@ -315,9 +320,7 @@ def resolve_includes(text: str, source_dir: Path) -> str: Handles :start-after: and :end-before: markers for partial content extraction. Falls back to a placeholder when the file cannot be read. """ - pattern = re.compile( - r"```\{include\}\s*([^\n]+)\n((?::[^\n]+\n)*)```" - ) + pattern = re.compile(r"```\{include\}\s*([^\n]+)\n((?::[^\n]+\n)*)```") def _resolve(match: re.Match) -> str: raw_path = match.group(1).strip() @@ -328,9 +331,9 @@ def _resolve(match: re.Match) -> str: for line in directives.strip().split("\n"): line = line.strip() if line.startswith(":start-after:"): - start_after = line[len(":start-after:"):].strip() + start_after = line[len(":start-after:") :].strip() elif line.startswith(":end-before:"): - end_before = line[len(":end-before:"):].strip() + end_before = line[len(":end-before:") :].strip() resolved = (source_dir / raw_path).resolve() if not resolved.is_file(): @@ -344,7 +347,7 @@ def _resolve(match: re.Match) -> str: if start_after: idx = content.find(start_after) if idx != -1: - content = content[idx + len(start_after):] + content = content[idx + len(start_after) :] if end_before: idx = content.find(end_before) if idx != -1: @@ -441,8 +444,8 @@ def extract_related_skills(text: str) -> tuple[str, list[str]]: # Match H2 or H3 "Next Steps" / "Related Topics" sections and their content pattern = re.compile( r"^(#{2,3})\s+(Next Steps|Related Topics)\s*\n+" - r"(?:.*?\n)*?" # optional intro line - r"((?:- .+\n?)+)", # the bullet list + r"(?:.*?\n)*?" 
# optional intro line + r"((?:- .+\n?)+)", # the bullet list re.MULTILINE, ) @@ -479,7 +482,7 @@ def _safe_truncation_point(lines: list[str], target: int) -> int: """Find a safe truncation point that doesn't break code fences.""" in_fence = False last_safe = target - for i, line in enumerate(lines[:target + 20]): + for i, line in enumerate(lines[: target + 20]): if line.strip().startswith("```"): in_fence = not in_fence if i >= target and not in_fence: @@ -505,8 +508,25 @@ def extract_trigger_keywords(pages: list[DocPage]) -> list[str]: # Extract meaningful words from the title if page.title: title_words = re.sub(r"[^a-zA-Z\s]", "", page.title).lower().split() - stop_words = {"the", "a", "an", "and", "or", "for", "to", "in", "of", - "it", "how", "what", "with", "from", "by", "on", "is"} + stop_words = { + "the", + "a", + "an", + "and", + "or", + "for", + "to", + "in", + "of", + "it", + "how", + "what", + "with", + "from", + "by", + "on", + "is", + } title_words = [w for w in title_words if w not in stop_words and len(w) > 2] if len(title_words) >= 2: keywords.add(" ".join(title_words[:4])) @@ -575,14 +595,58 @@ def extract_trigger_keywords(pages: list[DocPage]) -> list[str]: "security": "security", } -NOUN_STOP = {"the", "a", "an", "and", "or", "for", "to", "in", "of", "it", - "how", "what", "with", "from", "by", "on", "is", "your", "that", - "this", "its", "use", "using", "at", "runtime", "activity", - "issues", "guide", "configuration", "settings", "options", - "models", "providers", "requests", "resources", "instances", - "debug", "troubleshoot", "fix", "check", "verify", "test", - "deny", "approve", "enable", "disable", "manage", "works", - "agent", "agents"} +NOUN_STOP = { + "the", + "a", + "an", + "and", + "or", + "for", + "to", + "in", + "of", + "it", + "how", + "what", + "with", + "from", + "by", + "on", + "is", + "your", + "that", + "this", + "its", + "use", + "using", + "at", + "runtime", + "activity", + "issues", + "guide", + "configuration", + 
"settings", + "options", + "models", + "providers", + "requests", + "resources", + "instances", + "debug", + "troubleshoot", + "fix", + "check", + "verify", + "test", + "deny", + "approve", + "enable", + "disable", + "manage", + "works", + "agent", + "agents", +} PROJECT_STOP = set() # Populated at runtime from --prefix @@ -603,14 +667,16 @@ def _extract_noun_from_title(title: str) -> str | None: # Strip the leading verb phrase for phrase in sorted(TITLE_VERBS, key=lambda x: -len(x)): if lower.startswith(phrase): - lower = lower[len(phrase):].strip() + lower = lower[len(phrase) :].strip() break # Strip everything after em-dash, en-dash, or colon (subtitle) lower = re.split(r"\s*[—–]\s*|\s*:\s*|\s*-{2,}\s*", lower)[0] words = re.sub(r"[^a-z\s]", "", lower).split() - nouns = [w for w in words if w not in NOUN_STOP and w not in PROJECT_STOP and len(w) > 2] + nouns = [ + w for w in words if w not in NOUN_STOP and w not in PROJECT_STOP and len(w) > 2 + ] if len(nouns) >= 2: return "-".join(nouns[:2]) @@ -681,7 +747,9 @@ def generate_skill_name( return name -def build_skill_description(name: str, pages: list[DocPage], keywords: list[str]) -> str: +def build_skill_description( + name: str, pages: list[DocPage], keywords: list[str] +) -> str: """Build the description field for the skill frontmatter. 
Best-practices compliance: @@ -736,8 +804,18 @@ def _to_third_person(sentence: str) -> str: return sentence _BASE_VERBS_ENDING_IN_S = { - "access", "process", "address", "discuss", "bypass", "express", - "compress", "assess", "stress", "progress", "focus", "canvas", + "access", + "process", + "address", + "discuss", + "bypass", + "express", + "compress", + "assess", + "stress", + "progress", + "focus", + "canvas", } if first_word.endswith("ing"): return first_word + trailing_punct + suffix @@ -745,7 +823,11 @@ def _to_third_person(sentence: str) -> str: return first_word + trailing_punct + suffix if first_word.endswith(("ch", "sh", "x", "ss", "zz")): return first_word + "es" + trailing_punct + suffix - if first_word.endswith("y") and len(first_word) > 1 and first_word[-2] not in "aeiou": + if ( + first_word.endswith("y") + and len(first_word) > 1 + and first_word[-2] not in "aeiou" + ): return first_word[:-1] + "ies" + trailing_punct + suffix return first_word + "s" + trailing_punct + suffix @@ -786,9 +868,15 @@ def _clean(text: str, source: DocPage) -> str: result = rewrite_doc_paths(result, source, docs_dir, doc_to_skill) return result - procedures = [p for p in pages if CONTENT_TYPE_ROLE.get(p.content_type) == "procedure"] - context_pages = [p for p in pages if CONTENT_TYPE_ROLE.get(p.content_type) == "context"] - reference_pages = [p for p in pages if CONTENT_TYPE_ROLE.get(p.content_type) == "reference"] + procedures = [ + p for p in pages if CONTENT_TYPE_ROLE.get(p.content_type) == "procedure" + ] + context_pages = [ + p for p in pages if CONTENT_TYPE_ROLE.get(p.content_type) == "context" + ] + reference_pages = [ + p for p in pages if CONTENT_TYPE_ROLE.get(p.content_type) == "reference" + ] # Pages without a recognized content_type default to procedure untyped = [p for p in pages if p.content_type not in CONTENT_TYPE_ROLE] @@ -822,7 +910,7 @@ def _clean(text: str, source: DocPage) -> str: body = _clean(cp.body, cp) h1_match = re.match(r"^#\s+.+\n+", body) if 
h1_match: - body = body[h1_match.end():] + body = body[h1_match.end() :] # Trim to keep SKILL.md concise; full content goes to references/ body_lines = body.split("\n") if len(body_lines) > 60: @@ -867,7 +955,7 @@ def _clean(text: str, source: DocPage) -> str: for idx, pp in enumerate(procedures): # When merging multiple docs, add a transition heading if len(procedures) > 1 and idx > 0 and pp.title: - lines.append(f"---") + lines.append("---") lines.append("") for heading, content in pp.sections: @@ -971,6 +1059,7 @@ def _clean(text: str, source: DocPage) -> str: # Grouping strategies # --------------------------------------------------------------------------- + def group_by_directory(pages: list[DocPage]) -> dict[str, list[DocPage]]: """Group pages by their parent directory.""" groups: dict[str, list[DocPage]] = {} @@ -1019,8 +1108,13 @@ def group_by_content_type(pages: list[DocPage]) -> dict[str, list[DocPage]]: # --------------------------------------------------------------------------- EXCLUDED_PATTERNS = { - "CONTRIBUTING.md", "README.md", "SETUP.md", "CHANGELOG.md", - "LICENSE.md", "license.md", "index.md", + "CONTRIBUTING.md", + "README.md", + "SETUP.md", + "CHANGELOG.md", + "LICENSE.md", + "license.md", + "index.md", } @@ -1051,6 +1145,7 @@ def scan_docs(docs_dir: Path) -> list[DocPage]: # CLI # --------------------------------------------------------------------------- + def main(): parser = argparse.ArgumentParser( description="Convert documentation files into Agent Skills.", @@ -1068,26 +1163,39 @@ def main(): %(prog)s docs/ output/ --strategy smart --dry-run """), ) - parser.add_argument("docs_dir", type=Path, help="Path to the documentation directory") - parser.add_argument("output_dir", type=Path, help="Output directory for generated skills") parser.add_argument( - "--strategy", choices=list(STRATEGIES.keys()), default="smart", + "docs_dir", type=Path, help="Path to the documentation directory" + ) + parser.add_argument( + "output_dir", type=Path, 
help="Output directory for generated skills" + ) + parser.add_argument( + "--strategy", + choices=list(STRATEGIES.keys()), + default="smart", help="Grouping strategy (default: smart)", ) parser.add_argument( - "--dry-run", action="store_true", + "--dry-run", + action="store_true", help="Show what would be generated without writing files", ) parser.add_argument( - "--prefix", default="", + "--prefix", + default="", help="Prefix for all skill names (e.g. 'nemoclaw')", ) parser.add_argument( - "--name-map", nargs="*", default=[], metavar="CAT=NAME", + "--name-map", + nargs="*", + default=[], + metavar="CAT=NAME", help="Override names: --name-map about=overview deployment=deploy-remote", ) parser.add_argument( - "--exclude", nargs="*", default=[], + "--exclude", + nargs="*", + default=[], help="Additional file patterns to exclude", ) @@ -1097,8 +1205,10 @@ def main(): name_overrides: dict[str, str] = {} for mapping in args.name_map: if "=" not in mapping: - print(f"Error: --name-map entries must be CAT=NAME, got '{mapping}'", - file=sys.stderr) + print( + f"Error: --name-map entries must be CAT=NAME, got '{mapping}'", + file=sys.stderr, + ) sys.exit(1) cat, _, nm = mapping.partition("=") name_overrides[cat.strip()] = nm.strip() @@ -1150,7 +1260,8 @@ def main(): skill_names: dict[str, str] = {} # group_name → skill_name for group_name, group_pages in sorted(groups.items()): sname = generate_skill_name( - group_name, group_pages, + group_name, + group_pages, prefix=args.prefix, name_overrides=name_overrides, ) @@ -1167,12 +1278,16 @@ def main(): pass # Generate skills - print(f"\n{'[DRY RUN] ' if args.dry_run else ''}Generating skills to {args.output_dir}/") + print( + f"\n{'[DRY RUN] ' if args.dry_run else ''}Generating skills to {args.output_dir}/" + ) summaries: list[dict] = [] for group_name, group_pages in sorted(groups.items()): name = skill_names[group_name] summary = generate_skill( - name, group_pages, args.output_dir, + name, + group_pages, + args.output_dir, 
docs_dir=docs_dir_resolved, doc_to_skill=doc_to_skill, dry_run=args.dry_run, @@ -1194,7 +1309,9 @@ def main(): warning = " ⚠ >500 lines" if lines > 500 else "" print(f" {s['name']:30s} {lines:4d} lines {refs} refs{warning}{status}") - print(f"\nTotal: {len(summaries)} skills, {total_lines} lines, {total_refs} reference files") + print( + f"\nTotal: {len(summaries)} skills, {total_lines} lines, {total_refs} reference files" + ) if any(s["skill_md_lines"] > 500 for s in summaries): print("\nNote: Skills over 500 lines should be trimmed. Move detailed") From afaca4fe0e5882f7ae4337b9903c93d5ad4197f9 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:49:16 +0200 Subject: [PATCH 10/20] fix: resolve pre-existing prek hook failures across the repo MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fix trailing whitespace and missing final newlines in docs, skills, CODE_OF_CONDUCT.md, and scripts/debug.sh. - Set executable bits on scripts with shebangs: install.sh, smoke-macos-install.sh, test-full-e2e.sh, lib/runtime.sh, migrations/snapshot.py. - Fix shellcheck SC2206 in install.sh (safe IFS+read -ra for version parsing) and remove unused SCRIPT_DIR in setup-spark.sh. - Add hadolint ignore pragmas to test Dockerfiles (DL3008, DL3013, DL3042, DL3059, DL4006 — test images don't need version pinning). - Add SPDX license headers to __init__.py, test-double-onboard.sh, test-inference-local.sh, test-inference.sh, test-full-e2e.sh. 
--- .../skills/docs/nemoclaw-overview/references/how-it-works.md | 2 +- .agents/skills/docs/nemoclaw-overview/references/overview.md | 2 +- .../docs/nemoclaw-overview/references/release-notes.md | 2 +- .agents/skills/docs/nemoclaw-reference/SKILL.md | 2 +- .../docs/nemoclaw-reference/references/architecture.md | 2 +- .../skills/docs/nemoclaw-reference/references/commands.md | 2 +- .../docs/nemoclaw-reference/references/inference-profiles.md | 2 +- .../docs/nemoclaw-reference/references/network-policies.md | 2 +- .../docs/nemoclaw-reference/references/troubleshooting.md | 2 +- CODE_OF_CONDUCT.md | 4 ++-- docs/about/overview.md | 2 +- docs/about/release-notes.md | 2 +- install.sh | 5 +++-- nemoclaw-blueprint/orchestrator/__init__.py | 2 ++ scripts/debug.sh | 1 - scripts/setup-spark.sh | 2 -- scripts/test-inference-local.sh | 3 +++ scripts/test-inference.sh | 3 +++ test/Dockerfile.sandbox | 1 + test/e2e/Dockerfile.full-e2e | 1 + test/e2e/test-double-onboard.sh | 3 +++ test/e2e/test-full-e2e.sh | 3 +++ 22 files changed, 32 insertions(+), 18 deletions(-) diff --git a/.agents/skills/docs/nemoclaw-overview/references/how-it-works.md b/.agents/skills/docs/nemoclaw-overview/references/how-it-works.md index 9530758b..6b53ffa9 100644 --- a/.agents/skills/docs/nemoclaw-overview/references/how-it-works.md +++ b/.agents/skills/docs/nemoclaw-overview/references/how-it-works.md @@ -111,4 +111,4 @@ Approved endpoints persist for the current session but are not saved to the base - Follow the Quickstart (see the `nemoclaw-get-started` skill) to launch your first sandbox. - Refer to the Architecture (see the `nemoclaw-reference` skill) for the full technical structure, including file layouts and the blueprint lifecycle. -- Refer to Inference Profiles (see the `nemoclaw-reference` skill) for detailed provider configuration. \ No newline at end of file +- Refer to Inference Profiles (see the `nemoclaw-reference` skill) for detailed provider configuration. 
diff --git a/.agents/skills/docs/nemoclaw-overview/references/overview.md b/.agents/skills/docs/nemoclaw-overview/references/overview.md index 7134e56a..71203b47 100644 --- a/.agents/skills/docs/nemoclaw-overview/references/overview.md +++ b/.agents/skills/docs/nemoclaw-overview/references/overview.md @@ -53,4 +53,4 @@ Explore the following pages to learn more about NemoClaw. - Switch Inference Providers (see the `nemoclaw-configure-inference` skill) to configure the inference provider. - Approve or Deny Network Requests (see the `nemoclaw-manage-policy` skill) to manage egress approvals. - Deploy to a Remote GPU Instance (see the `nemoclaw-deploy-remote` skill) for persistent operation. -- Monitor Sandbox Activity (see the `nemoclaw-monitor-sandbox` skill) to observe agent behavior. \ No newline at end of file +- Monitor Sandbox Activity (see the `nemoclaw-monitor-sandbox` skill) to observe agent behavior. diff --git a/.agents/skills/docs/nemoclaw-overview/references/release-notes.md b/.agents/skills/docs/nemoclaw-overview/references/release-notes.md index 9e0242de..92949faf 100644 --- a/.agents/skills/docs/nemoclaw-overview/references/release-notes.md +++ b/.agents/skills/docs/nemoclaw-overview/references/release-notes.md @@ -12,4 +12,4 @@ NVIDIA NemoClaw is available in early preview starting March 16, 2026. Use the f | [Releases](https://github.com/NVIDIA/NemoClaw/releases) | Versioned release notes and downloadable assets. | | [Release comparison](https://github.com/NVIDIA/NemoClaw/compare) | Diff between any two tags or branches. | | [Merged pull requests](https://github.com/NVIDIA/NemoClaw/pulls?q=is%3Apr+is%3Amerged) | Individual changes with review discussion. | -| [Commit history](https://github.com/NVIDIA/NemoClaw/commits/main) | Full commit log on `main`. | \ No newline at end of file +| [Commit history](https://github.com/NVIDIA/NemoClaw/commits/main) | Full commit log on `main`. 
| diff --git a/.agents/skills/docs/nemoclaw-reference/SKILL.md b/.agents/skills/docs/nemoclaw-reference/SKILL.md index e6b61e8f..61230014 100644 --- a/.agents/skills/docs/nemoclaw-reference/SKILL.md +++ b/.agents/skills/docs/nemoclaw-reference/SKILL.md @@ -13,4 +13,4 @@ Learn how NemoClaw combines a lightweight CLI plugin with a versioned blueprint - [NemoClaw CLI Commands Reference](references/commands.md) - [NemoClaw Inference Profiles — NVIDIA Endpoint](references/inference-profiles.md) - [NemoClaw Network Policies — Baseline Rules and Operator Approval](references/network-policies.md) -- [NemoClaw Troubleshooting Guide](references/troubleshooting.md) \ No newline at end of file +- [NemoClaw Troubleshooting Guide](references/troubleshooting.md) diff --git a/.agents/skills/docs/nemoclaw-reference/references/architecture.md b/.agents/skills/docs/nemoclaw-reference/references/architecture.md index c5b9dada..c28d4287 100644 --- a/.agents/skills/docs/nemoclaw-reference/references/architecture.md +++ b/.agents/skills/docs/nemoclaw-reference/references/architecture.md @@ -79,4 +79,4 @@ OpenShell intercepts them and routes to the configured provider: Agent (sandbox) ──▶ OpenShell gateway ──▶ NVIDIA Endpoint (build.nvidia.com) ``` -Refer to Inference Profiles (see the `nemoclaw-reference` skill) for provider configuration details. \ No newline at end of file +Refer to Inference Profiles (see the `nemoclaw-reference` skill) for provider configuration details. 
diff --git a/.agents/skills/docs/nemoclaw-reference/references/commands.md b/.agents/skills/docs/nemoclaw-reference/references/commands.md index 5085ed8d..fd08af4c 100644 --- a/.agents/skills/docs/nemoclaw-reference/references/commands.md +++ b/.agents/skills/docs/nemoclaw-reference/references/commands.md @@ -149,4 +149,4 @@ After the fixes complete, the script prompts you to run `nemoclaw onboard` to co ```console $ sudo nemoclaw setup-spark -``` \ No newline at end of file +``` diff --git a/.agents/skills/docs/nemoclaw-reference/references/inference-profiles.md b/.agents/skills/docs/nemoclaw-reference/references/inference-profiles.md index 57c8f137..9586246b 100644 --- a/.agents/skills/docs/nemoclaw-reference/references/inference-profiles.md +++ b/.agents/skills/docs/nemoclaw-reference/references/inference-profiles.md @@ -50,4 +50,4 @@ $ openshell inference set --provider nvidia-nim --model ``` The change takes effect immediately. -No sandbox restart is needed. \ No newline at end of file +No sandbox restart is needed. 
diff --git a/.agents/skills/docs/nemoclaw-reference/references/network-policies.md b/.agents/skills/docs/nemoclaw-reference/references/network-policies.md index d094c94c..a32e4158 100644 --- a/.agents/skills/docs/nemoclaw-reference/references/network-policies.md +++ b/.agents/skills/docs/nemoclaw-reference/references/network-policies.md @@ -119,4 +119,4 @@ Apply policy updates to a running sandbox without restarting: ```console $ openshell policy set -``` \ No newline at end of file +``` diff --git a/.agents/skills/docs/nemoclaw-reference/references/troubleshooting.md b/.agents/skills/docs/nemoclaw-reference/references/troubleshooting.md index bc97b041..8dd01774 100644 --- a/.agents/skills/docs/nemoclaw-reference/references/troubleshooting.md +++ b/.agents/skills/docs/nemoclaw-reference/references/troubleshooting.md @@ -161,4 +161,4 @@ View the error output for the failed blueprint run: $ nemoclaw logs ``` -Use `--follow` to stream logs in real time while debugging. \ No newline at end of file +Use `--follow` to stream logs in real time while debugging. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index efdeccac..3a65b3ff 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -67,7 +67,7 @@ reported by contacting GitHub_Conduct@nvidia.com. All complaints will be reviewe investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies -may be posted separately. +may be posted separately. 
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other @@ -81,4 +81,4 @@ available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.ht [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see -https://www.contributor-covenant.org/faq \ No newline at end of file +https://www.contributor-covenant.org/faq diff --git a/docs/about/overview.md b/docs/about/overview.md index 23a49126..f6f73274 100644 --- a/docs/about/overview.md +++ b/docs/about/overview.md @@ -71,4 +71,4 @@ Explore the following pages to learn more about NemoClaw. - [Switch Inference Providers](../inference/switch-inference-providers.md) to configure the inference provider. - [Approve or Deny Network Requests](../network-policy/approve-network-requests.md) to manage egress approvals. - [Deploy to a Remote GPU Instance](../deployment/deploy-to-remote-gpu.md) for persistent operation. -- [Monitor Sandbox Activity](../monitoring/monitor-sandbox-activity.md) to observe agent behavior. \ No newline at end of file +- [Monitor Sandbox Activity](../monitoring/monitor-sandbox-activity.md) to observe agent behavior. diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md index e9a74ca6..b5f3b88e 100644 --- a/docs/about/release-notes.md +++ b/docs/about/release-notes.md @@ -30,4 +30,4 @@ NVIDIA NemoClaw is available in early preview starting March 16, 2026. Use the f | [Releases](https://github.com/NVIDIA/NemoClaw/releases) | Versioned release notes and downloadable assets. | | [Release comparison](https://github.com/NVIDIA/NemoClaw/compare) | Diff between any two tags or branches. | | [Merged pull requests](https://github.com/NVIDIA/NemoClaw/pulls?q=is%3Apr+is%3Amerged) | Individual changes with review discussion. | -| [Commit history](https://github.com/NVIDIA/NemoClaw/commits/main) | Full commit log on `main`. 
| \ No newline at end of file +| [Commit history](https://github.com/NVIDIA/NemoClaw/commits/main) | Full commit log on `main`. | diff --git a/install.sh b/install.sh index 88c76798..f8351db3 100755 --- a/install.sh +++ b/install.sh @@ -191,8 +191,9 @@ ORIGINAL_PATH="${PATH:-}" # Compare two semver strings (major.minor.patch). Returns 0 if $1 >= $2. version_gte() { - local IFS=. - local -a a=($1) b=($2) + local -a a b + IFS=. read -ra a <<< "$1" + IFS=. read -ra b <<< "$2" for i in 0 1 2; do local ai=${a[$i]:-0} bi=${b[$i]:-0} if (( ai > bi )); then return 0; fi diff --git a/nemoclaw-blueprint/orchestrator/__init__.py b/nemoclaw-blueprint/orchestrator/__init__.py index e69de29b..52a7a9da 100644 --- a/nemoclaw-blueprint/orchestrator/__init__.py +++ b/nemoclaw-blueprint/orchestrator/__init__.py @@ -0,0 +1,2 @@ +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 diff --git a/scripts/debug.sh b/scripts/debug.sh index 3fa2647e..24d6052e 100755 --- a/scripts/debug.sh +++ b/scripts/debug.sh @@ -322,4 +322,3 @@ fi echo "" info "Done. 
If filing a bug, run with --output and attach the tarball to your issue:" info " nemoclaw debug --output /tmp/nemoclaw-debug.tar.gz" - diff --git a/scripts/setup-spark.sh b/scripts/setup-spark.sh index 0cf5a20c..5911185e 100755 --- a/scripts/setup-spark.sh +++ b/scripts/setup-spark.sh @@ -30,8 +30,6 @@ info() { echo -e "${GREEN}>>>${NC} $1"; } warn() { echo -e "${YELLOW}>>>${NC} $1"; } fail() { echo -e "${RED}>>>${NC} $1"; exit 1; } -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" - # ── Pre-flight checks ───────────────────────────────────────────── if [ "$(uname -s)" != "Linux" ]; then diff --git a/scripts/test-inference-local.sh b/scripts/test-inference-local.sh index 93aea362..4404c257 100755 --- a/scripts/test-inference-local.sh +++ b/scripts/test-inference-local.sh @@ -1,4 +1,7 @@ #!/usr/bin/env bash +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 + # Test inference.local routing through OpenShell provider (local vLLM) echo '{"model":"nvidia/nemotron-3-nano-30b-a3b","messages":[{"role":"user","content":"say hello"}]}' > /tmp/req.json curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @/tmp/req.json diff --git a/scripts/test-inference.sh b/scripts/test-inference.sh index cbe599f9..332f1b98 100755 --- a/scripts/test-inference.sh +++ b/scripts/test-inference.sh @@ -1,4 +1,7 @@ #!/usr/bin/env bash +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: Apache-2.0 + # Test inference.local routing through OpenShell provider echo '{"model":"nvidia/nemotron-3-super-120b-a12b","messages":[{"role":"user","content":"say hello"}]}' > /tmp/req.json curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @/tmp/req.json diff --git a/test/Dockerfile.sandbox b/test/Dockerfile.sandbox index cf0ef49a..8bed7153 100644 --- a/test/Dockerfile.sandbox +++ b/test/Dockerfile.sandbox @@ -1,3 +1,4 @@ +# hadolint global ignore=DL3008,DL3013,DL3042,DL3059,DL4006 # Lightweight test sandbox for NemoClaw E2E testing # Simulates the OpenClaw-in-OpenShell environment without requiring # the full NVIDIA base image or openshell CLI diff --git a/test/e2e/Dockerfile.full-e2e b/test/e2e/Dockerfile.full-e2e index e514c79a..cedef732 100644 --- a/test/e2e/Dockerfile.full-e2e +++ b/test/e2e/Dockerfile.full-e2e @@ -1,3 +1,4 @@ +# hadolint global ignore=DL3008,DL4006 FROM ubuntu:24.04 ENV DEBIAN_FRONTEND=noninteractive diff --git a/test/e2e/test-double-onboard.sh b/test/e2e/test-double-onboard.sh index 7ebdddcd..3245ee95 100755 --- a/test/e2e/test-double-onboard.sh +++ b/test/e2e/test-double-onboard.sh @@ -1,4 +1,7 @@ #!/bin/bash +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 + # Double onboard: verify that consecutive `nemoclaw onboard` runs recover # automatically from stale state (gateway, port forward, registry entries) # left behind by a previous run. diff --git a/test/e2e/test-full-e2e.sh b/test/e2e/test-full-e2e.sh index 5441ca68..538ecbe9 100644 --- a/test/e2e/test-full-e2e.sh +++ b/test/e2e/test-full-e2e.sh @@ -1,4 +1,7 @@ #!/bin/bash +# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: Apache-2.0 + # Full E2E: install → onboard → verify inference (REAL services, no mocks) # # Proves the COMPLETE user journey including real inference against From fb1a1f991481d1f57c8d37ea6bd1ff84f8ae1317 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 09:52:05 +0200 Subject: [PATCH 11/20] fix: remove redundant prek-push-range hook and fix remaining lint prek already runs pre-commit-stage hooks during pre-push, so the prek-push-range hook caused a duplicate pass. Remove it. Also fix: test-full-e2e.sh exec bit, hadolint SC2086/SC2038 ignores in test Dockerfiles. --- .github/workflows/pr.yaml | 2 +- .pre-commit-config.yaml | 8 ------ test/Dockerfile.sandbox | 2 +- test/e2e/Dockerfile.full-e2e | 2 +- test/e2e/test-full-e2e.sh | 54 ++++++++++++++++++++++++------------ 5 files changed, 39 insertions(+), 29 deletions(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 230eea74..b6d345b4 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -53,7 +53,7 @@ jobs: run: npx prek run --all-files - name: Run pre-push hooks (tsc + pyright) - run: npx prek run --all-files --stage pre-push --skip prek-push-range + run: npx prek run --all-files --stage pre-push test-unit: runs-on: ubuntu-latest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 74c1709e..23238447 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -196,14 +196,6 @@ repos: always_run: true stages: [pre-push] - - id: prek-push-range - name: Re-run pre-commit hooks on outgoing commits - entry: bash -c 'if git rev-parse "@{u}" >/dev/null 2>&1; then FROM=$(git merge-base HEAD "@{u}"); prek run --from-ref "$FROM" --to-ref HEAD; elif git rev-parse HEAD~1 >/dev/null 2>&1; then prek run --from-ref HEAD~1 --to-ref HEAD; else prek run --all-files; fi' - language: system - pass_filenames: false - always_run: true - stages: [pre-push] - default_language_version: python: python3 diff --git 
a/test/Dockerfile.sandbox b/test/Dockerfile.sandbox index 8bed7153..7552e041 100644 --- a/test/Dockerfile.sandbox +++ b/test/Dockerfile.sandbox @@ -1,4 +1,4 @@ -# hadolint global ignore=DL3008,DL3013,DL3042,DL3059,DL4006 +# hadolint global ignore=DL3008,DL3013,DL3042,DL3059,DL4006,SC2038 # Lightweight test sandbox for NemoClaw E2E testing # Simulates the OpenClaw-in-OpenShell environment without requiring # the full NVIDIA base image or openshell CLI diff --git a/test/e2e/Dockerfile.full-e2e b/test/e2e/Dockerfile.full-e2e index cedef732..2f45a0b8 100644 --- a/test/e2e/Dockerfile.full-e2e +++ b/test/e2e/Dockerfile.full-e2e @@ -1,4 +1,4 @@ -# hadolint global ignore=DL3008,DL4006 +# hadolint global ignore=DL3008,DL4006,SC2086 FROM ubuntu:24.04 ENV DEBIAN_FRONTEND=noninteractive diff --git a/test/e2e/test-full-e2e.sh b/test/e2e/test-full-e2e.sh index 538ecbe9..6e7b6f33 100644 --- a/test/e2e/test-full-e2e.sh +++ b/test/e2e/test-full-e2e.sh @@ -31,11 +31,26 @@ FAIL=0 SKIP=0 TOTAL=0 -pass() { ((PASS++)); ((TOTAL++)); printf '\033[32m PASS: %s\033[0m\n' "$1"; } -fail() { ((FAIL++)); ((TOTAL++)); printf '\033[31m FAIL: %s\033[0m\n' "$1"; } -skip() { ((SKIP++)); ((TOTAL++)); printf '\033[33m SKIP: %s\033[0m\n' "$1"; } -section() { echo ""; printf '\033[1;36m=== %s ===\033[0m\n' "$1"; } -info() { printf '\033[1;34m [info]\033[0m %s\n' "$1"; } +pass() { + ((PASS++)) + ((TOTAL++)) + printf '\033[32m PASS: %s\033[0m\n' "$1" +} +fail() { + ((FAIL++)) + ((TOTAL++)) + printf '\033[31m FAIL: %s\033[0m\n' "$1" +} +skip() { + ((SKIP++)) + ((TOTAL++)) + printf '\033[33m SKIP: %s\033[0m\n' "$1" +} +section() { + echo "" + printf '\033[1;36m=== %s ===\033[0m\n' "$1" +} +info() { printf '\033[1;34m [info]\033[0m %s\n' "$1"; } # Parse chat completion response — handles both content and reasoning_content # (nemotron-3-super is a reasoning model that may put output in reasoning_content) @@ -70,10 +85,10 @@ SANDBOX_NAME="${NEMOCLAW_SANDBOX_NAME:-e2e-nightly}" # 
══════════════════════════════════════════════════════════════════ section "Phase 0: Pre-cleanup" info "Destroying any leftover sandbox/gateway from previous runs..." -if command -v nemoclaw > /dev/null 2>&1; then +if command -v nemoclaw >/dev/null 2>&1; then nemoclaw "$SANDBOX_NAME" destroy 2>/dev/null || true fi -if command -v openshell > /dev/null 2>&1; then +if command -v openshell >/dev/null 2>&1; then openshell sandbox delete "$SANDBOX_NAME" 2>/dev/null || true openshell gateway destroy -g nemoclaw 2>/dev/null || true fi @@ -84,7 +99,7 @@ pass "Pre-cleanup complete" # ══════════════════════════════════════════════════════════════════ section "Phase 1: Prerequisites" -if docker info > /dev/null 2>&1; then +if docker info >/dev/null 2>&1; then pass "Docker is running" else fail "Docker is not running — cannot continue" @@ -98,7 +113,7 @@ else exit 1 fi -if curl -sf --max-time 10 https://integrate.api.nvidia.com/v1/models > /dev/null 2>&1; then +if curl -sf --max-time 10 https://integrate.api.nvidia.com/v1/models >/dev/null 2>&1; then pass "Network access to integrate.api.nvidia.com" else fail "Cannot reach integrate.api.nvidia.com" @@ -115,7 +130,10 @@ fi # ══════════════════════════════════════════════════════════════════ section "Phase 2: Install nemoclaw (non-interactive mode)" -cd "$REPO" || { fail "Could not cd to repo root: $REPO"; exit 1; } +cd "$REPO" || { + fail "Could not cd to repo root: $REPO" + exit 1 +} info "Running install.sh --non-interactive..." info "This installs Node.js, openshell, NemoClaw, and runs onboard." @@ -125,7 +143,7 @@ INSTALL_LOG="/tmp/nemoclaw-e2e-install.log" # Write to a file instead of piping through tee. openshell's background # port-forward inherits pipe file descriptors, which prevents tee from exiting. # Use tail -f in the background for real-time output in CI logs. -bash install.sh --non-interactive > "$INSTALL_LOG" 2>&1 & +bash install.sh --non-interactive >"$INSTALL_LOG" 2>&1 & install_pid=$! 
tail -f "$INSTALL_LOG" --pid=$install_pid 2>/dev/null & tail_pid=$! @@ -154,7 +172,7 @@ else fi # Verify nemoclaw is on PATH -if command -v nemoclaw > /dev/null 2>&1; then +if command -v nemoclaw >/dev/null 2>&1; then pass "nemoclaw installed at $(command -v nemoclaw)" else fail "nemoclaw not found on PATH after install" @@ -162,14 +180,14 @@ else fi # Verify openshell was installed -if command -v openshell > /dev/null 2>&1; then +if command -v openshell >/dev/null 2>&1; then pass "openshell installed ($(openshell --version 2>&1 || echo unknown))" else fail "openshell not found on PATH after install" exit 1 fi -nemoclaw --help > /dev/null 2>&1 \ +nemoclaw --help >/dev/null 2>&1 \ && pass "nemoclaw --help exits 0" \ || fail "nemoclaw --help failed" @@ -252,11 +270,11 @@ info "[LIVE] Sandbox inference test → user → sandbox → gateway → NVIDIA ssh_config="$(mktemp)" sandbox_response="" -if openshell sandbox ssh-config "$SANDBOX_NAME" > "$ssh_config" 2>/dev/null; then +if openshell sandbox ssh-config "$SANDBOX_NAME" >"$ssh_config" 2>/dev/null; then # Use timeout if available (Linux, Homebrew), fall back to plain ssh TIMEOUT_CMD="" - command -v timeout > /dev/null 2>&1 && TIMEOUT_CMD="timeout 90" - command -v gtimeout > /dev/null 2>&1 && TIMEOUT_CMD="gtimeout 90" + command -v timeout >/dev/null 2>&1 && TIMEOUT_CMD="timeout 90" + command -v gtimeout >/dev/null 2>&1 && TIMEOUT_CMD="gtimeout 90" sandbox_response=$($TIMEOUT_CMD ssh -F "$ssh_config" \ -o StrictHostKeyChecking=no \ -o UserKnownHostsFile=/dev/null \ @@ -266,7 +284,7 @@ if openshell sandbox ssh-config "$SANDBOX_NAME" > "$ssh_config" 2>/dev/null; the "curl -s --max-time 60 https://inference.local/v1/chat/completions \ -H 'Content-Type: application/json' \ -d '{\"model\":\"nvidia/nemotron-3-super-120b-a12b\",\"messages\":[{\"role\":\"user\",\"content\":\"Reply with exactly one word: PONG\"}],\"max_tokens\":100}'" \ - 2>&1) || true + 2>&1) || true fi rm -f "$ssh_config" From db309745968720d4f5a2e0cb8c6438782a81b306 
Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:01:00 +0200 Subject: [PATCH 12/20] fix: use local commitlint binary instead of npx prek's system hook couldn't resolve npx. Use the repo-local node_modules/.bin/commitlint directly via git rev-parse. --- .pre-commit-config.yaml | 8 ++-- install.sh | 74 ++++++++++++++++++++------------- scripts/debug.sh | 23 +++++----- scripts/setup-spark.sh | 11 +++-- scripts/test-inference-local.sh | 2 +- scripts/test-inference.sh | 2 +- test/e2e/test-double-onboard.sh | 45 +++++++++++++------- test/e2e/test-full-e2e.sh | 0 8 files changed, 100 insertions(+), 65 deletions(-) mode change 100644 => 100755 test/e2e/test-full-e2e.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 23238447..8269c4fa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -168,14 +168,12 @@ repos: priority: 20 # ── commit-msg hooks ──────────────────────────────────────────────────────── - - repo: local + - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook + rev: v9.24.0 hooks: - id: commitlint - name: commitlint - entry: npx commitlint --edit - language: system stages: [commit-msg] - always_run: true + additional_dependencies: ["@commitlint/config-conventional@20"] # ── pre-push hooks ───────────────────────────────────────────────────────── - repo: local diff --git a/install.sh b/install.sh index f8351db3..40a0d6e2 100755 --- a/install.sh +++ b/install.sh @@ -27,9 +27,9 @@ NEMOCLAW_VERSION="$(resolve_installer_version)" # --------------------------------------------------------------------------- if [[ -z "${NO_COLOR:-}" && -t 1 ]]; then if [[ "${COLORTERM:-}" == "truecolor" || "${COLORTERM:-}" == "24bit" ]]; then - C_GREEN=$'\033[38;2;118;185;0m' # #76B900 — exact NVIDIA green + C_GREEN=$'\033[38;2;118;185;0m' # #76B900 — exact NVIDIA green else - C_GREEN=$'\033[38;5;148m' # closest 256-color on dark backgrounds + C_GREEN=$'\033[38;5;148m' # closest 256-color on dark backgrounds fi 
C_BOLD=$'\033[1m' C_DIM=$'\033[2m' @@ -44,10 +44,13 @@ fi # --------------------------------------------------------------------------- # Helpers # --------------------------------------------------------------------------- -info() { printf "${C_CYAN}[INFO]${C_RESET} %s\n" "$*"; } -warn() { printf "${C_YELLOW}[WARN]${C_RESET} %s\n" "$*"; } -error() { printf "${C_RED}[ERROR]${C_RESET} %s\n" "$*" >&2; exit 1; } -ok() { printf " ${C_GREEN}✓${C_RESET} %s\n" "$*"; } +info() { printf "${C_CYAN}[INFO]${C_RESET} %s\n" "$*"; } +warn() { printf "${C_YELLOW}[WARN]${C_RESET} %s\n" "$*"; } +error() { + printf "${C_RED}[ERROR]${C_RESET} %s\n" "$*" >&2 + exit 1 +} +ok() { printf " ${C_GREEN}✓${C_RESET} %s\n" "$*"; } resolve_default_sandbox_name() { local registry_file="${HOME}/.nemoclaw/sandboxes.json" @@ -95,7 +98,7 @@ print_banner() { } print_done() { - local elapsed=$(( SECONDS - _INSTALL_START )) + local elapsed=$((SECONDS - _INSTALL_START)) local sandbox_name sandbox_name="$(resolve_default_sandbox_name)" info "=== Installation complete ===" @@ -146,7 +149,8 @@ usage() { # Stdout/stderr are captured; dumped only on failure. # Falls back to plain output when stdout is not a TTY (CI / piped installs). spin() { - local msg="$1"; shift + local msg="$1" + shift if [[ ! -t 1 ]]; then info "$msg" @@ -154,7 +158,8 @@ spin() { return fi - local log; log=$(mktemp) + local log + log=$(mktemp) "$@" >"$log" 2>&1 & local pid=$! i=0 local frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏') @@ -192,12 +197,12 @@ ORIGINAL_PATH="${PATH:-}" # Compare two semver strings (major.minor.patch). Returns 0 if $1 >= $2. version_gte() { local -a a b - IFS=. read -ra a <<< "$1" - IFS=. read -ra b <<< "$2" + IFS=. read -ra a <<<"$1" + IFS=. 
read -ra b <<<"$2" for i in 0 1 2; do local ai=${a[$i]:-0} bi=${b[$i]:-0} - if (( ai > bi )); then return 0; fi - if (( ai < bi )); then return 1; fi + if ((ai > bi)); then return 0; fi + if ((ai < bi)); then return 1; fi done return 0 } @@ -266,7 +271,7 @@ ensure_supported_runtime() { [[ "$node_major" =~ ^[0-9]+$ ]] || error "Could not determine Node.js version from '${node_version}'. ${RUNTIME_REQUIREMENT_MSG}" [[ "$npm_major" =~ ^[0-9]+$ ]] || error "Could not determine npm version from '${npm_version}'. ${RUNTIME_REQUIREMENT_MSG}" - if (( node_major < MIN_NODE_MAJOR || npm_major < MIN_NPM_MAJOR )); then + if ((node_major < MIN_NODE_MAJOR || npm_major < MIN_NPM_MAJOR)); then error "Unsupported runtime detected: Node.js ${node_version:-unknown}, npm ${npm_version:-unknown}. ${RUNTIME_REQUIREMENT_MSG} Upgrade Node.js and rerun the installer." fi @@ -289,7 +294,10 @@ install_nodejs() { local nvm_tmp nvm_tmp="$(mktemp)" curl -fsSL "https://raw.githubusercontent.com/nvm-sh/nvm/${NVM_VERSION}/install.sh" -o "$nvm_tmp" \ - || { rm -f "$nvm_tmp"; error "Failed to download nvm installer"; } + || { + rm -f "$nvm_tmp" + error "Failed to download nvm installer" + } local actual_hash if command_exists sha256sum; then actual_hash="$(sha256sum "$nvm_tmp" | awk '{print $1}')" @@ -297,7 +305,7 @@ install_nodejs() { actual_hash="$(shasum -a 256 "$nvm_tmp" | awk '{print $1}')" else warn "No SHA-256 tool found — skipping nvm integrity check" - actual_hash="$NVM_SHA256" # allow execution + actual_hash="$NVM_SHA256" # allow execution fi if [[ "$actual_hash" != "$NVM_SHA256" ]]; then rm -f "$nvm_tmp" @@ -342,7 +350,7 @@ get_vram_mb() { if [[ "$(uname -s)" == "Darwin" ]] && command_exists sysctl; then local bytes bytes=$(sysctl -n hw.memsize 2>/dev/null || echo 0) - echo $(( bytes / 1024 / 1024 )) + echo $((bytes / 1024 / 1024)) return fi echo 0 @@ -374,10 +382,10 @@ install_or_upgrade_ollama() { # Pull the appropriate model based on VRAM local vram_mb vram_mb=$(get_vram_mb) - local 
vram_gb=$(( vram_mb / 1024 )) + local vram_gb=$((vram_mb / 1024)) info "Detected ${vram_gb} GB VRAM" - if (( vram_gb >= 120 )); then + if ((vram_gb >= 120)); then info "Pulling nemotron-3-super:120b…" ollama pull nemotron-3-super:120b else @@ -407,13 +415,12 @@ pre_extract_openclaw() { info "Pre-extracting openclaw@${openclaw_version} with system tar (GH-503 workaround)…" local tmpdir tmpdir="$(mktemp -d)" - if npm pack "openclaw@${openclaw_version}" --pack-destination "$tmpdir" > /dev/null 2>&1; then + if npm pack "openclaw@${openclaw_version}" --pack-destination "$tmpdir" >/dev/null 2>&1; then local tgz tgz="$(find "$tmpdir" -maxdepth 1 -name 'openclaw-*.tgz' -print -quit)" if [[ -n "$tgz" && -f "$tgz" ]]; then if mkdir -p "${install_dir}/node_modules/openclaw" \ - && tar xzf "$tgz" -C "${install_dir}/node_modules/openclaw" --strip-components=1 - then + && tar xzf "$tgz" -C "${install_dir}/node_modules/openclaw" --strip-components=1; then info "openclaw pre-extracted successfully" else warn "Failed to extract openclaw tarball" @@ -436,8 +443,8 @@ pre_extract_openclaw() { install_nemoclaw() { if [[ -f "./package.json" ]] && grep -q '"name": "nemoclaw"' ./package.json 2>/dev/null; then info "NemoClaw package.json found in current directory — installing from source…" - spin "Preparing OpenClaw package" bash -lc "$(declare -f pre_extract_openclaw); pre_extract_openclaw \"\$1\"" _ "$(pwd)" || \ - warn "Pre-extraction failed — npm install may fail if openclaw tarball is broken" + spin "Preparing OpenClaw package" bash -lc "$(declare -f pre_extract_openclaw); pre_extract_openclaw \"\$1\"" _ "$(pwd)" \ + || warn "Pre-extraction failed — npm install may fail if openclaw tarball is broken" spin "Installing NemoClaw dependencies" npm install --ignore-scripts spin "Building NemoClaw plugin" bash -lc 'cd nemoclaw && npm install --ignore-scripts && npm run build' spin "Linking NemoClaw CLI" npm link @@ -450,8 +457,8 @@ install_nemoclaw() { rm -rf "$nemoclaw_src" mkdir -p 
"$(dirname "$nemoclaw_src")" spin "Cloning NemoClaw source" git clone --depth 1 https://github.com/NVIDIA/NemoClaw.git "$nemoclaw_src" - spin "Preparing OpenClaw package" bash -lc "$(declare -f pre_extract_openclaw); pre_extract_openclaw \"\$1\"" _ "$nemoclaw_src" || \ - warn "Pre-extraction failed — npm install may fail if openclaw tarball is broken" + spin "Preparing OpenClaw package" bash -lc "$(declare -f pre_extract_openclaw); pre_extract_openclaw \"\$1\"" _ "$nemoclaw_src" \ + || warn "Pre-extraction failed — npm install may fail if openclaw tarball is broken" spin "Installing NemoClaw dependencies" bash -lc "cd \"$nemoclaw_src\" && npm install --ignore-scripts" spin "Building NemoClaw plugin" bash -lc "cd \"$nemoclaw_src\"/nemoclaw && npm install --ignore-scripts && npm run build" spin "Linking NemoClaw CLI" bash -lc "cd \"$nemoclaw_src\" && npm link" @@ -559,9 +566,18 @@ main() { for arg in "$@"; do case "$arg" in --non-interactive) NON_INTERACTIVE=1 ;; - --version|-v) printf "nemoclaw-installer v%s\n" "$NEMOCLAW_VERSION"; exit 0 ;; - --help|-h) usage; exit 0 ;; - *) usage; error "Unknown option: $arg" ;; + --version | -v) + printf "nemoclaw-installer v%s\n" "$NEMOCLAW_VERSION" + exit 0 + ;; + --help | -h) + usage + exit 0 + ;; + *) + usage + error "Unknown option: $arg" + ;; esac done # Also honor env var diff --git a/scripts/debug.sh b/scripts/debug.sh index 24d6052e..045f38fc 100755 --- a/scripts/debug.sh +++ b/scripts/debug.sh @@ -26,9 +26,12 @@ RED='\033[0;31m' CYAN='\033[0;36m' NC='\033[0m' -info() { echo -e "${GREEN}[debug]${NC} $1"; } -warn() { echo -e "${YELLOW}[debug]${NC} $1"; } -fail() { echo -e "${RED}[debug]${NC} $1"; exit 1; } +info() { echo -e "${GREEN}[debug]${NC} $1"; } +warn() { echo -e "${YELLOW}[debug]${NC} $1"; } +fail() { + echo -e "${RED}[debug]${NC} $1" + exit 1 +} section() { echo -e "\n${CYAN}═══ $1 ═══${NC}\n"; } # ── Parse flags ────────────────────────────────────────────────── @@ -47,11 +50,11 @@ while [ $# -gt 0 ]; do 
QUICK=true shift ;; - --output|-o) + --output | -o) OUTPUT="${2:?--output requires a path}" shift 2 ;; - --help|-h) + --help | -h) cat <<'USAGE' Usage: scripts/debug.sh [OPTIONS] @@ -130,12 +133,12 @@ collect() { local rc=0 local tmpout="${outfile}.raw" if [ -n "$TIMEOUT_BIN" ]; then - "$TIMEOUT_BIN" 30 "$@" > "$tmpout" 2>&1 || rc=$? + "$TIMEOUT_BIN" 30 "$@" >"$tmpout" 2>&1 || rc=$? else - "$@" > "$tmpout" 2>&1 || rc=$? + "$@" >"$tmpout" 2>&1 || rc=$? fi - redact < "$tmpout" > "$outfile" + redact <"$tmpout" >"$outfile" rm -f "$tmpout" cat "$outfile" @@ -244,14 +247,14 @@ fi if command -v openshell &>/dev/null \ && openshell sandbox list 2>/dev/null \ - | awk 'NF { if (tolower($1) == "name") next; print $1 }' \ + | awk 'NF { if (tolower($1) == "name") next; print $1 }' \ | grep -Fxq -- "$SANDBOX_NAME"; then section "Sandbox Internals" # Build a temporary SSH config so we can run commands inside the sandbox. # This follows the pattern from OpenShell's own demo.sh. SANDBOX_SSH_CONFIG=$(mktemp "${TMPDIR_BASE}/nemoclaw-ssh-XXXXXX") - if openshell sandbox ssh-config "$SANDBOX_NAME" > "$SANDBOX_SSH_CONFIG" 2>/dev/null; then + if openshell sandbox ssh-config "$SANDBOX_NAME" >"$SANDBOX_SSH_CONFIG" 2>/dev/null; then SANDBOX_SSH_HOST="openshell-${SANDBOX_NAME}" SANDBOX_SSH_OPTS=(-F "$SANDBOX_SSH_CONFIG" -o StrictHostKeyChecking=no -o ConnectTimeout=10) diff --git a/scripts/setup-spark.sh b/scripts/setup-spark.sh index 5911185e..9ab3ed80 100755 --- a/scripts/setup-spark.sh +++ b/scripts/setup-spark.sh @@ -28,7 +28,10 @@ NC='\033[0m' info() { echo -e "${GREEN}>>>${NC} $1"; } warn() { echo -e "${YELLOW}>>>${NC} $1"; } -fail() { echo -e "${RED}>>>${NC} $1"; exit 1; } +fail() { + echo -e "${RED}>>>${NC} $1" + exit 1 +} # ── Pre-flight checks ───────────────────────────────────────────── @@ -46,7 +49,7 @@ if [ -z "$REAL_USER" ]; then warn "Could not detect non-root user. Docker group will not be configured." fi -command -v docker > /dev/null || fail "Docker not found. 
DGX Spark should have Docker pre-installed." +command -v docker >/dev/null || fail "Docker not found. DGX Spark should have Docker pre-installed." # ── 1. Docker group ─────────────────────────────────────────────── @@ -110,7 +113,7 @@ with open('$DAEMON_JSON', 'w') as f: else info "Creating Docker daemon config with cgroupns=host..." mkdir -p "$(dirname "$DAEMON_JSON")" - echo '{ "default-cgroupns-mode": "host" }' > "$DAEMON_JSON" + echo '{ "default-cgroupns-mode": "host" }' >"$DAEMON_JSON" NEEDS_RESTART=true fi @@ -121,7 +124,7 @@ if [ "$NEEDS_RESTART" = true ]; then systemctl restart docker # Wait for Docker to be ready for i in 1 2 3 4 5 6 7 8 9 10; do - if docker info > /dev/null 2>&1; then + if docker info >/dev/null 2>&1; then break fi [ "$i" -eq 10 ] && fail "Docker didn't come back after restart. Check 'systemctl status docker'." diff --git a/scripts/test-inference-local.sh b/scripts/test-inference-local.sh index 4404c257..ebdfbcb0 100755 --- a/scripts/test-inference-local.sh +++ b/scripts/test-inference-local.sh @@ -3,5 +3,5 @@ # SPDX-License-Identifier: Apache-2.0 # Test inference.local routing through OpenShell provider (local vLLM) -echo '{"model":"nvidia/nemotron-3-nano-30b-a3b","messages":[{"role":"user","content":"say hello"}]}' > /tmp/req.json +echo '{"model":"nvidia/nemotron-3-nano-30b-a3b","messages":[{"role":"user","content":"say hello"}]}' >/tmp/req.json curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @/tmp/req.json diff --git a/scripts/test-inference.sh b/scripts/test-inference.sh index 332f1b98..5553eaed 100755 --- a/scripts/test-inference.sh +++ b/scripts/test-inference.sh @@ -3,5 +3,5 @@ # SPDX-License-Identifier: Apache-2.0 # Test inference.local routing through OpenShell provider -echo '{"model":"nvidia/nemotron-3-super-120b-a12b","messages":[{"role":"user","content":"say hello"}]}' > /tmp/req.json +echo '{"model":"nvidia/nemotron-3-super-120b-a12b","messages":[{"role":"user","content":"say 
hello"}]}' >/tmp/req.json curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @/tmp/req.json diff --git a/test/e2e/test-double-onboard.sh b/test/e2e/test-double-onboard.sh index 3245ee95..13dee992 100755 --- a/test/e2e/test-double-onboard.sh +++ b/test/e2e/test-double-onboard.sh @@ -29,11 +29,26 @@ FAIL=0 SKIP=0 TOTAL=0 -pass() { ((PASS++)); ((TOTAL++)); printf '\033[32m PASS: %s\033[0m\n' "$1"; } -fail() { ((FAIL++)); ((TOTAL++)); printf '\033[31m FAIL: %s\033[0m\n' "$1"; } -skip() { ((SKIP++)); ((TOTAL++)); printf '\033[33m SKIP: %s\033[0m\n' "$1"; } -section() { echo ""; printf '\033[1;36m=== %s ===\033[0m\n' "$1"; } -info() { printf '\033[1;34m [info]\033[0m %s\n' "$1"; } +pass() { + ((PASS++)) + ((TOTAL++)) + printf '\033[32m PASS: %s\033[0m\n' "$1" +} +fail() { + ((FAIL++)) + ((TOTAL++)) + printf '\033[31m FAIL: %s\033[0m\n' "$1" +} +skip() { + ((SKIP++)) + ((TOTAL++)) + printf '\033[33m SKIP: %s\033[0m\n' "$1" +} +section() { + echo "" + printf '\033[1;36m=== %s ===\033[0m\n' "$1" +} +info() { printf '\033[1;34m [info]\033[0m %s\n' "$1"; } SANDBOX_A="e2e-double-a" SANDBOX_B="e2e-double-b" @@ -48,7 +63,7 @@ info "Destroying any leftover test sandboxes/gateway from previous runs..." # the nemoclaw registry at ~/.nemoclaw/sandboxes.json. Stale registry # entries from a previous run would cause Phase 2 to exit with # "Sandbox already exists" before the test even starts. 
-if command -v nemoclaw > /dev/null 2>&1; then +if command -v nemoclaw >/dev/null 2>&1; then nemoclaw "$SANDBOX_A" destroy 2>/dev/null || true nemoclaw "$SANDBOX_B" destroy 2>/dev/null || true fi @@ -63,21 +78,21 @@ pass "Pre-cleanup complete" # ══════════════════════════════════════════════════════════════════ section "Phase 1: Prerequisites" -if docker info > /dev/null 2>&1; then +if docker info >/dev/null 2>&1; then pass "Docker is running" else fail "Docker is not running — cannot continue" exit 1 fi -if command -v openshell > /dev/null 2>&1; then +if command -v openshell >/dev/null 2>&1; then pass "openshell CLI installed" else fail "openshell CLI not found — cannot continue" exit 1 fi -if command -v nemoclaw > /dev/null 2>&1; then +if command -v nemoclaw >/dev/null 2>&1; then pass "nemoclaw CLI installed" else fail "nemoclaw CLI not found — cannot continue" @@ -102,7 +117,7 @@ ONBOARD_LOG="$(mktemp)" NEMOCLAW_NON_INTERACTIVE=1 \ NEMOCLAW_SANDBOX_NAME="$SANDBOX_A" \ NEMOCLAW_POLICY_MODE=skip \ - nemoclaw onboard --non-interactive > "$ONBOARD_LOG" 2>&1 + nemoclaw onboard --non-interactive >"$ONBOARD_LOG" 2>&1 exit1=$? output1="$(cat "$ONBOARD_LOG")" rm -f "$ONBOARD_LOG" @@ -122,7 +137,7 @@ openshell gateway info -g nemoclaw 2>/dev/null | grep -q "nemoclaw" \ && pass "Gateway is still running (stale state)" \ || fail "Gateway is not running after first onboard" -openshell sandbox get "$SANDBOX_A" > /dev/null 2>&1 \ +openshell sandbox get "$SANDBOX_A" >/dev/null 2>&1 \ && pass "Sandbox '$SANDBOX_A' exists in openshell" \ || fail "Sandbox '$SANDBOX_A' not found in openshell" @@ -143,7 +158,7 @@ NEMOCLAW_NON_INTERACTIVE=1 \ NEMOCLAW_SANDBOX_NAME="$SANDBOX_A" \ NEMOCLAW_RECREATE_SANDBOX=1 \ NEMOCLAW_POLICY_MODE=skip \ - nemoclaw onboard --non-interactive > "$ONBOARD_LOG" 2>&1 + nemoclaw onboard --non-interactive >"$ONBOARD_LOG" 2>&1 exit2=$? 
output2="$(cat "$ONBOARD_LOG")" rm -f "$ONBOARD_LOG" @@ -185,7 +200,7 @@ ONBOARD_LOG="$(mktemp)" NEMOCLAW_NON_INTERACTIVE=1 \ NEMOCLAW_SANDBOX_NAME="$SANDBOX_B" \ NEMOCLAW_POLICY_MODE=skip \ - nemoclaw onboard --non-interactive > "$ONBOARD_LOG" 2>&1 + nemoclaw onboard --non-interactive >"$ONBOARD_LOG" 2>&1 exit3=$? output3="$(cat "$ONBOARD_LOG")" rm -f "$ONBOARD_LOG" @@ -224,11 +239,11 @@ openshell sandbox delete "$SANDBOX_B" 2>/dev/null || true openshell forward stop 18789 2>/dev/null || true openshell gateway destroy -g nemoclaw 2>/dev/null || true -openshell sandbox get "$SANDBOX_A" > /dev/null 2>&1 \ +openshell sandbox get "$SANDBOX_A" >/dev/null 2>&1 \ && fail "Sandbox '$SANDBOX_A' still exists after cleanup" \ || pass "Sandbox '$SANDBOX_A' cleaned up" -openshell sandbox get "$SANDBOX_B" > /dev/null 2>&1 \ +openshell sandbox get "$SANDBOX_B" >/dev/null 2>&1 \ && fail "Sandbox '$SANDBOX_B' still exists after cleanup" \ || pass "Sandbox '$SANDBOX_B' cleaned up" diff --git a/test/e2e/test-full-e2e.sh b/test/e2e/test-full-e2e.sh old mode 100644 new mode 100755 From 103f64477d0670df74c32c1fa91b34120271f94f Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:07:36 +0200 Subject: [PATCH 13/20] chore: add explicit priority to all prek hooks commitlint, tsc-check, and pyright-check were missing priority. Set all three to priority 10 (validation tier). 
--- .pre-commit-config.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8269c4fa..37973836 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -174,6 +174,7 @@ repos: - id: commitlint stages: [commit-msg] additional_dependencies: ["@commitlint/config-conventional@20"] + priority: 10 # ── pre-push hooks ───────────────────────────────────────────────────────── - repo: local @@ -185,6 +186,7 @@ repos: pass_filenames: false always_run: true stages: [pre-push] + priority: 10 - id: pyright-check name: Pyright (nemoclaw-blueprint) @@ -193,6 +195,7 @@ repos: pass_filenames: false always_run: true stages: [pre-push] + priority: 10 default_language_version: python: python3 From 3356c4992cbb52df08cd7d795d00aef82ffc7588 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:07:43 +0200 Subject: [PATCH 14/20] chore: apply shfmt formatting to all shell scripts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Auto-formatted by shfmt -i 2 -ci -bn to pass the prek shfmt hook. No logic changes — indentation and spacing only. 
--- nemoclaw-blueprint/migrations/snapshot.py | 0 scripts/check-spdx-headers.sh | 4 +- scripts/fix-coredns.sh | 6 +- scripts/install.sh | 73 ++++++++++++----------- scripts/lib/runtime.sh | 15 +++-- scripts/nemoclaw-start.sh | 9 +-- scripts/setup.sh | 27 +++++---- scripts/smoke-macos-install.sh | 21 ++++--- scripts/start-services.sh | 23 +++---- scripts/walkthrough.sh | 7 ++- test/e2e-test.sh | 17 +++--- uninstall.sh | 69 +++++++++++---------- 12 files changed, 148 insertions(+), 123 deletions(-) mode change 100644 => 100755 nemoclaw-blueprint/migrations/snapshot.py mode change 100644 => 100755 scripts/install.sh mode change 100644 => 100755 scripts/lib/runtime.sh mode change 100644 => 100755 scripts/smoke-macos-install.sh diff --git a/nemoclaw-blueprint/migrations/snapshot.py b/nemoclaw-blueprint/migrations/snapshot.py old mode 100644 new mode 100755 diff --git a/scripts/check-spdx-headers.sh b/scripts/check-spdx-headers.sh index e67d0d49..4f56378f 100755 --- a/scripts/check-spdx-headers.sh +++ b/scripts/check-spdx-headers.sh @@ -13,11 +13,11 @@ LICENSE="SPDX-License-Identifier: Apache-2.0" failed=0 for file in "$@"; do file_head="$(head -n 5 -- "$file")" - if ! grep -Fq "$COPYRIGHT" <<< "$file_head"; then + if ! grep -Fq "$COPYRIGHT" <<<"$file_head"; then echo "Missing SPDX-FileCopyrightText: $file" failed=1 fi - if ! grep -Fq "$LICENSE" <<< "$file_head"; then + if ! grep -Fq "$LICENSE" <<<"$file_head"; then echo "Missing SPDX-License-Identifier: $file" failed=1 fi diff --git a/scripts/fix-coredns.sh b/scripts/fix-coredns.sh index 512ba851..9b587ab3 100755 --- a/scripts/fix-coredns.sh +++ b/scripts/fix-coredns.sh @@ -57,11 +57,11 @@ fi echo "Patching CoreDNS to forward to $UPSTREAM_DNS..." 
-docker exec "$CLUSTER" kubectl patch configmap coredns -n kube-system --type merge -p "{\"data\":{\"Corefile\":\".:53 {\\n errors\\n health\\n ready\\n kubernetes cluster.local in-addr.arpa ip6.arpa {\\n pods insecure\\n fallthrough in-addr.arpa ip6.arpa\\n }\\n hosts /etc/coredns/NodeHosts {\\n ttl 60\\n reload 15s\\n fallthrough\\n }\\n prometheus :9153\\n cache 30\\n loop\\n reload\\n loadbalance\\n forward . $UPSTREAM_DNS\\n}\\n\"}}" > /dev/null +docker exec "$CLUSTER" kubectl patch configmap coredns -n kube-system --type merge -p "{\"data\":{\"Corefile\":\".:53 {\\n errors\\n health\\n ready\\n kubernetes cluster.local in-addr.arpa ip6.arpa {\\n pods insecure\\n fallthrough in-addr.arpa ip6.arpa\\n }\\n hosts /etc/coredns/NodeHosts {\\n ttl 60\\n reload 15s\\n fallthrough\\n }\\n prometheus :9153\\n cache 30\\n loop\\n reload\\n loadbalance\\n forward . $UPSTREAM_DNS\\n}\\n\"}}" >/dev/null -docker exec "$CLUSTER" kubectl rollout restart deploy/coredns -n kube-system > /dev/null +docker exec "$CLUSTER" kubectl rollout restart deploy/coredns -n kube-system >/dev/null echo "CoreDNS patched. Waiting for rollout..." -docker exec "$CLUSTER" kubectl rollout status deploy/coredns -n kube-system --timeout=30s > /dev/null +docker exec "$CLUSTER" kubectl rollout status deploy/coredns -n kube-system --timeout=30s >/dev/null echo "Done. DNS should resolve in ~10 seconds." 
diff --git a/scripts/install.sh b/scripts/install.sh old mode 100644 new mode 100755 index 9e3dc1a2..c7594023 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -14,9 +14,12 @@ GREEN='\033[0;32m' YELLOW='\033[1;33m' NC='\033[0m' -info() { echo -e "${GREEN}[install]${NC} $1"; } -warn() { echo -e "${YELLOW}[install]${NC} $1"; } -fail() { echo -e "${RED}[install]${NC} $1"; exit 1; } +info() { echo -e "${GREEN}[install]${NC} $1"; } +warn() { echo -e "${YELLOW}[install]${NC} $1"; } +fail() { + echo -e "${RED}[install]${NC} $1" + exit 1 +} define_runtime_helpers() { socket_exists() { @@ -37,8 +40,7 @@ define_runtime_helpers() { for socket_path in \ "$home_dir/.colima/default/docker.sock" \ - "$home_dir/.config/colima/default/docker.sock" - do + "$home_dir/.config/colima/default/docker.sock"; do if socket_exists "$socket_path"; then printf '%s\n' "$socket_path" return 0 @@ -115,7 +117,7 @@ refresh_path() { npm_bin="$(npm config get prefix 2>/dev/null)/bin" || true if [ -n "$npm_bin" ] && [ -d "$npm_bin" ]; then case ":$PATH:" in - *":$npm_bin:"*) ;; # already on PATH + *":$npm_bin:"*) ;; # already on PATH *) export PATH="$npm_bin:$PATH" ;; esac fi @@ -131,14 +133,14 @@ ARCH="$(uname -m)" case "$OS" in Darwin) OS_LABEL="macOS" ;; - Linux) OS_LABEL="Linux" ;; - *) fail "Unsupported OS: $OS" ;; + Linux) OS_LABEL="Linux" ;; + *) fail "Unsupported OS: $OS" ;; esac case "$ARCH" in - x86_64|amd64) ARCH_LABEL="x86_64" ;; - aarch64|arm64) ARCH_LABEL="aarch64" ;; - *) fail "Unsupported architecture: $ARCH" ;; + x86_64 | amd64) ARCH_LABEL="x86_64" ;; + aarch64 | arm64) ARCH_LABEL="aarch64" ;; + *) fail "Unsupported architecture: $ARCH" ;; esac info "Detected $OS_LABEL ($ARCH_LABEL)" @@ -148,16 +150,16 @@ info "Detected $OS_LABEL ($ARCH_LABEL)" NODE_MGR="none" NEED_RESHIM=false -if command -v asdf > /dev/null 2>&1 && asdf plugin list 2>/dev/null | grep -q nodejs; then +if command -v asdf >/dev/null 2>&1 && asdf plugin list 2>/dev/null | grep -q nodejs; then NODE_MGR="asdf" elif [ 
-n "${NVM_DIR:-}" ] && [ -s "${NVM_DIR}/nvm.sh" ]; then NODE_MGR="nvm" elif [ -s "$HOME/.nvm/nvm.sh" ]; then export NVM_DIR="$HOME/.nvm" NODE_MGR="nvm" -elif command -v fnm > /dev/null 2>&1; then +elif command -v fnm >/dev/null 2>&1; then NODE_MGR="fnm" -elif command -v brew > /dev/null 2>&1 && [ "$OS" = "Darwin" ]; then +elif command -v brew >/dev/null 2>&1 && [ "$OS" = "Darwin" ]; then NODE_MGR="brew" elif [ "$OS" = "Linux" ]; then NODE_MGR="nodesource" @@ -170,8 +172,8 @@ version_major() { } ensure_supported_runtime() { - command -v node > /dev/null 2>&1 || fail "${RUNTIME_REQUIREMENT_MSG} Node.js was not found on PATH." - command -v npm > /dev/null 2>&1 || fail "${RUNTIME_REQUIREMENT_MSG} npm was not found on PATH." + command -v node >/dev/null 2>&1 || fail "${RUNTIME_REQUIREMENT_MSG} Node.js was not found on PATH." + command -v npm >/dev/null 2>&1 || fail "${RUNTIME_REQUIREMENT_MSG} npm was not found on PATH." local node_version npm_version node_major npm_major node_version="$(node -v 2>/dev/null || true)" @@ -182,7 +184,7 @@ ensure_supported_runtime() { [[ "$node_major" =~ ^[0-9]+$ ]] || fail "Could not determine Node.js version from '${node_version}'. ${RUNTIME_REQUIREMENT_MSG}" [[ "$npm_major" =~ ^[0-9]+$ ]] || fail "Could not determine npm version from '${npm_version}'. ${RUNTIME_REQUIREMENT_MSG}" - if (( node_major < MIN_NODE_MAJOR || npm_major < MIN_NPM_MAJOR )); then + if ((node_major < MIN_NODE_MAJOR || npm_major < MIN_NPM_MAJOR)); then fail "Unsupported runtime detected: Node.js ${node_version:-unknown}, npm ${npm_version:-unknown}. ${RUNTIME_REQUIREMENT_MSG} Upgrade Node.js and rerun the installer." fi @@ -193,7 +195,7 @@ ensure_supported_runtime() { install_node() { local current_major="" - if command -v node > /dev/null 2>&1; then + if command -v node >/dev/null 2>&1; then current_major="$(node -v 2>/dev/null | sed 's/^v//' | cut -d. 
-f1)" fi @@ -230,8 +232,8 @@ install_node() { brew link --overwrite node@22 2>/dev/null || true ;; nodesource) - curl -fsSL https://deb.nodesource.com/setup_22.x | sudo -E bash - > /dev/null 2>&1 - sudo apt-get install -y -qq nodejs > /dev/null 2>&1 + curl -fsSL https://deb.nodesource.com/setup_22.x | sudo -E bash - >/dev/null 2>&1 + sudo apt-get install -y -qq nodejs >/dev/null 2>&1 ;; none) fail "No Node.js version manager found. Install Node.js 22 manually, then re-run." @@ -247,12 +249,12 @@ ensure_supported_runtime # ── Install Docker ─────────────────────────────────────────────── install_docker() { - if command -v docker > /dev/null 2>&1 && docker info > /dev/null 2>&1; then + if command -v docker >/dev/null 2>&1 && docker info >/dev/null 2>&1; then info "Docker already running" return 0 fi - if command -v docker > /dev/null 2>&1; then + if command -v docker >/dev/null 2>&1; then # Docker installed but not running if [ "$OS" = "Darwin" ]; then local colima_socket="" @@ -272,7 +274,7 @@ install_docker() { fail "Docker Desktop appears to be installed but is not running. Start Docker Desktop and re-run." fi - if command -v colima > /dev/null 2>&1; then + if command -v colima >/dev/null 2>&1; then info "Starting Colima..." colima start return 0 @@ -285,7 +287,7 @@ install_docker() { case "$OS" in Darwin) - if ! command -v brew > /dev/null 2>&1; then + if ! command -v brew >/dev/null 2>&1; then fail "Homebrew required to install Docker on macOS. Install from https://brew.sh" fi info "Installing Colima + Docker CLI via Homebrew..." @@ -294,14 +296,14 @@ install_docker() { colima start ;; Linux) - sudo apt-get update -qq > /dev/null 2>&1 - sudo apt-get install -y -qq docker.io > /dev/null 2>&1 + sudo apt-get update -qq >/dev/null 2>&1 + sudo apt-get install -y -qq docker.io >/dev/null 2>&1 sudo usermod -aG docker "$(whoami)" info "Docker installed. You may need to log out and back in for group changes." ;; esac - if ! docker info > /dev/null 2>&1; then + if ! 
docker info >/dev/null 2>&1; then fail "Docker installed but not running. Start Docker and re-run." fi @@ -313,7 +315,7 @@ install_docker # ── Install OpenShell CLI binary ───────────────────────────────── install_openshell() { - if command -v openshell > /dev/null 2>&1; then + if command -v openshell >/dev/null 2>&1; then info "openshell already installed: $(openshell --version 2>&1 || echo 'unknown')" return 0 fi @@ -323,20 +325,20 @@ install_openshell() { case "$OS" in Darwin) case "$ARCH_LABEL" in - x86_64) ASSET="openshell-x86_64-apple-darwin.tar.gz" ;; + x86_64) ASSET="openshell-x86_64-apple-darwin.tar.gz" ;; aarch64) ASSET="openshell-aarch64-apple-darwin.tar.gz" ;; esac ;; Linux) case "$ARCH_LABEL" in - x86_64) ASSET="openshell-x86_64-unknown-linux-musl.tar.gz" ;; + x86_64) ASSET="openshell-x86_64-unknown-linux-musl.tar.gz" ;; aarch64) ASSET="openshell-aarch64-unknown-linux-musl.tar.gz" ;; esac ;; esac tmpdir="$(mktemp -d)" - if command -v gh > /dev/null 2>&1; then + if command -v gh >/dev/null 2>&1; then GH_TOKEN="${GITHUB_TOKEN:-}" gh release download --repo NVIDIA/OpenShell \ --pattern "$ASSET" --dir "$tmpdir" else @@ -377,13 +379,12 @@ pre_extract_openclaw() { info "Pre-extracting openclaw@${openclaw_version} with system tar (GH-503 workaround)…" local tmpdir tmpdir="$(mktemp -d)" - if npm pack "openclaw@${openclaw_version}" --pack-destination "$tmpdir" > /dev/null 2>&1; then + if npm pack "openclaw@${openclaw_version}" --pack-destination "$tmpdir" >/dev/null 2>&1; then local tgz tgz="$(find "$tmpdir" -maxdepth 1 -name 'openclaw-*.tgz' -print -quit)" if [ -n "$tgz" ] && [ -f "$tgz" ]; then if mkdir -p "${install_dir}/node_modules/openclaw" \ - && tar xzf "$tgz" -C "${install_dir}/node_modules/openclaw" --strip-components=1 - then + && tar xzf "$tgz" -C "${install_dir}/node_modules/openclaw" --strip-components=1; then info "openclaw pre-extracted successfully" else warn "Failed to extract openclaw tarball" @@ -431,12 +432,12 @@ refresh_path # ── Verify 
─────────────────────────────────────────────────────── -if ! command -v nemoclaw > /dev/null 2>&1; then +if ! command -v nemoclaw >/dev/null 2>&1; then # Try refreshing PATH one more time refresh_path fi -if ! command -v nemoclaw > /dev/null 2>&1; then +if ! command -v nemoclaw >/dev/null 2>&1; then npm_bin="$(npm config get prefix 2>/dev/null)/bin" || true if [ -n "$npm_bin" ] && [ -x "$npm_bin/nemoclaw" ]; then warn "nemoclaw installed at $npm_bin/nemoclaw but not on current PATH." diff --git a/scripts/lib/runtime.sh b/scripts/lib/runtime.sh old mode 100644 new mode 100755 index 3bf54684..a6bba65f --- a/scripts/lib/runtime.sh +++ b/scripts/lib/runtime.sh @@ -20,8 +20,7 @@ find_colima_docker_socket() { for socket_path in \ "$home_dir/.colima/default/docker.sock" \ - "$home_dir/.config/colima/default/docker.sock" - do + "$home_dir/.config/colima/default/docker.sock"; do if socket_exists "$socket_path"; then printf '%s\n' "$socket_path" return 0 @@ -69,7 +68,7 @@ docker_host_runtime() { local docker_host="${1:-${DOCKER_HOST:-}}" case "$docker_host" in - unix://*"/.colima/default/docker.sock"|unix://*"/.config/colima/default/docker.sock") + unix://*"/.colima/default/docker.sock" | unix://*"/.config/colima/default/docker.sock") printf 'colima\n' ;; unix://*"/.docker/run/docker.sock") @@ -89,7 +88,7 @@ infer_container_runtime_from_info() { local normalized normalized="$(printf '%s' "$info" | tr '[:upper:]' '[:lower:]')" - if [[ -z "${normalized// }" ]]; then + if [[ -z "${normalized// /}" ]]; then printf 'unknown\n' elif [[ "$normalized" == *podman* ]]; then printf 'podman\n' @@ -128,13 +127,13 @@ first_non_loopback_nameserver() { } get_colima_vm_nameserver() { - if ! command -v colima > /dev/null 2>&1; then + if ! 
command -v colima >/dev/null 2>&1; then return 1 fi local profile="${COLIMA_PROFILE:-default}" local resolv_conf - resolv_conf="$(colima ssh --profile "$profile" -- cat /etc/resolv.conf < /dev/null 2>/dev/null || true)" + resolv_conf="$(colima ssh --profile "$profile" -- cat /etc/resolv.conf </dev/null 2>/dev/null || true)" first_non_loopback_nameserver "$resolv_conf" } @@ -217,10 +216,10 @@ check_local_provider_health() { case "$provider" in vllm-local) - curl -sf http://localhost:8000/v1/models > /dev/null 2>&1 + curl -sf http://localhost:8000/v1/models >/dev/null 2>&1 ;; ollama-local) - curl -sf http://localhost:11434/api/tags > /dev/null 2>&1 + curl -sf http://localhost:11434/api/tags >/dev/null 2>&1 ;; *) return 1 diff --git a/scripts/nemoclaw-start.sh b/scripts/nemoclaw-start.sh index d28b9637..05350642 100755 --- a/scripts/nemoclaw-start.sh +++ b/scripts/nemoclaw-start.sh @@ -40,7 +40,8 @@ PYAUTH print_dashboard_urls() { local token chat_ui_base local_url remote_url - token="$(python3 - <<'PYTOKEN' + token="$( + python3 - <<'PYTOKEN' import json import os path = os.path.expanduser('~/.openclaw/openclaw.json') @@ -51,7 +52,7 @@ except Exception: else: print(cfg.get('gateway', {}).get('auth', {}).get('token', '')) PYTOKEN -)" + )" chat_ui_base="${CHAT_UI_URL%/}" local_url="http://127.0.0.1:${PUBLIC_PORT}/" @@ -66,7 +67,7 @@ PYTOKEN } start_auto_pair() { - nohup python3 - <<'PYAUTOPAIR' >> /tmp/gateway.log 2>&1 & + nohup python3 - <<'PYAUTOPAIR' >>/tmp/gateway.log 2>&1 & import json import subprocess import time @@ -136,7 +137,7 @@ if [ ${#NEMOCLAW_CMD[@]} -gt 0 ]; then exec "${NEMOCLAW_CMD[@]}" fi -nohup openclaw gateway run > /tmp/gateway.log 2>&1 & +nohup openclaw gateway run >/tmp/gateway.log 2>&1 & echo "[gateway] openclaw gateway launched (pid $!)" start_auto_pair print_dashboard_urls diff --git a/scripts/setup.sh b/scripts/setup.sh index 22b3ccfe..6aeb6808 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -36,7 +36,10 @@ REPO_DIR="$(cd "$SCRIPT_DIR/.."
&& pwd)" info() { echo -e "${GREEN}>>>${NC} $1"; } warn() { echo -e "${YELLOW}>>>${NC} $1"; } -fail() { echo -e "${RED}>>>${NC} $1"; exit 1; } +fail() { + echo -e "${RED}>>>${NC} $1" + exit 1 +} upsert_provider() { local name="$1" @@ -49,7 +52,7 @@ upsert_provider() { --config "$config" 2>&1 | grep -q "AlreadyExists"; then openshell provider update "$name" \ --credential "$credential" \ - --config "$config" > /dev/null + --config "$config" >/dev/null info "Updated $name provider" else info "Created $name provider" @@ -78,8 +81,8 @@ if docker_host="$(detect_docker_host)"; then fi # Check prerequisites -command -v openshell > /dev/null || fail "openshell CLI not found. Install the binary from https://github.com/NVIDIA/OpenShell/releases" -command -v docker > /dev/null || fail "docker not found" +command -v openshell >/dev/null || fail "openshell CLI not found. Install the binary from https://github.com/NVIDIA/OpenShell/releases" +command -v docker >/dev/null || fail "docker not found" [ -n "${NVIDIA_API_KEY:-}" ] || fail "NVIDIA_API_KEY not set. Get one from build.nvidia.com" CONTAINER_RUNTIME="$(infer_container_runtime_from_info "$(docker info 2>/dev/null || true)")" @@ -105,9 +108,9 @@ fi # 1. Gateway — always start fresh to avoid stale state info "Starting OpenShell gateway..." -openshell gateway destroy -g nemoclaw > /dev/null 2>&1 || true +openshell gateway destroy -g nemoclaw >/dev/null 2>&1 || true GATEWAY_ARGS=(--name nemoclaw) -command -v nvidia-smi > /dev/null 2>&1 && GATEWAY_ARGS+=(--gpu) +command -v nvidia-smi >/dev/null 2>&1 && GATEWAY_ARGS+=(--gpu) openshell gateway start "${GATEWAY_ARGS[@]}" 2>&1 | grep -E "Gateway|✓|Error|error" || true # Verify gateway is actually healthy (may need a moment after start) @@ -148,15 +151,15 @@ fi # 4a. Ollama (macOS local inference) if [ "$(uname -s)" = "Darwin" ]; then - if ! command -v ollama > /dev/null 2>&1; then + if ! command -v ollama >/dev/null 2>&1; then info "Installing Ollama..." 
brew install ollama 2>/dev/null || warn "Ollama install failed (brew required). Install manually: https://ollama.com" fi - if command -v ollama > /dev/null 2>&1; then + if command -v ollama >/dev/null 2>&1; then # Start Ollama service if not running if ! check_local_provider_health "ollama-local"; then info "Starting Ollama service..." - OLLAMA_HOST=0.0.0.0:11434 ollama serve > /dev/null 2>&1 & + OLLAMA_HOST=0.0.0.0:11434 ollama serve >/dev/null 2>&1 & sleep 2 fi OLLAMA_LOCAL_BASE_URL="$(get_local_provider_base_url "ollama-local")" @@ -170,11 +173,11 @@ fi # 4b. Inference route — default to nvidia-nim info "Setting inference route to nvidia-nim / Nemotron 3 Super..." -openshell inference set --no-verify --provider nvidia-nim --model nvidia/nemotron-3-super-120b-a12b > /dev/null 2>&1 +openshell inference set --no-verify --provider nvidia-nim --model nvidia/nemotron-3-super-120b-a12b >/dev/null 2>&1 # 5. Build and create sandbox info "Deleting old ${SANDBOX_NAME} sandbox (if any)..." -openshell sandbox delete "$SANDBOX_NAME" > /dev/null 2>&1 || true +openshell sandbox delete "$SANDBOX_NAME" >/dev/null 2>&1 || true info "Building and creating NemoClaw sandbox (this takes a few minutes on first run)..." @@ -192,7 +195,7 @@ CREATE_LOG=$(mktemp /tmp/nemoclaw-create-XXXXXX.log) set +e openshell sandbox create --from "$BUILD_CTX/Dockerfile" --name "$SANDBOX_NAME" \ --provider nvidia-nim \ - -- env NVIDIA_API_KEY="$NVIDIA_API_KEY" > "$CREATE_LOG" 2>&1 + -- env NVIDIA_API_KEY="$NVIDIA_API_KEY" >"$CREATE_LOG" 2>&1 CREATE_RC=$? 
set -e rm -rf "$BUILD_CTX" diff --git a/scripts/smoke-macos-install.sh b/scripts/smoke-macos-install.sh old mode 100644 new mode 100755 index 443ef86a..9dfd3d0a --- a/scripts/smoke-macos-install.sh +++ b/scripts/smoke-macos-install.sh @@ -14,7 +14,10 @@ NC='\033[0m' info() { echo -e "${GREEN}[smoke]${NC} $1"; } warn() { echo -e "${YELLOW}[smoke]${NC} $1"; } -fail() { echo -e "${RED}[smoke]${NC} $1"; exit 1; } +fail() { + echo -e "${RED}[smoke]${NC} $1" + exit 1 +} SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)" @@ -97,7 +100,7 @@ while [ $# -gt 0 ]; do DELETE_MODELS=true shift ;; - -h|--help) + -h | --help) usage exit 0 ;; @@ -151,7 +154,7 @@ ensure_clean_start() { fail "Existing NemoClaw/OpenShell state detected. Re-run with --allow-existing-state if you really want to test on this machine." fi - if command -v openshell > /dev/null 2>&1; then + if command -v openshell >/dev/null 2>&1; then if openshell sandbox list 2>/dev/null | grep -Eq '[[:alnum:]]'; then fail "Existing OpenShell sandboxes detected. Re-run with --allow-existing-state only if you are prepared for uninstall.sh to remove them." fi @@ -173,12 +176,12 @@ feed_install_answers() { done printf 'n\n' - ) > "$answers_pipe" + ) >"$answers_pipe" } start_log_follow() { local logfile="$1" - : > "$logfile" + : >"$logfile" tail -n +1 -f "$logfile" & LOG_FOLLOW_PID=$! } @@ -198,7 +201,7 @@ run_install() { ANSWER_WRITER_PID=$! start_log_follow "$INSTALL_LOG" set +e - bash "$REPO_DIR/install.sh" < "$answers_pipe" >> "$INSTALL_LOG" 2>&1 + bash "$REPO_DIR/install.sh" <"$answers_pipe" >>"$INSTALL_LOG" 2>&1 INSTALL_STATUS=$? set -e stop_log_follow @@ -218,7 +221,7 @@ run_uninstall() { info "Running uninstall.sh for cleanup" start_log_follow "$UNINSTALL_LOG" set +e - bash "$REPO_DIR/uninstall.sh" "${args[@]}" >> "$UNINSTALL_LOG" 2>&1 + bash "$REPO_DIR/uninstall.sh" "${args[@]}" >>"$UNINSTALL_LOG" 2>&1 UNINSTALL_STATUS=$? 
set -e stop_log_follow @@ -233,7 +236,7 @@ verify_cleanup() { leftovers=1 fi - if command -v openshell > /dev/null 2>&1; then + if command -v openshell >/dev/null 2>&1; then local sandbox_output sandbox_output="$(openshell sandbox list 2>/dev/null || true)" if printf '%s' "$sandbox_output" | grep -Eq '[[:alnum:]]'; then @@ -242,7 +245,7 @@ verify_cleanup() { fi fi - if command -v docker > /dev/null 2>&1 && docker info > /dev/null 2>&1; then + if command -v docker >/dev/null 2>&1 && docker info >/dev/null 2>&1; then local related_containers related_containers="$( docker ps -a --format '{{.Image}} {{.Names}}' 2>/dev/null \ diff --git a/scripts/start-services.sh b/scripts/start-services.sh index cbce0f18..303caf69 100755 --- a/scripts/start-services.sh +++ b/scripts/start-services.sh @@ -49,9 +49,12 @@ RED='\033[0;31m' YELLOW='\033[1;33m' NC='\033[0m' -info() { echo -e "${GREEN}[services]${NC} $1"; } -warn() { echo -e "${YELLOW}[services]${NC} $1"; } -fail() { echo -e "${RED}[services]${NC} $1"; exit 1; } +info() { echo -e "${GREEN}[services]${NC} $1"; } +warn() { echo -e "${YELLOW}[services]${NC} $1"; } +fail() { + echo -e "${RED}[services]${NC} $1" + exit 1 +} is_running() { local pidfile="$PIDDIR/$1.pid" @@ -68,8 +71,8 @@ start_service() { info "$name already running (PID $(cat "$PIDDIR/$name.pid"))" return 0 fi - nohup "$@" > "$PIDDIR/$name.log" 2>&1 & - echo $! > "$PIDDIR/$name.pid" + nohup "$@" >"$PIDDIR/$name.log" 2>&1 & + echo $! >"$PIDDIR/$name.pid" info "$name started (PID $!)" } @@ -127,10 +130,10 @@ do_start() { warn "Create a bot via @BotFather on Telegram and set the token." fi - command -v node > /dev/null || fail "node not found. Install Node.js first." + command -v node >/dev/null || fail "node not found. Install Node.js first." # Verify sandbox is running - if command -v openshell > /dev/null 2>&1; then + if command -v openshell >/dev/null 2>&1; then if ! openshell sandbox list 2>&1 | grep -q "Ready"; then warn "No sandbox in Ready state. 
Telegram bridge may not work until sandbox is running." fi @@ -145,7 +148,7 @@ do_start() { fi # 3. cloudflared tunnel - if command -v cloudflared > /dev/null 2>&1; then + if command -v cloudflared >/dev/null 2>&1; then start_service cloudflared \ cloudflared tunnel --url "http://localhost:$DASHBOARD_PORT" else @@ -194,7 +197,7 @@ do_start() { # Dispatch case "$ACTION" in - stop) do_stop ;; + stop) do_stop ;; status) show_status ;; - start) do_start ;; + start) do_start ;; esac diff --git a/scripts/walkthrough.sh b/scripts/walkthrough.sh index b50e24b0..70e5615e 100755 --- a/scripts/walkthrough.sh +++ b/scripts/walkthrough.sh @@ -43,7 +43,10 @@ set -euo pipefail -[ -n "${NVIDIA_API_KEY:-}" ] || { echo "NVIDIA_API_KEY required"; exit 1; } +[ -n "${NVIDIA_API_KEY:-}" ] || { + echo "NVIDIA_API_KEY required" + exit 1 +} echo "" echo " ┌─────────────────────────────────────────────────────┐" @@ -61,7 +64,7 @@ echo " │ \"Install requests and get the top HN story\" │" echo " └─────────────────────────────────────────────────────┘" echo "" -if ! command -v tmux > /dev/null 2>&1; then +if ! command -v tmux >/dev/null 2>&1; then echo "tmux not found. Run these in two separate terminals:" echo "" echo " Terminal 1 (TUI):" diff --git a/test/e2e-test.sh b/test/e2e-test.sh index 2ec70331..cf20c4e2 100755 --- a/test/e2e-test.sh +++ b/test/e2e-test.sh @@ -13,7 +13,10 @@ YELLOW='\033[1;33m' NC='\033[0m' pass() { echo -e "${GREEN}PASS${NC}: $1"; } -fail() { echo -e "${RED}FAIL${NC}: $1"; exit 1; } +fail() { + echo -e "${RED}FAIL${NC}: $1" + exit 1 +} info() { echo -e "${YELLOW}TEST${NC}: $1"; } # ------------------------------------------------------- @@ -25,12 +28,12 @@ openclaw --version && pass "OpenClaw CLI installed" || fail "OpenClaw CLI not fo info "2. 
Verify plugin can be installed" # ------------------------------------------------------- openclaw plugins install /opt/nemoclaw 2>&1 && pass "Plugin installed" || { - # If plugins install isn't available, verify the built artifacts exist - if [ -f /opt/nemoclaw/dist/index.js ]; then - pass "Plugin built successfully (dist/index.js exists)" - else - fail "Plugin build artifacts missing" - fi + # If plugins install isn't available, verify the built artifacts exist + if [ -f /opt/nemoclaw/dist/index.js ]; then + pass "Plugin built successfully (dist/index.js exists)" + else + fail "Plugin build artifacts missing" + fi } # ------------------------------------------------------- diff --git a/uninstall.sh b/uninstall.sh index 60d05673..4ad94911 100755 --- a/uninstall.sh +++ b/uninstall.sh @@ -21,9 +21,9 @@ set -euo pipefail # --------------------------------------------------------------------------- if [[ -z "${NO_COLOR:-}" && -t 1 ]]; then if [[ "${COLORTERM:-}" == "truecolor" || "${COLORTERM:-}" == "24bit" ]]; then - C_GREEN=$'\033[38;2;118;185;0m' # #76B900 — exact NVIDIA green + C_GREEN=$'\033[38;2;118;185;0m' # #76B900 — exact NVIDIA green else - C_GREEN=$'\033[38;5;148m' # closest 256-color on dark backgrounds + C_GREEN=$'\033[38;5;148m' # closest 256-color on dark backgrounds fi C_BOLD=$'\033[1m' C_DIM=$'\033[2m' @@ -36,12 +36,16 @@ fi info() { printf "${C_GREEN}[uninstall]${C_RESET} %s\n" "$*"; } warn() { printf "${C_YELLOW}[uninstall]${C_RESET} %s\n" "$*"; } -fail() { printf "${C_RED}[uninstall]${C_RESET} %s\n" "$*" >&2; exit 1; } -ok() { printf " ${C_GREEN}✓${C_RESET} %s\n" "$*"; } +fail() { + printf "${C_RED}[uninstall]${C_RESET} %s\n" "$*" >&2 + exit 1 +} +ok() { printf " ${C_GREEN}✓${C_RESET} %s\n" "$*"; } # spin "label" cmd [args...] — spinner wrapper, same as installer. spin() { - local msg="$1"; shift + local msg="$1" + shift if [[ ! 
-t 1 ]]; then info "$msg" @@ -49,7 +53,8 @@ spin() { return fi - local log; log=$(mktemp) + local log + log=$(mktemp) "$@" >"$log" 2>&1 & local pid=$! i=0 local frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏') @@ -59,7 +64,8 @@ spin() { sleep 0.08 done - wait "$pid"; local status=$? + wait "$pid" + local status=$? if [[ $status -eq 0 ]]; then printf "\r ${C_GREEN}✓${C_RESET} %s\n" "$msg" else @@ -147,7 +153,7 @@ while [ $# -gt 0 ]; do DELETE_MODELS=true shift ;; - -h|--help) + -h | --help) usage exit 0 ;; @@ -184,15 +190,18 @@ confirm() { read -r reply || true fi case "$reply" in - y|Y|yes|YES) ;; - *) info "Aborted."; exit 0 ;; + y | Y | yes | YES) ;; + *) + info "Aborted." + exit 0 + ;; esac } run_optional() { local description="$1" shift - if "$@" > /dev/null 2>&1; then + if "$@" >/dev/null 2>&1; then info "$description" else warn "$description skipped" @@ -243,7 +252,7 @@ stop_helper_services() { } stop_openshell_forward_processes() { - if ! command -v pgrep > /dev/null 2>&1; then + if ! command -v pgrep >/dev/null 2>&1; then warn "pgrep not found; skipping local OpenShell forward process cleanup." return 0 fi @@ -261,7 +270,7 @@ stop_openshell_forward_processes() { fi for pid in "${pids[@]}"; do - if kill "$pid" > /dev/null 2>&1 || kill -9 "$pid" > /dev/null 2>&1; then + if kill "$pid" >/dev/null 2>&1 || kill -9 "$pid" >/dev/null 2>&1; then info "Stopped OpenShell forward process $pid" else warn "Failed to stop OpenShell forward process $pid" @@ -270,7 +279,7 @@ stop_openshell_forward_processes() { } remove_openshell_resources() { - if ! command -v openshell > /dev/null 2>&1; then + if ! command -v openshell >/dev/null 2>&1; then warn "openshell not found; skipping gateway/provider/sandbox cleanup." 
return 0 fi @@ -285,9 +294,9 @@ remove_openshell_resources() { } remove_nemoclaw_cli() { - if command -v npm > /dev/null 2>&1; then - npm unlink -g nemoclaw > /dev/null 2>&1 || true - if npm uninstall -g --loglevel=error nemoclaw > /dev/null 2>&1; then + if command -v npm >/dev/null 2>&1; then + npm unlink -g nemoclaw >/dev/null 2>&1 || true + if npm uninstall -g --loglevel=error nemoclaw >/dev/null 2>&1; then info "Removed global nemoclaw npm package" else warn "Global nemoclaw npm package not found or already removed" @@ -314,12 +323,12 @@ remove_nemoclaw_state() { } remove_related_docker_containers() { - if ! command -v docker > /dev/null 2>&1; then + if ! command -v docker >/dev/null 2>&1; then warn "docker not found; skipping Docker container cleanup." return 0 fi - if ! docker info > /dev/null 2>&1; then + if ! docker info >/dev/null 2>&1; then warn "docker is not running; skipping Docker container cleanup." return 0 fi @@ -351,7 +360,7 @@ remove_related_docker_containers() { local removed_any=false local container_id for container_id in "${container_ids[@]}"; do - if docker rm -f "$container_id" > /dev/null 2>&1; then + if docker rm -f "$container_id" >/dev/null 2>&1; then info "Removed Docker container $container_id" removed_any=true else @@ -365,12 +374,12 @@ remove_related_docker_containers() { } remove_related_docker_images() { - if ! command -v docker > /dev/null 2>&1; then + if ! command -v docker >/dev/null 2>&1; then warn "docker not found; skipping Docker image cleanup." return 0 fi - if ! docker info > /dev/null 2>&1; then + if ! docker info >/dev/null 2>&1; then warn "docker is not running; skipping Docker image cleanup." 
return 0 fi @@ -402,7 +411,7 @@ remove_related_docker_images() { local removed_any=false local image_id for image_id in "${image_ids[@]}"; do - if docker rmi -f "$image_id" > /dev/null 2>&1; then + if docker rmi -f "$image_id" >/dev/null 2>&1; then info "Removed Docker image $image_id" removed_any=true else @@ -422,12 +431,12 @@ gateway_volume_candidates() { } remove_related_docker_volumes() { - if ! command -v docker > /dev/null 2>&1; then + if ! command -v docker >/dev/null 2>&1; then warn "docker not found; skipping Docker volume cleanup." return 0 fi - if ! docker info > /dev/null 2>&1; then + if ! docker info >/dev/null 2>&1; then warn "docker is not running; skipping Docker volume cleanup." return 0 fi @@ -446,8 +455,8 @@ remove_related_docker_volumes() { local removed_any=false for volume_name in "${volume_names[@]}"; do - if docker volume inspect "$volume_name" > /dev/null 2>&1; then - if docker volume rm -f "$volume_name" > /dev/null 2>&1; then + if docker volume inspect "$volume_name" >/dev/null 2>&1; then + if docker volume rm -f "$volume_name" >/dev/null 2>&1; then info "Removed Docker volume $volume_name" removed_any=true else @@ -467,14 +476,14 @@ remove_optional_ollama_models() { return 0 fi - if ! command -v ollama > /dev/null 2>&1; then + if ! command -v ollama >/dev/null 2>&1; then warn "ollama not found; skipping model cleanup." 
return 0 fi local model for model in "${OLLAMA_MODELS[@]}"; do - if ollama rm "$model" > /dev/null 2>&1; then + if ollama rm "$model" >/dev/null 2>&1; then info "Removed Ollama model '$model'" else warn "Ollama model '$model' not found or already removed" @@ -495,7 +504,7 @@ remove_openshell_binary() { local removed=false local current_path="" - if command -v openshell > /dev/null 2>&1; then + if command -v openshell >/dev/null 2>&1; then current_path="$(command -v openshell)" fi From 8fd18ddc0ac7798137de6ce769bbe016188792a7 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:27:41 +0200 Subject: [PATCH 15/20] fix(security): use mktemp for temp files, stop leaking API key in walkthrough - test-inference-local.sh, test-inference.sh: replace hardcoded /tmp/req.json with mktemp + trap cleanup (TOCTOU fix). - walkthrough.sh: print a placeholder instead of expanding the real NVIDIA_API_KEY in the tmux-fallback instructions. - Makefile: add comment clarifying lint-ts/lint-py are for targeted runs. --- Makefile | 1 + scripts/test-inference-local.sh | 6 ++++-- scripts/test-inference.sh | 6 ++++-- scripts/walkthrough.sh | 2 +- 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 89330130..d7a4ddd7 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,7 @@ check: lint: check +# Targeted subproject checks (not part of `make check` — use for focused runs). 
lint-ts: cd nemoclaw && npm run check diff --git a/scripts/test-inference-local.sh b/scripts/test-inference-local.sh index ebdfbcb0..9fddf8ec 100755 --- a/scripts/test-inference-local.sh +++ b/scripts/test-inference-local.sh @@ -3,5 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # Test inference.local routing through OpenShell provider (local vLLM) -echo '{"model":"nvidia/nemotron-3-nano-30b-a3b","messages":[{"role":"user","content":"say hello"}]}' >/tmp/req.json -curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @/tmp/req.json +TMPFILE=$(mktemp) +trap 'rm -f "$TMPFILE"' EXIT +echo '{"model":"nvidia/nemotron-3-nano-30b-a3b","messages":[{"role":"user","content":"say hello"}]}' >"$TMPFILE" +curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @"$TMPFILE" diff --git a/scripts/test-inference.sh b/scripts/test-inference.sh index 5553eaed..03b0f330 100755 --- a/scripts/test-inference.sh +++ b/scripts/test-inference.sh @@ -3,5 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # Test inference.local routing through OpenShell provider -echo '{"model":"nvidia/nemotron-3-super-120b-a12b","messages":[{"role":"user","content":"say hello"}]}' >/tmp/req.json -curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @/tmp/req.json +TMPFILE=$(mktemp) +trap 'rm -f "$TMPFILE"' EXIT +echo '{"model":"nvidia/nemotron-3-super-120b-a12b","messages":[{"role":"user","content":"say hello"}]}' >"$TMPFILE" +curl -s https://inference.local/v1/chat/completions -H "Content-Type: application/json" -d @"$TMPFILE" diff --git a/scripts/walkthrough.sh b/scripts/walkthrough.sh index 70e5615e..5a309429 100755 --- a/scripts/walkthrough.sh +++ b/scripts/walkthrough.sh @@ -72,7 +72,7 @@ if ! 
command -v tmux >/dev/null 2>&1; then echo "" echo " Terminal 2 (Agent):" echo " openshell sandbox connect nemoclaw" - echo " export NVIDIA_API_KEY=$NVIDIA_API_KEY" + echo ' export NVIDIA_API_KEY=' echo " nemoclaw-start" echo " openclaw agent --agent main --local --session-id live" exit 0 From c0aac9c3adffa42c656db54721f475ce1d60e6d3 Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:28:29 +0200 Subject: [PATCH 16/20] ci: consolidate install steps into a single block per job --- .github/workflows/pr.yaml | 29 ++++++++++++----------------- scripts/walkthrough.sh | 2 +- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index b6d345b4..8ebca759 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -36,18 +36,14 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v4 - - name: Install root dependencies - run: npm install --ignore-scripts - - - name: Install and build TypeScript plugin - working-directory: nemoclaw + - name: Install dependencies run: | - npm install - npm run build + npm install --ignore-scripts + cd nemoclaw && npm install + cd ../nemoclaw-blueprint && uv sync --extra dev - - name: Install Python dependencies - working-directory: nemoclaw-blueprint - run: uv sync --extra dev + - name: Build TypeScript plugin + run: cd nemoclaw && npm run build - name: Run all pre-commit hooks run: npx prek run --all-files @@ -68,14 +64,13 @@ jobs: node-version: "22" cache: npm - - name: Install root dependencies - run: npm install - - - name: Install and build TypeScript plugin - working-directory: nemoclaw + - name: Install dependencies run: | - npm install - npm run build + npm install --ignore-scripts + cd nemoclaw && npm install + + - name: Build TypeScript plugin + run: cd nemoclaw && npm run build - name: Run all unit tests with coverage run: npx vitest run --coverage diff --git a/scripts/walkthrough.sh b/scripts/walkthrough.sh index 5a309429..1acda809 
100755 --- a/scripts/walkthrough.sh +++ b/scripts/walkthrough.sh @@ -72,7 +72,7 @@ if ! command -v tmux >/dev/null 2>&1; then echo "" echo " Terminal 2 (Agent):" echo " openshell sandbox connect nemoclaw" - echo ' export NVIDIA_API_KEY=' + echo ' export NVIDIA_API_KEY=' echo " nemoclaw-start" echo " openclaw agent --agent main --local --session-id live" exit 0 From 1771948ac4341a2a6b8b14716f0f66a3a966395d Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:36:46 +0200 Subject: [PATCH 17/20] fix(ci): switch hadolint from Docker to native binary hadolint-docker times out pulling ghcr.io images in CI. Use a local system hook with the binary installed via curl in the workflow. --- .github/workflows/pr.yaml | 11 +++++++---- .pre-commit-config.yaml | 10 +++++++--- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 8ebca759..c2615066 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -36,6 +36,12 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v4 + - name: Install hadolint + run: | + curl -fsSL -o /usr/local/bin/hadolint \ + "https://github.com/hadolint/hadolint/releases/download/v2.14.0/hadolint-Linux-x86_64" + chmod +x /usr/local/bin/hadolint + - name: Install dependencies run: | npm install --ignore-scripts @@ -45,10 +51,7 @@ jobs: - name: Build TypeScript plugin run: cd nemoclaw && npm run build - - name: Run all pre-commit hooks - run: npx prek run --all-files - - - name: Run pre-push hooks (tsc + pyright) + - name: Run all hooks (pre-commit + pre-push) run: npx prek run --all-files --stage pre-push test-unit: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 37973836..f9bf94dc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -133,10 +133,14 @@ repos: - --exclude=SC1091 priority: 10 - - repo: https://github.com/hadolint/hadolint - rev: v2.14.0 + - repo: local hooks: - - id: hadolint-docker + - id: hadolint + 
name: hadolint + entry: hadolint + language: system + files: (Dockerfile[^/]*|.*\.dockerfile)$ + types: [file] priority: 10 - repo: local From 8df98accd1e9a5e1138ef59367914ba71c73abdc Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:44:16 +0200 Subject: [PATCH 18/20] perf: enable tsc incremental caching for pre-push hook Add --incremental to tsc --noEmit so subsequent type-checks reuse the .tsbuildinfo cache (supported since TS 4.0). Gitignore the cache file. See: https://thoughtspile.github.io/2021/06/14/faster-pre-commit/ --- .github/workflows/pr.yaml | 7 +++++-- .gitignore | 1 + .pre-commit-config.yaml | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index c2615066..3befb203 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -38,8 +38,11 @@ jobs: - name: Install hadolint run: | - curl -fsSL -o /usr/local/bin/hadolint \ - "https://github.com/hadolint/hadolint/releases/download/v2.14.0/hadolint-Linux-x86_64" + HADOLINT_URL="https://github.com/hadolint/hadolint/releases/latest/download/hadolint-Linux-x86_64" + curl -fsSL -o /usr/local/bin/hadolint "$HADOLINT_URL" + EXPECTED=$(curl -fsSL "${HADOLINT_URL}.sha256" | awk '{print $1}') + ACTUAL=$(sha256sum /usr/local/bin/hadolint | awk '{print $1}') + [ "$EXPECTED" = "$ACTUAL" ] || { echo "::error::hadolint checksum mismatch"; exit 1; } chmod +x /usr/local/bin/hadolint - name: Install dependencies diff --git a/.gitignore b/.gitignore index 6561d6a9..9ddd809b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Build artifacts and caches *.pyc +*.tsbuildinfo .pytest_cache/ __pycache__/ coverage/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f9bf94dc..1d26ecbf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -185,7 +185,7 @@ repos: hooks: - id: tsc-check name: TypeScript type check (tsc --noEmit) - entry: bash -c 'cd nemoclaw && npx tsc --noEmit' + entry: 
bash -c 'cd nemoclaw && npx tsc --noEmit --incremental' language: system pass_filenames: false always_run: true From c689f093f8c2d8c77d55a009cee3ff6dcdcd5bef Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 10:49:21 +0200 Subject: [PATCH 19/20] ci: bump all GitHub Actions to latest major versions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - actions/checkout v4 → v6 - actions/setup-node v4 → v6 - actions/setup-python v5 → v6 - astral-sh/setup-uv v4 → v7 - actions/upload-artifact stays at v4 (latest) - rossjrw/pr-preview-action stays at v1 (latest) --- .github/workflows/commit-lint.yaml | 4 ++-- .github/workflows/docker-pin-check.yaml | 2 +- .github/workflows/docs-preview-pr.yaml | 6 +++--- .github/workflows/docs.yaml | 6 +++--- .github/workflows/nightly-e2e.yaml | 2 +- .github/workflows/pr.yaml | 14 +++++++------- 6 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/commit-lint.yaml b/.github/workflows/commit-lint.yaml index 03d259ea..dec431f7 100644 --- a/.github/workflows/commit-lint.yaml +++ b/.github/workflows/commit-lint.yaml @@ -20,12 +20,12 @@ jobs: timeout-minutes: 5 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: "22" cache: npm diff --git a/.github/workflows/docker-pin-check.yaml b/.github/workflows/docker-pin-check.yaml index bc72381d..740a95ee 100644 --- a/.github/workflows/docker-pin-check.yaml +++ b/.github/workflows/docker-pin-check.yaml @@ -22,7 +22,7 @@ jobs: timeout-minutes: 5 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Check Dockerfile base-image pin run: bash scripts/update-docker-pin.sh --check diff --git a/.github/workflows/docs-preview-pr.yaml b/.github/workflows/docs-preview-pr.yaml index 0c0ce821..e8b3c9be 100644 --- 
a/.github/workflows/docs-preview-pr.yaml +++ b/.github/workflows/docs-preview-pr.yaml @@ -28,17 +28,17 @@ jobs: timeout-minutes: 10 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Python if: github.event.action != 'closed' - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: Install uv if: github.event.action != 'closed' - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 - name: Install doc dependencies if: github.event.action != 'closed' diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index d59f31df..3b42ccfa 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -26,15 +26,15 @@ jobs: timeout-minutes: 10 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: Install uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 - name: Install doc dependencies run: uv sync --group docs diff --git a/.github/workflows/nightly-e2e.yaml b/.github/workflows/nightly-e2e.yaml index 31a12219..903ec43e 100644 --- a/.github/workflows/nightly-e2e.yaml +++ b/.github/workflows/nightly-e2e.yaml @@ -29,7 +29,7 @@ jobs: timeout-minutes: 45 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Run full E2E test env: diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 3befb203..d95796d6 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -20,21 +20,21 @@ jobs: timeout-minutes: 10 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: "22" cache: npm - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: Install uv - uses: 
astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 - name: Install hadolint run: | @@ -62,10 +62,10 @@ jobs: timeout-minutes: 10 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: "22" cache: npm @@ -89,7 +89,7 @@ jobs: timeout-minutes: 15 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Build sandbox test image run: docker build -f test/Dockerfile.sandbox -t nemoclaw-sandbox-test . From 939c8f5308658c8fb1dedc7715003667f28339ab Mon Sep 17 00:00:00 2001 From: Ruben Hagege Date: Mon, 23 Mar 2026 11:02:37 +0200 Subject: [PATCH 20/20] docs: add hadolint to prerequisites in CONTRIBUTING.md --- CONTRIBUTING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 58ca61f7..48c3dd0f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,6 +18,7 @@ Install the following before you begin. - Python 3.11+ (for blueprint and documentation builds) - Docker (running) - [uv](https://docs.astral.sh/uv/) (for Python dependency management) +- [hadolint](https://github.com/hadolint/hadolint) (Dockerfile linter — `brew install hadolint` on macOS) ## Getting Started