diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 70ecbf311f1..62b46bfd232 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -216,6 +216,7 @@ /.github/workflows/aiguard.yml @DataDog/asm-js /.github/workflows/appsec.yml @DataDog/asm-js /.github/workflows/debugger.yml @DataDog/debugger-nodejs +/.github/workflows/instrumentation.yml @DataDog/apm-idm-js /.github/workflows/serverless.yml @DataDog/serverless-aws @DataDog/apm-serverless /.github/workflows/llmobs.yml @DataDog/ml-observability /.github/workflows/profiling.yml @DataDog/profiling-js diff --git a/.github/actions/install/action.yml b/.github/actions/install/action.yml index 06f94076966..2c705f28ee1 100644 --- a/.github/actions/install/action.yml +++ b/.github/actions/install/action.yml @@ -9,7 +9,7 @@ runs: using: composite steps: - id: yarn-cache - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: key: yarn-cache-${{ github.workflow }}-${{ github.job }}-${{ hashFiles('yarn.lock') }}-v2 path: node_modules.tar diff --git a/.github/actions/install/branch-diff/action.yml b/.github/actions/install/branch-diff/action.yml index 21c253a87b3..1827f26d95f 100644 --- a/.github/actions/install/branch-diff/action.yml +++ b/.github/actions/install/branch-diff/action.yml @@ -7,7 +7,7 @@ inputs: runs: using: composite steps: - - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + - uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: ~/.npm key: ${{ github.workflow }}-branch-diff-3.1.1 diff --git a/.github/actions/node/action.yml b/.github/actions/node/action.yml index 4740b96d530..a110ce68845 100644 --- a/.github/actions/node/action.yml +++ b/.github/actions/node/action.yml @@ -29,7 +29,7 @@ runs: id: cache-key shell: bash run: echo "block=$(( $(date -u +%s) / 1200 ))" >> "$GITHUB_OUTPUT" - - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 
+ - uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 id: node-version-cache with: path: /tmp/.node-resolved-version-${{ steps.node-version.outputs.version }} diff --git a/.github/actions/plugins/test-and-upstream/action.yml b/.github/actions/plugins/test-and-upstream/action.yml index 142b1dfbbc2..9ae3c79a2eb 100644 --- a/.github/actions/plugins/test-and-upstream/action.yml +++ b/.github/actions/plugins/test-and-upstream/action.yml @@ -1,12 +1,20 @@ name: Plugin and Upstream Tests description: Run plugin tests and upstream test suite +inputs: + node-floor: + description: 'Lower Node alias: oldest-maintenance-lts or newest-maintenance-lts.' + required: false + default: oldest-maintenance-lts runs: using: composite steps: - uses: ./.github/actions/dd-sts-api-key id: dd-sts - uses: ./.github/actions/testagent/start - - uses: ./.github/actions/node/oldest-maintenance-lts + - if: ${{ inputs.node-floor == 'oldest-maintenance-lts' }} + uses: ./.github/actions/node/oldest-maintenance-lts + - if: ${{ inputs.node-floor == 'newest-maintenance-lts' }} + uses: ./.github/actions/node/newest-maintenance-lts - uses: ./.github/actions/install - run: yarn test:plugins:ci shell: bash diff --git a/.github/actions/plugins/test/action.yml b/.github/actions/plugins/test/action.yml index 7dd54c55c54..d5f94b0b1a5 100644 --- a/.github/actions/plugins/test/action.yml +++ b/.github/actions/plugins/test/action.yml @@ -1,12 +1,20 @@ name: Plugin Tests description: Run plugin tests +inputs: + node-floor: + description: 'Lower Node alias: oldest-maintenance-lts or newest-maintenance-lts.' 
+ required: false + default: oldest-maintenance-lts runs: using: composite steps: - uses: ./.github/actions/dd-sts-api-key id: dd-sts - uses: ./.github/actions/testagent/start - - uses: ./.github/actions/node/oldest-maintenance-lts + - if: ${{ inputs.node-floor == 'oldest-maintenance-lts' }} + uses: ./.github/actions/node/oldest-maintenance-lts + - if: ${{ inputs.node-floor == 'newest-maintenance-lts' }} + uses: ./.github/actions/node/newest-maintenance-lts - uses: ./.github/actions/install - run: yarn test:plugins:ci shell: bash diff --git a/.github/actions/plugins/upstream/action.yml b/.github/actions/plugins/upstream/action.yml index eb875f49895..7f5e1361821 100644 --- a/.github/actions/plugins/upstream/action.yml +++ b/.github/actions/plugins/upstream/action.yml @@ -1,12 +1,20 @@ name: Plugin Upstream Tests description: Run upstream test suite +inputs: + node-floor: + description: 'Lower Node alias: oldest-maintenance-lts or newest-maintenance-lts.' + required: false + default: oldest-maintenance-lts runs: using: composite steps: - uses: ./.github/actions/dd-sts-api-key id: dd-sts - uses: ./.github/actions/testagent/start - - uses: ./.github/actions/node/oldest-maintenance-lts + - if: ${{ inputs.node-floor == 'oldest-maintenance-lts' }} + uses: ./.github/actions/node/oldest-maintenance-lts + - if: ${{ inputs.node-floor == 'newest-maintenance-lts' }} + uses: ./.github/actions/node/newest-maintenance-lts - uses: ./.github/actions/install - run: yarn test:plugins:upstream shell: bash diff --git a/.github/actions/push_to_test_optimization/action.yml b/.github/actions/push_to_test_optimization/action.yml index 0553234be82..0a8ae1bd11f 100644 --- a/.github/actions/push_to_test_optimization/action.yml +++ b/.github/actions/push_to_test_optimization/action.yml @@ -17,4 +17,7 @@ runs: uses: DataDog/junit-upload-github-action@24449d01fc01e721fa36ccd2caa3caae6922f0e8 # v3.0.0 with: api_key: ${{ inputs.dd_api_key }} + # TODO: remove once 
https://github.com/DataDog/junit-upload-github-action/pull/54 lands + # and junit-upload-github-action releases are tied to datadog-ci releases. + datadog-ci-version: 5.13.1 service: dd-trace-js-tests diff --git a/.github/workflows/aiguard.yml b/.github/workflows/aiguard.yml index 6383a30a4dc..ca1a46c61d1 100644 --- a/.github/workflows/aiguard.yml +++ b/.github/workflows/aiguard.yml @@ -8,8 +8,8 @@ on: - cron: 0 4 * * * concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} diff --git a/.github/workflows/all-green.yml b/.github/workflows/all-green.yml index 44ce6e371bb..21e5c82155e 100644 --- a/.github/workflows/all-green.yml +++ b/.github/workflows/all-green.yml @@ -8,8 +8,8 @@ on: - cron: 0 4 * * * concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: all-green: diff --git a/.github/workflows/apm-capabilities.yml b/.github/workflows/apm-capabilities.yml index 4a86bac99f6..7a1455df52c 100644 --- a/.github/workflows/apm-capabilities.yml +++ b/.github/workflows/apm-capabilities.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} @@ -46,22 +46,21 @@ 
jobs: runs-on: ubuntu-latest permissions: id-token: write + strategy: + matrix: + node-version: [oldest, maintenance, active, latest] steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: ./.github/actions/dd-sts-api-key id: dd-sts - - uses: ./.github/actions/node/oldest-maintenance-lts + - uses: ./.github/actions/node + with: + version: ${{ matrix.node-version }} - uses: ./.github/actions/install - run: yarn test:trace:core:ci - - uses: ./.github/actions/node/newest-maintenance-lts - - run: yarn test:trace:core:ci - - uses: ./.github/actions/node/active-lts - - run: yarn test:trace:core:ci - - uses: ./.github/actions/node/latest - - run: yarn test:trace:core:ci - uses: ./.github/actions/coverage with: - flags: apm-capabilities-tracing-ubuntu + flags: apm-capabilities-tracing-ubuntu-${{ matrix.node-version }} dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - uses: ./.github/actions/push_to_test_optimization if: "!cancelled()" diff --git a/.github/workflows/apm-integrations.yml b/.github/workflows/apm-integrations.yml index d13813e7d21..26466358c9d 100644 --- a/.github/workflows/apm-integrations.yml +++ b/.github/workflows/apm-integrations.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} @@ -1106,27 +1106,9 @@ jobs: PLUGINS: pino steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/dd-sts-api-key - id: dd-sts - - uses: ./.github/actions/testagent/start - - uses: ./.github/actions/node/newest-maintenance-lts - - uses: ./.github/actions/install - - run: yarn test:plugins:ci - - 
uses: ./.github/actions/node/latest - - run: yarn test:plugins:ci - # - run: yarn test:plugins:upstream - - if: always() - uses: ./.github/actions/testagent/logs - with: - suffix: plugins-${{ github.job }} - - uses: ./.github/actions/coverage - with: - flags: apm-integrations-pino - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - - uses: ./.github/actions/push_to_test_optimization - if: "!cancelled()" + - uses: ./.github/actions/plugins/test with: - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + node-floor: newest-maintenance-lts passport-http: runs-on: ubuntu-latest diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index 21148617dfa..359b189f311 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml index bc7ef7a94d3..d9c82d8d2e8 100644 --- a/.github/workflows/audit.yml +++ b/.github/workflows/audit.yml @@ -6,8 +6,8 @@ on: - dependabot/** concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: dependencies: diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 56bbe017670..450885671dd 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -38,7 +38,7 @@ jobs: # Initializes the CodeQL tools for 
scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 + uses: github/codeql-action/init@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2 with: languages: ${{ matrix.language }} config-file: .github/codeql_config.yml @@ -48,7 +48,7 @@ jobs: # queries: ./path/to/local/query, your-org/your-repo/queries@main - name: Autobuild - uses: github/codeql-action/autobuild@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 + uses: github/codeql-action/autobuild@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1 + uses: github/codeql-action/analyze@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2 diff --git a/.github/workflows/debugger.yml b/.github/workflows/debugger.yml index bcea3b25225..10cbf94beca 100644 --- a/.github/workflows/debugger.yml +++ b/.github/workflows/debugger.yml @@ -8,8 +8,8 @@ on: - cron: 0 4 * * * concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} diff --git a/.github/workflows/eslint-rules.yml b/.github/workflows/eslint-rules.yml index 8aa059a83c5..c3836713e77 100644 --- a/.github/workflows/eslint-rules.yml +++ b/.github/workflows/eslint-rules.yml @@ -10,8 +10,8 @@ on: - "eslint-rules/**" concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: eslint-rules: diff --git a/.github/workflows/instrumentation.yml b/.github/workflows/instrumentation.yml 
new file mode 100644 index 00000000000..04355f7afe1 --- /dev/null +++ b/.github/workflows/instrumentation.yml @@ -0,0 +1,419 @@ +name: Instrumentation + +on: + pull_request: + push: + branches: [master, mq-working-branch-master-*] + schedule: + - cron: 0 4 * * * + workflow_dispatch: + inputs: + latest-version: + description: "Node version to use" + required: false + type: string + +concurrency: + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true + +env: + MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} + LATEST_VERSION: ${{ inputs.latest-version }} + +jobs: + esbuild: + runs-on: ubuntu-latest + permissions: + id-token: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/dd-sts-api-key + id: dd-sts + - uses: ./.github/actions/node/oldest-maintenance-lts + - uses: ./.github/actions/install + - run: yarn test:esbuild:ci + - uses: ./.github/actions/node/latest + - run: yarn test:esbuild:ci + - uses: ./.github/actions/coverage + with: + flags: platform-esbuild + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + + webpack: + runs-on: ubuntu-latest + permissions: + id-token: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/dd-sts-api-key + id: dd-sts + - uses: ./.github/actions/node/oldest-maintenance-lts + - uses: ./.github/actions/install + - run: yarn test:webpack:ci + - uses: ./.github/actions/node/latest + - run: yarn test:webpack:ci + - uses: ./.github/actions/coverage + with: + flags: platform-webpack + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + + instrumentation-bluebird: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: bluebird + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: 
./.github/actions/instrumentations/test + + instrumentation-body-parser: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: body-parser + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-child_process: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: child_process + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-cookie-parser: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: cookie-parser + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-express-mongo-sanitize: + runs-on: ubuntu-latest + permissions: + id-token: write + services: + mongodb: + image: circleci/mongo + ports: + - 27017:27017 + env: + PLUGINS: express-mongo-sanitize + SERVICES: mongo + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-express-session: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: express-session + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-express: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: express + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-fs: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: fs + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-generic-pool: + runs-on: ubuntu-latest 
+ permissions: + id-token: write + env: + PLUGINS: generic-pool + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + # TODO: Retries below work around a flaky bug in Node.js http code. Revert to using + # ./.github/actions/instrumentations/test once fixed upstream. + instrumentation-http: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: http + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/dd-sts-api-key + id: dd-sts + - uses: ./.github/actions/node/oldest-maintenance-lts + - uses: ./.github/actions/install + - name: Run instrumentation tests (oldest-maintenance, with retries) + uses: nick-fields/retry@ad984534de44a9489a53aefd81eb77f87c70dc60 # v4.0.0 + with: + max_attempts: 5 + timeout_minutes: 15 + retry_wait_seconds: 20 + command: yarn test:instrumentations:ci + - uses: ./.github/actions/node/latest + - name: Run instrumentation tests (latest, with retries) + uses: nick-fields/retry@ad984534de44a9489a53aefd81eb77f87c70dc60 # v4.0.0 + with: + max_attempts: 5 + timeout_minutes: 15 + retry_wait_seconds: 20 + command: yarn test:instrumentations:ci + - uses: ./.github/actions/coverage + with: + flags: instrumentations-${{ github.job }} + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + - uses: ./.github/actions/push_to_test_optimization + if: "!cancelled()" + with: + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + + instrumentation-knex: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: knex + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-light-my-request: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: light-my-request + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: 
./.github/actions/instrumentations/test + + instrumentation-mongoose: + runs-on: ubuntu-latest + permissions: + id-token: write + services: + mongodb: + image: circleci/mongo + ports: + - 27017:27017 + env: + PLUGINS: mongoose + SERVICES: mongo + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-multer: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: multer + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-mysql2: + runs-on: ubuntu-latest + permissions: + id-token: write + services: + mysql: + image: mariadb:10.4 + env: + MYSQL_ALLOW_EMPTY_PASSWORD: "yes" + MYSQL_DATABASE: "db" + ports: + - 3306:3306 + env: + PLUGINS: mysql2 + SERVICES: mysql2 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-passport: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: passport + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-passport-http: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: passport-http + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-passport-local: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: passport-local + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-pg: + runs-on: ubuntu-latest + permissions: + id-token: write + services: + postgres: + image: postgres:9.5 + env: + POSTGRES_PASSWORD: postgres + ports: + - 5432:5432 + env: + 
PG_TEST_NATIVE: "true" + PLUGINS: pg + SERVICES: postgres + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-promise-js: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: promise-js + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-promise: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: promise + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-q: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: q + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-url: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: url + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentation-when: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: when + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/instrumentations/test + + instrumentations-misc: + runs-on: ubuntu-latest + permissions: + id-token: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/dd-sts-api-key + id: dd-sts + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node/oldest-maintenance-lts + - uses: ./.github/actions/install + - run: yarn test:instrumentations:misc:ci + shell: bash + - uses: ./.github/actions/node/newest-maintenance-lts + - run: yarn test:instrumentations:misc:ci + shell: bash + - uses: ./.github/actions/node/active-lts + 
- run: yarn test:instrumentations:misc:ci + shell: bash + - uses: ./.github/actions/node/latest + - run: yarn test:instrumentations:misc:ci + shell: bash + - uses: ./.github/actions/coverage + with: + flags: platform-instrumentations-misc + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + - if: always() + uses: ./.github/actions/testagent/logs + with: + suffix: test-${{ github.job }} + - uses: ./.github/actions/push_to_test_optimization + if: "!cancelled()" + with: + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} + + integration-bundler: + strategy: + fail-fast: false + matrix: + version: [oldest, maintenance, active, latest] + bundler: [esbuild, webpack] + name: ${{ github.workflow }} / integration-${{ matrix.bundler }} (node-${{ matrix.version }}) + runs-on: ubuntu-latest + permissions: + id-token: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/dd-sts-api-key + id: dd-sts + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node + with: + version: ${{ matrix.version }} + - uses: ./.github/actions/install + # Disable core dumps since some integration tests intentionally abort and core dump generation takes around 5-10s + - run: sudo sysctl -w kernel.core_pattern='|/bin/false' + - run: yarn test:integration:${{ matrix.bundler }} + - uses: ./.github/actions/push_to_test_optimization + if: "!cancelled()" + with: + dd_api_key: ${{ steps.dd-sts.outputs.api_key }} diff --git a/.github/workflows/llmobs.yml b/.github/workflows/llmobs.yml index dae6b370d25..30c02220523 100644 --- a/.github/workflows/llmobs.yml +++ b/.github/workflows/llmobs.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: 
true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} @@ -96,7 +96,7 @@ jobs: - uses: ./.github/actions/dd-sts-api-key id: dd-sts - uses: ./.github/actions/testagent/start - - uses: ./.github/actions/node/oldest-maintenance-lts + - uses: ./.github/actions/node/newest-maintenance-lts - uses: ./.github/actions/install - run: yarn test:plugins:ci - run: yarn test:llmobs:plugins:ci @@ -280,7 +280,7 @@ jobs: if: "!cancelled()" with: dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - + langgraph: runs-on: ubuntu-latest permissions: @@ -290,3 +290,15 @@ jobs: steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: ./.github/actions/plugins/test + with: + node-floor: newest-maintenance-lts + + modelcontextprotocol-sdk: + runs-on: ubuntu-latest + permissions: + id-token: write + env: + PLUGINS: modelcontextprotocol-sdk + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/plugins/test diff --git a/.github/workflows/openfeature.yml b/.github/workflows/openfeature.yml index a98d0da99ba..d82bdbc068f 100644 --- a/.github/workflows/openfeature.yml +++ b/.github/workflows/openfeature.yml @@ -8,8 +8,8 @@ on: - cron: 0 4 * * * concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} diff --git a/.github/workflows/platform.yml b/.github/workflows/platform.yml index b38cb931012..c088a2738e8 100644 --- a/.github/workflows/platform.yml +++ b/.github/workflows/platform.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 
'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} @@ -74,10 +74,13 @@ jobs: - uses: ./.github/actions/node/active-lts - uses: ./.github/actions/install - run: yarn test:integration:bun - - uses: DataDog/junit-upload-github-action@24449d01fc01e721fa36ccd2caa3caae6922f0e8 # v3.0.0 + - uses: DataDog/junit-upload-github-action@24449d01fc01e721fa36ccd2caa3caae6922f0e8 # v3.0.0 if: always() && github.actor != 'dependabot[bot]' with: api_key: ${{ steps.dd-sts.outputs.api_key }} + # TODO: remove once https://github.com/DataDog/junit-upload-github-action/pull/54 lands + # and junit-upload-github-action releases are tied to datadog-ci releases. + datadog-ci-version: 5.13.1 service: dd-trace-js-tests core: @@ -112,365 +115,6 @@ jobs: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: ./.github/actions/plugins/test - esbuild: - runs-on: ubuntu-latest - permissions: - id-token: write - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/dd-sts-api-key - id: dd-sts - - uses: ./.github/actions/node/oldest-maintenance-lts - - uses: ./.github/actions/install - - run: yarn test:esbuild:ci - - uses: ./.github/actions/node/latest - - run: yarn test:esbuild:ci - - uses: ./.github/actions/coverage - with: - flags: platform-esbuild - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - - webpack: - runs-on: ubuntu-latest - permissions: - id-token: write - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/dd-sts-api-key - id: dd-sts - - uses: ./.github/actions/node/oldest-maintenance-lts - - uses: ./.github/actions/install - - run: yarn test:webpack:ci - - uses: ./.github/actions/node/latest - - run: yarn test:webpack:ci - - 
uses: ./.github/actions/coverage - with: - flags: platform-webpack - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - - instrumentation-bluebird: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: bluebird - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-body-parser: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: body-parser - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-child_process: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: child_process - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-cookie-parser: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: cookie-parser - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-express-mongo-sanitize: - runs-on: ubuntu-latest - permissions: - id-token: write - services: - mongodb: - image: circleci/mongo - ports: - - 27017:27017 - env: - PLUGINS: express-mongo-sanitize - SERVICES: mongo - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-express-session: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: express-session - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-express: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: express - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: 
./.github/actions/instrumentations/test - - instrumentation-fs: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: fs - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-generic-pool: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: generic-pool - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - # TODO: Retries below work around a flaky bug in Node.js http code. Revert to using - # ./.github/actions/instrumentations/test once fixed upstream. - instrumentation-http: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: http - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/dd-sts-api-key - id: dd-sts - - uses: ./.github/actions/node/oldest-maintenance-lts - - uses: ./.github/actions/install - - name: Run instrumentation tests (oldest-maintenance, with retries) - uses: nick-fields/retry@ad984534de44a9489a53aefd81eb77f87c70dc60 # v4.0.0 - with: - max_attempts: 5 - timeout_minutes: 15 - retry_wait_seconds: 20 - command: yarn test:instrumentations:ci - - uses: ./.github/actions/node/latest - - name: Run instrumentation tests (latest, with retries) - uses: nick-fields/retry@ad984534de44a9489a53aefd81eb77f87c70dc60 # v4.0.0 - with: - max_attempts: 5 - timeout_minutes: 15 - retry_wait_seconds: 20 - command: yarn test:instrumentations:ci - - uses: ./.github/actions/coverage - with: - flags: instrumentations-${{ github.job }} - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - - uses: ./.github/actions/push_to_test_optimization - if: "!cancelled()" - with: - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - - instrumentation-knex: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: knex - steps: - - uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-mongoose: - runs-on: ubuntu-latest - permissions: - id-token: write - services: - mongodb: - image: circleci/mongo - ports: - - 27017:27017 - env: - PLUGINS: mongoose - SERVICES: mongo - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-multer: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: multer - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-mysql2: - runs-on: ubuntu-latest - permissions: - id-token: write - services: - mysql: - image: mariadb:10.4 - env: - MYSQL_ALLOW_EMPTY_PASSWORD: "yes" - MYSQL_DATABASE: "db" - ports: - - 3306:3306 - env: - PLUGINS: mysql2 - SERVICES: mysql2 - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-passport: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: passport - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-passport-http: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: passport-http - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-passport-local: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: passport-local - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-pg: - runs-on: ubuntu-latest - permissions: - id-token: write - services: - postgres: - image: 
postgres:9.5 - env: - POSTGRES_PASSWORD: postgres - ports: - - 5432:5432 - env: - PG_TEST_NATIVE: "true" - PLUGINS: pg - SERVICES: postgres - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-promise-js: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: promise-js - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-promise: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: promise - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-q: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: q - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-url: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: url - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentation-when: - runs-on: ubuntu-latest - permissions: - id-token: write - env: - PLUGINS: when - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/instrumentations/test - - instrumentations-misc: - runs-on: ubuntu-latest - permissions: - id-token: write - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/dd-sts-api-key - id: dd-sts - - uses: ./.github/actions/testagent/start - - uses: ./.github/actions/node/oldest-maintenance-lts - - uses: ./.github/actions/install - - run: yarn test:instrumentations:misc:ci - shell: bash - - uses: ./.github/actions/node/newest-maintenance-lts - - run: yarn 
test:instrumentations:misc:ci - shell: bash - - uses: ./.github/actions/node/active-lts - - run: yarn test:instrumentations:misc:ci - shell: bash - - uses: ./.github/actions/node/latest - - run: yarn test:instrumentations:misc:ci - shell: bash - - uses: ./.github/actions/coverage - with: - flags: platform-instrumentations-misc - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - - if: always() - uses: ./.github/actions/testagent/logs - with: - suffix: test-${{ github.job }} - - uses: ./.github/actions/push_to_test_optimization - if: "!cancelled()" - with: - dd_api_key: ${{ steps.dd-sts.outputs.api_key }} - # TODO: Split this up as it runs tests for multiple different teams. integration: strategy: @@ -495,8 +139,6 @@ jobs: - uses: ./.github/actions/install - run: sudo sysctl -w kernel.core_pattern='|/bin/false' - run: yarn test:integration - - run: yarn test:integration:esbuild - - run: yarn test:integration:webpack - uses: ./.github/actions/push_to_test_optimization if: "!cancelled()" with: diff --git a/.github/workflows/profiling.yml b/.github/workflows/profiling.yml index b871f196482..15864ddd5dc 100644 --- a/.github/workflows/profiling.yml +++ b/.github/workflows/profiling.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 8c36c8b78b3..66eedcf5b6e 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -6,8 +6,8 @@ on: branches: [master, mq-working-branch-master-*] concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - 
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: actionlint: diff --git a/.github/workflows/release-validate.yml b/.github/workflows/release-validate.yml index b835e25cbb7..5a732539809 100644 --- a/.github/workflows/release-validate.yml +++ b/.github/workflows/release-validate.yml @@ -6,8 +6,8 @@ on: - v[0-9]+.[0-9]+.[0-9]+-proposal concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: validate-proposal: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4057f8fbe32..d9c8b564969 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,8 +9,8 @@ on: workflow_dispatch: concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: publish-v3: @@ -164,20 +164,3 @@ jobs: git tag --force dev git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git :refs/tags/dev git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git --tags - - status: - needs: ["publish-v3", "publish-v4", "publish-latest"] - if: always() && contains(needs.*.result, 'success') - runs-on: ubuntu-latest - permissions: - id-token: write - contents: read - pull-requests: read - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - sparse-checkout: scripts/release/status.js - - 
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 - - run: node scripts/release/status.js diff --git a/.github/workflows/serverless.yml b/.github/workflows/serverless.yml index d123f0141c9..b39d309b6a3 100644 --- a/.github/workflows/serverless.yml +++ b/.github/workflows/serverless.yml @@ -14,8 +14,8 @@ on: type: string concurrency: - group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ inputs.latest-version }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} @@ -292,8 +292,8 @@ jobs: SERVICES: azurite steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: ./.github/actions/plugins/test + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/plugins/test google-cloud-pubsub: runs-on: ubuntu-latest diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 7d58a9468ea..145ad9709c0 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -10,8 +10,8 @@ on: concurrency: # this ensures that only one workflow runs at a time for a given branch on pull requests - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true jobs: build-artifacts: diff --git a/.github/workflows/test-optimization.yml b/.github/workflows/test-optimization.yml index 30f35a4254b..b9aec383837 100644 --- a/.github/workflows/test-optimization.yml +++ b/.github/workflows/test-optimization.yml @@ -8,8 +8,8 @@ on: - cron: 0 4 * * * concurrency: - group: ${{ 
github.workflow }}-${{ github.ref || github.run_id }} - cancel-in-progress: ${{ github.ref != 'refs/heads/master' }} + group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/master' && github.run_id || github.ref }} + cancel-in-progress: true env: MOCHA_OPTIONS: ${{ github.ref == 'refs/heads/master' && '--retries 1' || '' }} @@ -54,7 +54,7 @@ jobs: echo "version=$PLAYWRIGHT_VERSION" >> $GITHUB_OUTPUT echo "Playwright version: $PLAYWRIGHT_VERSION" - name: Cache Playwright browsers - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: ~/.cache/ms-playwright key: playwright-browsers-${{ runner.os }}-${{ steps.playwright-version.outputs.version }} @@ -99,7 +99,7 @@ jobs: echo "dd-trace major version: $MAJOR" - name: Cache Playwright browsers if: matrix.playwright-version == 'oldest' - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: /github/home/.cache/ms-playwright key: playwright-browsers-oldest-dd${{ steps.dd-version.outputs.major }} @@ -252,7 +252,7 @@ jobs: # as that changes frequently and would have a low cache hit rate - name: Cache Cypress binary if: matrix.cypress-version != 'latest' - uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: ~/.cache/Cypress key: cypress-binary-${{ matrix.cypress-version }} diff --git a/.gitignore b/.gitignore index 01f82f977cf..8e0f8cef586 100644 --- a/.gitignore +++ b/.gitignore @@ -146,5 +146,8 @@ __queuestorage__/AzuriteConfig .claude/* !.claude/skills/ +# ignore oh-my-claudecode local state +.omc/ + # Husky generates a helper dir under .husky/_ (including husky.sh). Don't commit it. 
.husky/_/ diff --git a/.gitlab/benchmarks.yml b/.gitlab/benchmarks.yml index 3ffde4eea64..3c01362a451 100644 --- a/.gitlab/benchmarks.yml +++ b/.gitlab/benchmarks.yml @@ -143,9 +143,12 @@ benchmark-serverless-trigger: - if: $CI_COMMIT_BRANCH == 'master' interruptible: false - # dont run on merges to release branches (vN.x where N is any integer) + # don't run on merges to release branches ("vN.x" where N is any integer) - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME =~ /^v\d+\.x$/' when: never + # don't run on pushes to release branches + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH =~ /^v\d+\.x$/' + when: never - interruptible: true trigger: project: DataDog/serverless-tools diff --git a/.gitlab/one-pipeline.locked.yml b/.gitlab/one-pipeline.locked.yml index e1eddc88739..746078fae73 100644 --- a/.gitlab/one-pipeline.locked.yml +++ b/.gitlab/one-pipeline.locked.yml @@ -1,4 +1,4 @@ # DO NOT EDIT THIS FILE MANUALLY # This file is auto-generated by automation. include: - - remote: https://gitlab-templates.ddbuild.io/libdatadog/one-pipeline/ca/4667b01c6ae983f92ecf106e110ced3dac798b229cf2a0539c58aa22e95a99d5/one-pipeline.yml + - remote: https://gitlab-templates.ddbuild.io/libdatadog/one-pipeline/ca/0a900a87c53d3a57a5ab007b3147db4601f15c73ff31dc65f3791c803f2651d9/one-pipeline.yml diff --git a/docs/API.md b/docs/API.md index 4b9c6868b02..0cb5bf4f4c0 100644 --- a/docs/API.md +++ b/docs/API.md @@ -73,6 +73,7 @@ tracer.use('pg', {
+
@@ -153,6 +154,7 @@ tracer.use('pg', { * [memcached](./interfaces/export_.plugins.memcached.html) * [microgateway-core](./interfaces/export_.plugins.microgateway_core.html) * [mocha](./interfaces/export_.plugins.mocha.html) +* [modelcontextprotocol-sdk](./interfaces/export_.plugins.modelcontextprotocol_sdk.html) * [moleculer](./interfaces/export_.plugins.moleculer.html) * [mongodb-core](./interfaces/export_.plugins.mongodb_core.html) * [mongoose](./interfaces/export_.plugins.mongoose.html) diff --git a/docs/package.json b/docs/package.json index 8076b8dc24a..1801467e25f 100644 --- a/docs/package.json +++ b/docs/package.json @@ -4,13 +4,13 @@ "main": "typedoc.js", "scripts": { "build": "typedoc ../index.d.ts", - "pretest": "tsc -p . && tsc --types node test", + "pretest": "tsc -p .", "test": "node test" }, "license": "BSD-3-Clause", "private": true, "devDependencies": { - "typedoc": "^0.28.18", - "typescript": "^5.9.3" + "typedoc": "^0.28.19", + "typescript": "^6.0.3" } } diff --git a/docs/test.ts b/docs/test.ts index e268818ede1..319c33642a6 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -197,9 +197,9 @@ tracer.dogstatsd.flush() const httpOptions = { service: 'test', - allowlist: ['url', /url/, url => true], - blocklist: ['url', /url/, url => true], - validateStatus: code => code < 400, + allowlist: ['url', /url/, (url: string) => true], + blocklist: ['url', /url/, (url: string) => true], + validateStatus: (code: number) => code < 400, headers: ['host'], middleware: true }; @@ -382,6 +382,7 @@ tracer.use('microgateway-core', httpServerOptions); tracer.use('mocha'); tracer.use('mocha', { service: 'mocha-service' }); tracer.use('moleculer', moleculerOptions); +tracer.use('modelcontextprotocol-sdk'); tracer.use('mongodb-core'); tracer.use('mongoose'); tracer.use('mysql'); diff --git a/docs/tsconfig.json b/docs/tsconfig.json index 263508a814d..726e90ca58c 100644 --- a/docs/tsconfig.json +++ b/docs/tsconfig.json @@ -1,13 +1,13 @@ { "compilerOptions": { "lib": 
["es2017"], - "moduleResolution": "node", - "module": "commonjs", - "baseUrl": ".", + "module": "nodenext", + "moduleResolution": "nodenext", "strict": true, "types": ["node"] }, "files": [ - "../index.d.ts" + "../index.d.ts", + "test.ts" ] } diff --git a/docs/yarn.lock b/docs/yarn.lock index ad6752974c4..1796b171e92 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -111,7 +111,7 @@ mdurl@^2.0.0: resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-2.0.0.tgz#80676ec0433025dd3e17ee983d0fe8de5a2237e0" integrity sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w== -minimatch@^10.2.4: +minimatch@^10.2.5: version "10.2.5" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-10.2.5.tgz#bd48687a0be38ed2961399105600f832095861d1" integrity sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg== @@ -123,28 +123,28 @@ punycode.js@^2.3.1: resolved "https://registry.yarnpkg.com/punycode.js/-/punycode.js-2.3.1.tgz#6b53e56ad75588234e79f4affa90972c7dd8cdb7" integrity sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA== -typedoc@^0.28.18: - version "0.28.18" - resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.28.18.tgz#f7578fd9aa3ace83db8cce9bf1e8d41b88ec0b94" - integrity sha512-NTWTUOFRQ9+SGKKTuWKUioUkjxNwtS3JDRPVKZAXGHZy2wCA8bdv2iJiyeePn0xkmK+TCCqZFT0X7+2+FLjngA== +typedoc@^0.28.19: + version "0.28.19" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.28.19.tgz#0940c6b98eafae27cba71e57855d593f88a80649" + integrity sha512-wKh+lhdmMFivMlc6vRRcMGXeGEHGU2g8a2CkPTJjJlwRf1iXbimWIPcFolCqe4E0d/FRtGszpIrsp3WLpDB8Pw== dependencies: "@gerrit0/mini-shiki" "^3.23.0" lunr "^2.3.9" markdown-it "^14.1.1" - minimatch "^10.2.4" - yaml "^2.8.2" + minimatch "^10.2.5" + yaml "^2.8.3" -typescript@^5.9.3: - version "5.9.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.3.tgz#5b4f59e15310ab17a216f5d6cf53ee476ede670f" - integrity 
sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw== +typescript@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-6.0.3.tgz#90251dc007916e972786cb94d74d15b185577d21" + integrity sha512-y2TvuxSZPDyQakkFRPZHKFm+KKVqIisdg9/CZwm9ftvKXLP8NRWj38/ODjNbr43SsoXqNuAisEf1GdCxqWcdBw== uc.micro@^2.0.0, uc.micro@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-2.1.0.tgz#f8d3f7d0ec4c3dea35a7e3c8efa4cb8b45c9e7ee" integrity sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A== -yaml@^2.8.2: +yaml@^2.8.3: version "2.8.3" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.8.3.tgz#a0d6bd2efb3dd03c59370223701834e60409bd7d" integrity sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg== diff --git a/eslint.config.mjs b/eslint.config.mjs index bb79b34eafe..e961b7d59d2 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -463,6 +463,17 @@ export default [ 'eslint-rules/eslint-env-aliases': 'error', 'eslint-rules/eslint-log-printf-style': 'error', + // Inline `.evaluate()` callbacks (Playwright/Puppeteer) are serialized with + // `toString()` and run in chromium — coverage counters inside would ReferenceError. 
+ 'no-restricted-syntax': ['error', { + selector: + "CallExpression[callee.property.name='evaluate']" + + ":matches([arguments.0.type='ArrowFunctionExpression'], [arguments.0.type='FunctionExpression'])", + message: + 'Move the inline `.evaluate(...)` callback into a `*-browser-scripts.js` file ' + + '(NYC-excluded in nyc.config.js) and import it here.', + }], + 'n/no-restricted-require': ['error', [ ...GLOBAL_RESTRICTED_REQUIRES, { diff --git a/ext/tags.js b/ext/tags.js index dc02b074693..f4a8545f89c 100644 --- a/ext/tags.js +++ b/ext/tags.js @@ -25,6 +25,7 @@ const tags = { HTTP_RESPONSE_HEADERS: 'http.response.headers', HTTP_USERAGENT: 'http.useragent', HTTP_CLIENT_IP: 'http.client_ip', + NETWORK_CLIENT_IP: 'network.client.ip', // Messaging diff --git a/index.d.ts b/index.d.ts index 4a02a8c3ae3..89a07e30a84 100644 --- a/index.d.ts +++ b/index.d.ts @@ -272,6 +272,7 @@ interface Plugins { "memcached": tracer.plugins.memcached; "microgateway-core": tracer.plugins.microgateway_core; "mocha": tracer.plugins.mocha; + "modelcontextprotocol-sdk": tracer.plugins.modelcontextprotocol_sdk; "moleculer": tracer.plugins.moleculer; "mongodb-core": tracer.plugins.mongodb_core; "mongoose": tracer.plugins.mongoose; @@ -2848,11 +2849,18 @@ declare namespace tracer { * [mocha](https://mochajs.org/) module. */ interface mocha extends Integration {} - + + /** + * This plugin automatically instruments the + * [modelcontextprotocol-sdk](https://github.com/npmjs/package/@modelcontextprotocol/sdk) library. + */ + interface modelcontextprotocol_sdk extends Instrumentation {} + /** * This plugin automatically instruments the * [moleculer](https://moleculer.services/) module. */ + interface moleculer extends Moleculer { /** * Configuration for Moleculer clients. 
Set to false to disable client diff --git a/integration-tests/aiguard/index.spec.js b/integration-tests/aiguard/index.spec.js index 61e5d613dbc..9f4a74fcf37 100644 --- a/integration-tests/aiguard/index.spec.js +++ b/integration-tests/aiguard/index.spec.js @@ -40,6 +40,7 @@ describe('AIGuard SDK integration tests', () => { DD_SERVICE: 'ai_guard_integration_test', DD_ENV: 'test', DD_TRACE_ENABLED: 'true', + DD_TRACE_CLIENT_IP_ENABLED: 'false', DD_TRACE_AGENT_PORT: String(agent.port), DD_AI_GUARD_ENABLED: 'true', DD_AI_GUARD_BLOCK: 'true', @@ -69,6 +70,43 @@ describe('AIGuard SDK integration tests', () => { }) }) + it('adds client ip tags to the request root span when AI Guard runs', async () => { + const response = await executeRequest(`${url}/allow`, 'GET', { + 'x-forwarded-for': '203.0.113.10, 10.0.0.1', + }) + + assert.strictEqual(response.status, 200) + + await agent.assertMessageReceived(({ payload }) => { + const requestSpan = payload[0].find(span => span.name === 'express.request') + const guardSpan = payload[0].find(span => span.name === 'ai_guard') + + assert.notStrictEqual(requestSpan, undefined) + assert.notStrictEqual(guardSpan, undefined) + assert.strictEqual(requestSpan.meta['http.client_ip'], '203.0.113.10') + assert.ok(requestSpan.meta['network.client.ip']) + }) + }) + + it('does not add client ip tags when no AI Guard span is created', async () => { + const response = await executeRequest(`${url}/no-aiguard`, 'GET', { + 'x-forwarded-for': '203.0.113.10, 10.0.0.1', + }) + + assert.strictEqual(response.status, 200) + assert.deepStrictEqual(response.body, { ok: true }) + + await agent.assertMessageReceived(({ payload }) => { + const requestSpan = payload[0].find(span => span.name === 'express.request') + const guardSpan = payload[0].find(span => span.name === 'ai_guard') + + assert.notStrictEqual(requestSpan, undefined) + assert.strictEqual(guardSpan, undefined) + assert.strictEqual(requestSpan.meta['http.client_ip'], undefined) + 
assert.strictEqual(requestSpan.meta['network.client.ip'], undefined) + }) + }) + const directApiSuite = [ { endpoint: '/allow', action: 'ALLOW', reason: 'The prompt looks harmless' }, { endpoint: '/deny', action: 'DENY', reason: 'I am feeling suspicious today' }, diff --git a/integration-tests/aiguard/server.js b/integration-tests/aiguard/server.js index 254f9643814..d2cc0e7a0a5 100644 --- a/integration-tests/aiguard/server.js +++ b/integration-tests/aiguard/server.js @@ -6,6 +6,10 @@ const express = require('express') const app = express() +app.get('/no-aiguard', (req, res) => { + res.status(200).json({ ok: true }) +}) + app.get('/allow', async (req, res) => { const evaluation = await tracer.aiguard.evaluate([ { role: 'system', content: 'You are a beautiful AI' }, diff --git a/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs b/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs index a85036dac8e..011b2ed4666 100644 --- a/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs +++ b/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs @@ -3,6 +3,7 @@ import { sum } from './sum' let numAttempt = 0 let numOtherAttempt = 0 +let numLastAttempt = 0 describe('early flake detection', () => { test('can retry tests that eventually pass', { repeats: process.env.SHOULD_REPEAT && 2 }, () => { @@ -30,4 +31,9 @@ describe('early flake detection', () => { expect(sum(1, 2)).to.equal(numOtherAttempt++ < 3 ? 3 : 4) }) } + if (process.env.SHOULD_ADD_LAST_ATTEMPT_PASS) { + test('can retry tests that pass only on the last attempt', () => { + expect(sum(1, 2)).to.equal(numLastAttempt++ === 3 ? 
3 : 4) + }) + } }) diff --git a/integration-tests/ci-visibility/vitest-tests/hooks-flaky-test-retries.mjs b/integration-tests/ci-visibility/vitest-tests/hooks-flaky-test-retries.mjs new file mode 100644 index 00000000000..5a0eecdc3f5 --- /dev/null +++ b/integration-tests/ci-visibility/vitest-tests/hooks-flaky-test-retries.mjs @@ -0,0 +1,26 @@ +import { describe, test, expect, beforeEach, afterEach } from 'vitest' +import { sum } from './sum' + +let numAttempt = 0 + +describe('flaky test retries with hooks', () => { + beforeEach(() => { + // setup + }) + + afterEach(() => { + // teardown + }) + + test('can retry tests that eventually pass', () => { + expect(sum(1, 2)).to.equal(numAttempt++) + }) + + test('can retry tests that never pass', () => { + expect(sum(1, 2)).to.equal(0) + }) + + test('does not retry if unnecessary', () => { + expect(sum(1, 2)).to.equal(3) + }) +}) diff --git a/integration-tests/ci-visibility/vitest-tests/hooks-test-management.mjs b/integration-tests/ci-visibility/vitest-tests/hooks-test-management.mjs new file mode 100644 index 00000000000..827f5351f49 --- /dev/null +++ b/integration-tests/ci-visibility/vitest-tests/hooks-test-management.mjs @@ -0,0 +1,19 @@ +import { describe, test, expect, beforeEach, afterEach } from 'vitest' + +describe('test management with hooks', () => { + beforeEach(() => { + // setup + }) + + afterEach(() => { + // teardown + }) + + test('can apply management to a failing test with hooks', () => { + expect(1 + 2).to.equal(4) // intentionally fails + }) + + test('can pass normally with hooks', () => { + expect(1 + 2).to.equal(3) + }) +}) diff --git a/integration-tests/ci-visibility/vitest-tests/hooks-test-quarantine-failing-after-each.mjs b/integration-tests/ci-visibility/vitest-tests/hooks-test-quarantine-failing-after-each.mjs new file mode 100644 index 00000000000..e493f4a6af0 --- /dev/null +++ b/integration-tests/ci-visibility/vitest-tests/hooks-test-quarantine-failing-after-each.mjs @@ -0,0 +1,11 @@ +import { 
describe, test, afterEach } from 'vitest' + +describe('quarantine tests with failing afterEach', () => { + afterEach(() => { + throw new Error('afterEach hook failed') + }) + + test('can quarantine a test whose afterEach hook fails', () => { + // test body passes, but afterEach throws — causing the test to be reported as failed + }) +}) diff --git a/integration-tests/ci-visibility/vitest-tests/test-quarantine.mjs b/integration-tests/ci-visibility/vitest-tests/test-quarantine.mjs index afe8b5f8bab..713936316c5 100644 --- a/integration-tests/ci-visibility/vitest-tests/test-quarantine.mjs +++ b/integration-tests/ci-visibility/vitest-tests/test-quarantine.mjs @@ -10,4 +10,8 @@ describe('quarantine tests', () => { test('can pass normally', () => { expect(1 + 2).to.equal(3) }) + + test('can quarantine a passing test', () => { + expect(1 + 2).to.equal(3) + }) }) diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 9ff6d53c8e6..5e362b2d6e6 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -354,6 +354,10 @@ moduleTypes.forEach(({ // These tests require Cypress >=10 features (defineConfig, setupNodeEvents) const over10It = (version !== '6.7.0') ? it : it.skip + // Cypress <14 shipped an older ts-node ESM loader that doesn't implement the + // current Node.js ESM hooks chain (ERR_LOADER_CHAIN_INCOMPLETE), so TS configs + // under `"type": "module"` can't be loaded at all, regardless of dd-trace. + const over14It = (version === 'latest' || semver.gte(version, '14.0.0')) ? it : it.skip over10It('is backwards compatible with the old manual plugin approach', async () => { receiver.setInfoResponse({ endpoints: [] }) @@ -750,6 +754,177 @@ moduleTypes.forEach(({ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') }) + // Regression guard: when the surrounding package has "type": "module", + // the .ts config is transpiled and loaded as ESM. 
Cypress's CJS + // addHook path cannot intercept the ESM `import 'cypress'`, so the + // only route to `wrapConfig` is the CLI-wrap path that rewrites + // --config-file to a wrapper. An earlier version bailed out on `.ts` + // here and silently skipped instrumentation — no test_session / + // test_module / test_suite / test spans reached the intake. + // + // Set up the ESM project inside a dedicated subdirectory so Cypress + // resolves `type: module` and the tsconfig only for this test. Using + // the sandbox root would leak cached ts-node / webpack state into + // later tests (Cypress caches based on the project root). + over14It('reports tests with a TypeScript config file under "type": "module"', async () => { + const subprojectDir = path.join(cwd, 'esm-ts-subproject') + fs.rmSync(subprojectDir, { recursive: true, force: true }) + fs.mkdirSync(path.join(subprojectDir, 'cypress', 'e2e'), { recursive: true }) + fs.writeFileSync(path.join(subprojectDir, 'package.json'), JSON.stringify({ + name: 'esm-ts-subproject', + type: 'module', + }, null, 2)) + // `module: nodenext` so ts-node transpiles the `.ts` as ESM — real-world + // ESM TS projects already ship this; the default (CommonJS) emit would + // produce `exports is not defined in ES module scope` at runtime. + fs.writeFileSync(path.join(subprojectDir, 'tsconfig.json'), JSON.stringify({ + compilerOptions: { module: 'nodenext', moduleResolution: 'nodenext', target: 'ES2022' }, + }, null, 2)) + // Minimal self-contained config so the subproject doesn't depend on + // anything under the sandbox's `cypress/` tree beyond the support + // file (which wires dd-trace's browser-side hooks via the shared + // `dd-trace` package already installed in the sandbox). 
+ fs.writeFileSync(path.join(subprojectDir, 'cypress.config.ts'), [ + "import { defineConfig } from 'cypress'", + '', + 'export default defineConfig({', + ' defaultCommandTimeout: 1000,', + ' e2e: {', + " specPattern: 'cypress/e2e/**/*.cy.js',", + " supportFile: 'cypress/support/e2e.js',", + ' },', + ' video: false,', + ' screenshotOnRunFailure: false,', + '})', + '', + ].join('\n')) + fs.mkdirSync(path.join(subprojectDir, 'cypress', 'support'), { recursive: true }) + fs.copyFileSync( + path.join(cwd, 'cypress', 'support', 'e2e.js'), + path.join(subprojectDir, 'cypress', 'support', 'e2e.js') + ) + // Minimal passing spec so the test is self-contained and doesn't + // depend on the rest of the sandbox's e2e tree. + fs.writeFileSync(path.join(subprojectDir, 'cypress', 'e2e', 'basic-pass.cy.js'), [ + '/* eslint-disable */', + "describe('basic pass suite', () => {", + " it('can pass', () => {", + " cy.visit('/')", + " cy.get('.hello-world').should('have.text', 'Hello World')", + ' })', + '})', + '', + ].join('\n')) + + let testOutput = '' + try { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + // Full span hierarchy must be present — not just a stray telemetry span. 
+ const sessionEvents = events.filter(event => event.type === 'test_session_end') + const moduleEvents = events.filter(event => event.type === 'test_module_end') + const suiteEvents = events.filter(event => event.type === 'test_suite_end') + const testEvents = events.filter(event => event.type === 'test') + + assert.strictEqual(sessionEvents.length, 1, `one test_session span\n${testOutput}`) + assert.strictEqual(moduleEvents.length, 1, `one test_module span\n${testOutput}`) + assert.ok(suiteEvents.length >= 1, `at least one test_suite span\n${testOutput}`) + + const passedTest = testEvents.find(event => + event.content.resource === 'cypress/e2e/basic-pass.cy.js.basic pass suite can pass' + ) + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 20000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + // Run Cypress *from* the subproject so its project root is the + // ESM-configured directory; keeping the original `cwd` would pick + // up the sandbox's own package.json (no `type: module`). + childProcess = exec( + path.join(cwd, 'node_modules/.bin/cypress') + ' run', + { + cwd: subprojectDir, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + }, + } + ) + childProcess.stdout?.on('data', (d) => { testOutput += d }) + childProcess.stderr?.on('data', (d) => { testOutput += d }) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, `cypress process should exit successfully\n${testOutput}`) + } finally { + fs.rmSync(subprojectDir, { recursive: true, force: true }) + } + }) + + // Regression guard: when OTEL_TRACES_EXPORTER=otlp is set in the + // environment (e.g. 
by an unrelated OpenTelemetry-instrumented shell), + // the tracer must still ship Test Optimization spans to + // /api/v2/citestcycle instead of silently replacing the Test + // Optimization exporter with OtlpHttpTraceExporter and dropping all + // test_session / test_module / test_suite / test spans. + over10It('keeps Test Optimization exporter when OTEL_TRACES_EXPORTER=otlp is set', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const sessionEvents = events.filter(event => event.type === 'test_session_end') + const testEvents = events.filter(event => event.type === 'test') + + assert.strictEqual(sessionEvents.length, 1, 'one test_session span must reach citestcycle') + + const passedTest = testEvents.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 20000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + testCommand, + { + cwd, + env: { + ...envVars, + // Simulates a user shell that already exports OTEL_* vars for + // a separate OTEL collector. The Test Optimization exporter + // must win inside isCiVisibility mode. 
+ OTEL_TRACES_EXPORTER: 'otlp', + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + }) + over10It('does not modify the user support file and cleans up the injected wrapper', async () => { const supportFilePath = path.join(cwd, 'cypress/support/e2e.js') const originalSupportContent = fs.readFileSync(supportFilePath, 'utf8') diff --git a/integration-tests/debugger/diagnostics.spec.js b/integration-tests/debugger/diagnostics.spec.js index 7718807c2a1..b21a94276ed 100644 --- a/integration-tests/debugger/diagnostics.spec.js +++ b/integration-tests/debugger/diagnostics.spec.js @@ -208,6 +208,9 @@ describe('Dynamic Instrumentation', function () { let receivedAckUpdate = false t.agent.on('remote-config-ack-update', (id, version, state, error) => { + // Transitional UNACKNOWLEDGED can arrive before the worker transitions to ERROR. 
+ if (state === UNACKNOWLEDGED) return + assert.strictEqual(id, `logProbe_${config.id}`) assert.strictEqual(version, 1) assert.strictEqual(state, ERROR) diff --git a/integration-tests/esbuild/package.json b/integration-tests/esbuild/package.json index f964585c22e..925656570fb 100644 --- a/integration-tests/esbuild/package.json +++ b/integration-tests/esbuild/package.json @@ -23,7 +23,7 @@ "@apollo/server": "5.5.0", "@koa/router": "15.4.0", "aws-sdk": "2.1693.0", - "axios": "1.15.0", + "axios": "1.15.1", "express": "4.22.1", "knex": "3.2.9", "koa": "3.2.0", diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index ecdc8444b58..2c837ce46ec 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -2,14 +2,16 @@ const assert = require('assert') const childProcess = require('child_process') -const { execSync, fork, spawn } = childProcess +const { exec, execSync, fork, spawn } = childProcess const { existsSync, readFileSync, unlinkSync, writeFileSync } = require('fs') const fs = require('fs/promises') const http = require('http') const { builtinModules } = require('module') const os = require('os') const path = require('path') -const { inspect } = require('util') +const { inspect, promisify } = require('util') + +const execAsync = promisify(exec) const id = require('../../packages/dd-trace/src/id') const { getCappedRange } = require('../../packages/dd-trace/test/plugins/versions') @@ -384,17 +386,62 @@ function execHelper (command, options) { } } +/** + * Async sibling of {@link execHelper}. Runs in parallel with other awaited operations and + * preserves the bun-only 60s retry semantics. + * + * @param {string} command - Command to run. + * @param {import('child_process').ExecOptions} [options] - Exec options. 
+ * @returns {Promise} + */ +async function execHelperAsync (command, options) { + try { + log('Exec START: ', command) + await execAsync(command, options) + log('Exec SUCCESS: ', command) + return + } catch (execError) { + error('Exec ERROR: ', command, execError) + if (!command.startsWith(BUN)) throw execError + } + log('Exec RETRY BACKOFF: 60 seconds') + await new Promise(resolve => setTimeout(resolve, 60_000)) + try { + log('Exec RETRY START: ', command) + await execAsync(command, options) + log('Exec RETRY SUCCESS: ', command) + } catch (retryError) { + error('Exec RETRY ERROR', command, retryError) + throw retryError + } +} + /** * Pack dd-trace into a tarball at the specified path. * * @param {string} tarballPath - The path where the tarball should be created * @param {NodeJS.ProcessEnv} env - The environment to use for the pack command + * @returns {Promise} */ -function packTarball (tarballPath, env) { - execHelper(`${BUN} pm pack --ignore-scripts --quiet --gzip-level 0 --filename ${tarballPath}`, { env }) +async function packTarball (tarballPath, env) { + await execHelperAsync(`${BUN} pm pack --ignore-scripts --quiet --gzip-level 0 --filename ${tarballPath}`, { env }) log('Tarball packed successfully:', tarballPath) } +/** + * Copy each integration-tests path into the sandbox folder concurrently. + * + * @param {string[]} integrationTestsPaths - Source paths to copy from. + * @param {string} folder - Destination sandbox folder. + * @returns {Promise} + */ +async function copyIntegrationTests (integrationTestsPaths, folder) { + await Promise.all(integrationTestsPaths.map(p => process.platform === 'win32' + ? execHelperAsync(`Copy-Item -Recurse -Path "${p}" -Destination "${folder}"`, { shell: 'powershell.exe' }) + : execHelperAsync(`cp -R ${p} ${folder}`) + )) +} + /** * Pack the tarball with file locking to coordinate between parallel workers. * Only one worker will pack the tarball, others will wait for it to be ready. 
@@ -424,7 +471,7 @@ async function packTarballWithLock (tarballPath, env) { } // We have the lock, pack the tarball - packTarball(tarballPath, env) + await packTarball(tarballPath, env) } catch (err) { if (err.code === 'EEXIST') { // Lock exists, another process is packing - wait for the tarball to appear @@ -496,7 +543,12 @@ async function createSandbox ( const addOptions = { cwd: folder, env: restOfEnv } const addFlags = ['--trust'] - await packTarballWithLock(out, restOfEnv) + // Tarball packing and integration-tests copy touch independent paths (sandbox root vs. the + // sandbox folder) and neither writes anything `bun add` will read, so run them concurrently. + await Promise.all([ + packTarballWithLock(out, restOfEnv), + copyIntegrationTests(integrationTestsPaths, folder), + ]) if (process.env.OFFLINE === '1' || process.env.OFFLINE === 'true') { addFlags.push('--prefer-offline') @@ -514,14 +566,6 @@ async function createSandbox ( ...addOptions, timeout: 90_000, }) - - for (const path of integrationTestsPaths) { - if (process.platform === 'win32') { - execHelper(`Copy-Item -Recurse -Path "${path}" -Destination "${folder}"`, { shell: 'powershell.exe' }) - } else { - execHelper(`cp -R ${path} ${folder}`) - } - } if (process.platform === 'win32') { // On Windows, we can only sync entire filesystem volume caches. 
execHelper(`Write-VolumeCache ${folder[0]}`, { shell: 'powershell.exe' }) diff --git a/integration-tests/opentelemetry-traces.spec.js b/integration-tests/opentelemetry-traces.spec.js index 0b9757f21fc..b908159e8f5 100644 --- a/integration-tests/opentelemetry-traces.spec.js +++ b/integration-tests/opentelemetry-traces.spec.js @@ -4,9 +4,14 @@ const assert = require('node:assert/strict') const { fork } = require('child_process') const { join } = require('path') -const { FakeAgent, sandboxCwd, useSandbox } = require('./helpers') - -function waitForOtlpTraces (agent, timeout = 10000) { +const { assertObjectContains, FakeAgent, sandboxCwd, useSandbox } = require('./helpers') + +/** + * @param {FakeAgent} agent + * @param {number} timeout + * @returns {Promise<{ headers: Record, payload: object }>} + */ +function waitForOtlpTraces (agent, timeout) { return new Promise((resolve, reject) => { const timer = setTimeout(() => reject(new Error('Timeout waiting for OTLP traces')), timeout) agent.once('otlp-traces', (msg) => { @@ -61,44 +66,33 @@ describe('OTLP Trace Export', () => { }) }) - const { headers, payload } = await tracesPromise - await exitPromise + const [{ headers, payload }] = await Promise.all([tracesPromise, exitPromise]) assert.strictEqual(headers['content-type'], 'application/json') - // Validate ExportTraceServiceRequest top-level structure - assert.ok(payload.resourceSpans, 'payload should have resourceSpans') - assert.strictEqual(payload.resourceSpans.length, 1) + assertObjectContains(payload, { + resourceSpans: [{ + resource: { + attributes: [ + { key: 'service.name', value: { stringValue: 'otlp-test-service' } }, + { key: 'deployment.environment.name', value: { stringValue: 'test' } }, + { key: 'service.version', value: { stringValue: '1.0.0' } }, + ], + }, + scopeSpans: [{ + scope: { name: 'dd-trace-js' }, + }], + }], + }) const resourceSpan = payload.resourceSpans[0] + assert.ok(resourceSpan.scopeSpans[0].scope.version, 'scope should have a version') - 
// Validate resource attributes - const resource = resourceSpan.resource - assert.ok(resource, 'resourceSpan should have resource') - assert.ok(Array.isArray(resource.attributes), 'resource should have attributes array') - - const resourceAttrs = Object.fromEntries( - resource.attributes.map(({ key, value }) => [key, value]) - ) - assert.deepStrictEqual(resourceAttrs['service.name'], { stringValue: 'otlp-test-service' }) - assert.deepStrictEqual(resourceAttrs['deployment.environment'], { stringValue: 'test' }) - assert.deepStrictEqual(resourceAttrs['service.version'], { stringValue: '1.0.0' }) - - // Validate scopeSpans - assert.ok(Array.isArray(resourceSpan.scopeSpans), 'resourceSpan should have scopeSpans') - assert.strictEqual(resourceSpan.scopeSpans.length, 1) - - const scopeSpan = resourceSpan.scopeSpans[0] - assert.strictEqual(scopeSpan.scope.name, 'dd-trace-js') - assert.ok(scopeSpan.scope.version, 'scope should have a version') - - // Validate spans - const spans = scopeSpan.spans + const spans = resourceSpan.scopeSpans[0].spans assert.strictEqual(spans.length, 3, 'should have 3 spans') // Sort by name for stable ordering spans.sort((a, b) => a.name.localeCompare(b.name)) - const [dbSpan, errSpan, webSpan] = spans // All spans should share the same traceId @@ -112,14 +106,20 @@ describe('OTLP Trace Export', () => { assert.deepStrictEqual(dbSpan.parentSpanId, webSpan.spanId, 'child span should reference parent') assert.deepStrictEqual(errSpan.parentSpanId, webSpan.spanId, 'error span should reference parent') - // Validate span names - assert.strictEqual(webSpan.name, 'GET /api/test') - assert.strictEqual(dbSpan.name, 'db.query') - assert.strictEqual(errSpan.name, 'error.operation') - - // Validate span kind (server=2, client=3 per OTLP proto SpanKind enum) - assert.strictEqual(webSpan.kind, 2, 'web.request should be SERVER kind') - assert.strictEqual(dbSpan.kind, 3, 'db.query should be CLIENT kind') + assertObjectContains(webSpan, { + name: 'GET /api/test', 
+ kind: 2, // SERVER + }) + // Status.code should either be unset or zero + assert.ok(!webSpan.status?.code) + assertObjectContains(dbSpan, { + name: 'db.query', + kind: 3, // CLIENT + }) + assertObjectContains(errSpan, { + name: 'error.operation', + status: { code: 2, message: 'test error message' }, + }) // Validate timing fields for (const span of spans) { @@ -128,28 +128,15 @@ describe('OTLP Trace Export', () => { assert.ok(span.endTimeUnixNano >= span.startTimeUnixNano, 'endTime should be >= startTime') } - // Validate error span status - assert.strictEqual(errSpan.status.code, 2, 'error span should have STATUS_CODE_ERROR') - assert.strictEqual(errSpan.status.message, 'test error message') - - // Validate non-error span status - assert.strictEqual(webSpan.status.code, 0, 'non-error span should have STATUS_CODE_UNSET') - - // Validate span attributes include service.name and resource.name - const webAttrs = Object.fromEntries( - webSpan.attributes.map(({ key, value }) => [key, value]) - ) - assert.deepStrictEqual(webAttrs['service.name'], { stringValue: 'otlp-test-service' }) - assert.deepStrictEqual(webAttrs['operation.name'], { stringValue: 'web.request' }) - assert.deepStrictEqual(webAttrs['resource.name'], { stringValue: 'GET /api/test' }) - - // Validate custom tags appear as attributes - assert.deepStrictEqual(webAttrs['http.method'], { stringValue: 'GET' }) - assert.deepStrictEqual(webAttrs['http.url'], { stringValue: '/api/test' }) - - const dbAttrs = Object.fromEntries( - dbSpan.attributes.map(({ key, value }) => [key, value]) - ) - assert.deepStrictEqual(dbAttrs['db.type'], { stringValue: 'postgres' }) + assertObjectContains(webSpan.attributes, [ + { key: 'service.name', value: { stringValue: 'otlp-test-service' } }, + { key: 'operation.name', value: { stringValue: 'web.request' } }, + { key: 'resource.name', value: { stringValue: 'GET /api/test' } }, + { key: 'http.method', value: { stringValue: 'GET' } }, + { key: 'http.url', value: { stringValue: 
'/api/test' } }, + ]) + assertObjectContains(dbSpan.attributes, [ + { key: 'db.type', value: { stringValue: 'postgres' } }, + ]) }) }) diff --git a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js index 20f37a296e0..d9f42418416 100644 --- a/integration-tests/vitest/vitest.spec.js +++ b/integration-tests/vitest/vitest.spec.js @@ -60,6 +60,7 @@ const { GIT_COMMIT_SHA, GIT_REPOSITORY_URL, DD_CI_LIBRARY_CONFIGURATION_ERROR, + TEST_FINAL_STATUS, } = require('../../packages/dd-trace/src/plugins/util/test') const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') const { TELEMETRY_COVERAGE_UPLOAD } = require('../../packages/dd-trace/src/ci-visibility/telemetry') @@ -739,6 +740,7 @@ versions.forEach((version) => { // 'early flake detection can retry tests that eventually pass', // will be considered new // 'early flake detection can retry tests that always pass', // will be considered new // 'early flake detection can retry tests that eventually fail', // will be considered new + // 'early flake detection can retry tests that pass only on the last attempt', // will be considered new // 'early flake detection does not retry if the test is skipped', // skipped so not retried 'early flake detection does not retry if it is not new', ], @@ -751,7 +753,7 @@ versions.forEach((version) => { const tests = events.filter(event => event.type === 'test').map(test => test.content) - assert.strictEqual(tests.length, 14) + assert.strictEqual(tests.length, 18) assertObjectContains(tests.map(test => test.meta[TEST_NAME]), [ 'early flake detection can retry tests that eventually pass', @@ -763,18 +765,22 @@ versions.forEach((version) => { 'early flake detection can retry tests that eventually fail', 'early flake detection can retry tests that eventually fail', 'early flake detection can retry tests that eventually fail', + 'early flake detection can retry tests that pass only on the last attempt', + 'early flake detection can 
retry tests that pass only on the last attempt', + 'early flake detection can retry tests that pass only on the last attempt', 'early flake detection can retry tests that eventually pass', 'early flake detection can retry tests that always pass', 'early flake detection does not retry if it is not new', 'early flake detection does not retry if the test is skipped', 'early flake detection can retry tests that eventually fail', + 'early flake detection can retry tests that pass only on the last attempt', ]) const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') - // 4 executions of the 3 new tests + 1 new skipped test (not retried) - assert.strictEqual(newTests.length, 13) + // 4 executions of the 4 new tests + 1 new skipped test (not retried) + assert.strictEqual(newTests.length, 17) const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.strictEqual(retriedTests.length, 9) // 3 retries of the 3 new tests + assert.strictEqual(retriedTests.length, 12) // 3 retries of the 4 new tests retriedTests.forEach(test => { assert.strictEqual(test.meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.efd) @@ -783,7 +789,21 @@ versions.forEach((version) => { // exit code should be 0 and test session should be reported as passed, // even though there are some failing executions const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.strictEqual(failedTests.length, 3) + assert.strictEqual(failedTests.length, 6) + + // Verifies that task.result.state is reset before the last repetition runs. + // Without this reset, vitest keeps a stale 'fail' from prior repetitions and + // incorrectly reports the last execution as failed even when it succeeds. + const lastAttemptPassTests = tests + .filter(test => + test.meta[TEST_NAME] === 'early flake detection can retry tests that pass only on the last attempt') + .sort((a, b) => (a.start < b.start ? -1 : a.start > b.start ? 
1 : 0)) + assert.strictEqual(lastAttemptPassTests.length, NUM_RETRIES_EFD + 1) + assert.strictEqual( + lastAttemptPassTests.filter(test => test.meta[TEST_STATUS] === 'fail').length, + NUM_RETRIES_EFD + ) + assert.strictEqual(lastAttemptPassTests[lastAttemptPassTests.length - 1].meta[TEST_STATUS], 'pass') const testSessionEvent = events.find(event => event.type === 'test_session_end').content assert.strictEqual(testSessionEvent.meta[TEST_STATUS], 'pass') assert.strictEqual(testSessionEvent.meta[TEST_EARLY_FLAKE_ENABLED], 'true') @@ -798,6 +818,7 @@ versions.forEach((version) => { TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', SHOULD_ADD_EVENTUALLY_FAIL: '1', + SHOULD_ADD_LAST_ATTEMPT_PASS: '1', }, } ) @@ -1842,6 +1863,7 @@ versions.forEach((version) => { if (isFirstAttempt) { assert.ok(!(TEST_IS_RETRY in test.meta)) assert.ok(!(TEST_RETRY_REASON in test.meta)) + assert.ok(!(TEST_FINAL_STATUS in test.meta)) continue } assert.strictEqual(test.meta[TEST_IS_RETRY], 'true') @@ -1856,6 +1878,16 @@ versions.forEach((version) => { assert.strictEqual(test.meta[TEST_HAS_FAILED_ALL_RETRIES], 'true') assert.strictEqual(test.meta[TEST_MANAGEMENT_ATTEMPT_TO_FIX_PASSED], 'false') } + if (shouldAlwaysPass) { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], 'pass') + } else if (isQuarantining || isDisabling) { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], 'skip') + } else { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], 'fail') + } + } else { + // Intermediate ATF executions must not carry a final status tag + assert.ok(!(TEST_FINAL_STATUS in test.meta)) } } else { assert.ok(!(TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX in test.meta)) @@ -2192,7 +2224,7 @@ versions.forEach((version) => { .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { const events = payloads.flatMap(({ payload }) => payload.events) const tests = events.filter(event => event.type === 
'test').map(event => event.content) - assert.strictEqual(tests.length, 2) + assert.strictEqual(tests.length, 3) const testSession = events.find(event => event.type === 'test_session_end').content @@ -2208,6 +2240,7 @@ versions.forEach((version) => { [ 'ci-visibility/vitest-tests/test-quarantine.mjs.quarantine tests can quarantine a test', 'ci-visibility/vitest-tests/test-quarantine.mjs.quarantine tests can pass normally', + 'ci-visibility/vitest-tests/test-quarantine.mjs.quarantine tests can quarantine a passing test', ] ) @@ -2729,5 +2762,450 @@ versions.forEach((version) => { }) }) } + + context('final status tag', () => { + it('sets final_status tag to test status on regular tests without retry features', async () => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + flaky_test_retries_enabled: false, + early_flake_detection: { enabled: false }, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + tests.forEach(test => { + assert.strictEqual( + test.meta[TEST_FINAL_STATUS], + test.meta[TEST_STATUS], + `Expected TEST_FINAL_STATUS to match TEST_STATUS for test "${test.meta[TEST_NAME]}"` + ) + }) + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + // Runs test-visibility-passed-suite (pass/skip), test-visibility-failed-suite + // (fail/pass with hooks), and test-visibility-failed-hooks (fail due to hook throws) + TEST_DIR: 'ci-visibility/vitest-tests/test-visibility*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + }, + } + ) + + await Promise.all([once(childProcess, 'exit'), eventsPromise]) + }) + + it('sets final_status tag to test status reported to test framework on last 
retry (ATR active only)', + async () => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + flaky_test_retries_enabled: true, + early_flake_detection: { enabled: false }, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const assertAtrFinalStatus = (testName, expectedFinalStatus) => { + const group = tests.filter(t => t.meta[TEST_NAME] === testName) + group.sort((a, b) => (a.start < b.start ? -1 : a.start > b.start ? 1 : 0)) + .forEach((test, index) => { + if (index < group.length - 1) { + assert.ok(!(TEST_FINAL_STATUS in test.meta), + `TEST_FINAL_STATUS should not be set on attempt ${index} of "${testName}"` + ) + } else { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], expectedFinalStatus) + } + }) + } + + // Test that always passes on the first try: final_status is set immediately + const alwaysPassingTests = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries does not retry if unnecessary' + ) + assert.strictEqual(alwaysPassingTests.length, 1) + assert.strictEqual(alwaysPassingTests[0].meta[TEST_FINAL_STATUS], 'pass') + + assertAtrFinalStatus('flaky test retries can retry tests that eventually pass', 'pass') + assertAtrFinalStatus('flaky test retries can retry tests that never pass', 'fail') + + // With hooks: same behavior + const alwaysPassingTestsWithHooks = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries with hooks does not retry if unnecessary' + ) + assert.strictEqual(alwaysPassingTestsWithHooks.length, 1) + assert.strictEqual(alwaysPassingTestsWithHooks[0].meta[TEST_FINAL_STATUS], 'pass') + + assertAtrFinalStatus('flaky test retries with hooks can retry tests that eventually pass', 'pass') + assertAtrFinalStatus('flaky test 
retries with hooks can retry tests that never pass', 'fail') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/{flaky-test-retries,hooks-flaky-test-retries}.mjs', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + }, + } + ) + + await Promise.all([once(childProcess, 'exit'), eventsPromise]) + }) + + it('sets final_status tag to test status reported to test framework on last retry (EFD active only)', + async () => { + receiver.setKnownTests({ + vitest: { + 'ci-visibility/vitest-tests/early-flake-detection.mjs': [ + 'early flake detection does not retry if it is not new', + ], + 'ci-visibility/vitest-tests/hooks-flaky-test-retries.mjs': [ + 'flaky test retries with hooks does not retry if unnecessary', + ], + }, + }) + receiver.setSettings({ + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': NUM_RETRIES_EFD }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + // Known test: not retried, every execution is already the final one + const knownTests = tests.filter( + test => test.meta[TEST_NAME] === 'early flake detection does not retry if it is not new' + ) + assert.strictEqual(knownTests.length, 1) + assert.ok(!(TEST_IS_NEW in knownTests[0].meta)) + assert.ok(!(TEST_IS_RETRY in knownTests[0].meta)) + assert.strictEqual(knownTests[0].meta[TEST_FINAL_STATUS], knownTests[0].meta[TEST_STATUS]) + + const assertEfdFinalStatus = (testName, expectedFinalStatus) => { + const group = tests.filter(t => t.meta[TEST_NAME] === testName) + group.sort((a, b) => (a.start < b.start ? -1 : a.start > b.start ? 
1 : 0)) + .forEach((test, index) => { + if (index < group.length - 1) { + assert.ok(!(TEST_FINAL_STATUS in test.meta)) + } else { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], expectedFinalStatus) + } + }) + } + + assertEfdFinalStatus('early flake detection can retry tests that eventually pass', 'pass') + assertEfdFinalStatus('early flake detection can retry tests that always pass', 'pass') + + // With hooks: same behavior + const knownTestsWithHooks = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries with hooks does not retry if unnecessary' + ) + assert.strictEqual(knownTestsWithHooks.length, 1) + assert.ok(!(TEST_IS_NEW in knownTestsWithHooks[0].meta)) + assert.ok(!(TEST_IS_RETRY in knownTestsWithHooks[0].meta)) + assert.strictEqual( + knownTestsWithHooks[0].meta[TEST_FINAL_STATUS], knownTestsWithHooks[0].meta[TEST_STATUS]) + + assertEfdFinalStatus('flaky test retries with hooks can retry tests that eventually pass', 'pass') + assertEfdFinalStatus('flaky test retries with hooks can retry tests that never pass', 'fail') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/{early-flake-detection,hooks-flaky-test-retries}.mjs', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + }, + } + ) + + await Promise.all([once(childProcess, 'exit'), eventsPromise]) + }) + + it('sets final_status tag only on last ATR retry when EFD is enabled but not active and ATR is active', + async () => { + // All tests are known so EFD will be enabled but not active for them + receiver.setKnownTests({ + vitest: { + 'ci-visibility/vitest-tests/flaky-test-retries.mjs': [ + 'flaky test retries can retry tests that eventually pass', + 'flaky test retries can retry tests that never pass', + 'flaky test retries does not retry if unnecessary', + ], + 'ci-visibility/vitest-tests/hooks-flaky-test-retries.mjs': [ + 'flaky test retries with 
hooks can retry tests that eventually pass', + 'flaky test retries with hooks can retry tests that never pass', + 'flaky test retries with hooks does not retry if unnecessary', + ], + }, + }) + receiver.setSettings({ + flaky_test_retries_enabled: true, + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': 3 }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const eventuallyPassingTests = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries can retry tests that eventually pass' + ) + eventuallyPassingTests.sort((a, b) => (a.start < b.start ? -1 : a.start > b.start ? 1 : 0)) + .forEach((test, idx) => { + if (idx < eventuallyPassingTests.length - 1) { + assert.ok(!(TEST_FINAL_STATUS in test.meta), + 'TEST_FINAL_STATUS should not be set on previous ATR runs' + ) + } else { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], test.meta[TEST_STATUS]) + assert.strictEqual(test.meta[TEST_STATUS], 'pass') + } + }) + + const alwaysPassingTests = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries does not retry if unnecessary' + ) + assert.strictEqual(alwaysPassingTests.length, 1) + assert.strictEqual(alwaysPassingTests[0].meta[TEST_FINAL_STATUS], 'pass') + + // With hooks: same behavior + const eventuallyPassingTestsWithHooks = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries with hooks can retry tests that eventually pass' + ) + eventuallyPassingTestsWithHooks.sort((a, b) => (a.start < b.start ? -1 : a.start > b.start ? 
1 : 0)) + .forEach((test, idx) => { + if (idx < eventuallyPassingTestsWithHooks.length - 1) { + assert.ok(!(TEST_FINAL_STATUS in test.meta), + 'TEST_FINAL_STATUS should not be set on previous ATR runs' + ) + } else { + assert.strictEqual(test.meta[TEST_FINAL_STATUS], test.meta[TEST_STATUS]) + assert.strictEqual(test.meta[TEST_STATUS], 'pass') + } + }) + + const alwaysPassingTestsWithHooks = tests.filter( + test => test.meta[TEST_NAME] === 'flaky test retries with hooks does not retry if unnecessary' + ) + assert.strictEqual(alwaysPassingTestsWithHooks.length, 1) + assert.strictEqual(alwaysPassingTestsWithHooks[0].meta[TEST_FINAL_STATUS], 'pass') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/{flaky-test-retries,hooks-flaky-test-retries}.mjs', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + }, + } + ) + + await Promise.all([once(childProcess, 'exit'), eventsPromise]) + }) + + if (version === 'latest') { + it('sets final_status tag to skip for disabled tests', async () => { + receiver.setSettings({ test_management: { enabled: true } }) + receiver.setTestManagementTests({ + vitest: { + suites: { + 'ci-visibility/vitest-tests/test-disabled.mjs': { + tests: { + 'disable tests can disable a test': { + properties: { disabled: true }, + }, + }, + }, + 'ci-visibility/vitest-tests/hooks-test-management.mjs': { + tests: { + 'test management with hooks can apply management to a failing test with hooks': { + properties: { disabled: true }, + }, + }, + }, + }, + }, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const disabledTest = tests.find(test => test.meta[TEST_NAME] === 'disable tests 
can disable a test') + assert.ok(disabledTest, 'Expected to find the disabled test') + assert.strictEqual(disabledTest.meta[TEST_STATUS], 'skip') + assert.strictEqual(disabledTest.meta[TEST_MANAGEMENT_IS_DISABLED], 'true') + assert.strictEqual(disabledTest.meta[TEST_FINAL_STATUS], 'skip') + + // With hooks: same behavior + const disabledTestWithHooks = tests.find( + test => test.meta[TEST_NAME] === + 'test management with hooks can apply management to a failing test with hooks' + ) + assert.ok(disabledTestWithHooks, 'Expected to find the disabled test with hooks') + assert.strictEqual(disabledTestWithHooks.meta[TEST_STATUS], 'skip') + assert.strictEqual(disabledTestWithHooks.meta[TEST_MANAGEMENT_IS_DISABLED], 'true') + assert.strictEqual(disabledTestWithHooks.meta[TEST_FINAL_STATUS], 'skip') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/{test-disabled,hooks-test-management}.mjs', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init --no-warnings', + }, + } + ) + + await Promise.all([once(childProcess, 'exit'), eventsPromise]) + }) + + it('sets final_status tag to skip for quarantined tests', async () => { + receiver.setSettings({ test_management: { enabled: true } }) + receiver.setTestManagementTests({ + vitest: { + suites: { + 'ci-visibility/vitest-tests/test-quarantine.mjs': { + tests: { + 'quarantine tests can quarantine a test': { + properties: { quarantined: true }, + }, + 'quarantine tests can quarantine a passing test': { + properties: { quarantined: true }, + }, + }, + }, + 'ci-visibility/vitest-tests/hooks-test-management.mjs': { + tests: { + 'test management with hooks can apply management to a failing test with hooks': { + properties: { quarantined: true }, + }, + }, + }, + 'ci-visibility/vitest-tests/hooks-test-quarantine-failing-after-each.mjs': { + tests: { + 'quarantine tests with failing afterEach can quarantine 
a test whose afterEach hook fails': { + properties: { quarantined: true }, + }, + }, + }, + }, + }, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const quarantinedTest = tests.find( + test => test.meta[TEST_NAME] === 'quarantine tests can quarantine a test' + ) + assert.ok(quarantinedTest, 'Expected to find the quarantined test') + // Quarantined test still runs and reports its actual status, + // but the final status must be 'skip' (errors are suppressed) + assert.strictEqual(quarantinedTest.meta[TEST_STATUS], 'fail') + assert.strictEqual(quarantinedTest.meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') + assert.strictEqual(quarantinedTest.meta[TEST_FINAL_STATUS], 'skip') + + const passingTest = tests.find(test => test.meta[TEST_NAME] === 'quarantine tests can pass normally') + assert.ok(passingTest, 'Expected to find the passing test') + assert.strictEqual(passingTest.meta[TEST_STATUS], 'pass') + assert.strictEqual(passingTest.meta[TEST_FINAL_STATUS], 'pass') + + // Quarantined test that actually passes must still report final_status=skip + const quarantinedPassingTest = tests.find( + test => test.meta[TEST_NAME] === 'quarantine tests can quarantine a passing test' + ) + assert.ok(quarantinedPassingTest, 'Expected to find the quarantined passing test') + assert.strictEqual(quarantinedPassingTest.meta[TEST_STATUS], 'pass') + assert.strictEqual(quarantinedPassingTest.meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') + assert.strictEqual(quarantinedPassingTest.meta[TEST_FINAL_STATUS], 'skip') + + // With hooks: same behavior + const quarantinedTestWithHooks = tests.find( + test => test.meta[TEST_NAME] === + 'test management with hooks can apply management to a failing test with hooks' + ) + assert.ok(quarantinedTestWithHooks, 
'Expected to find the quarantined test with hooks') + assert.strictEqual(quarantinedTestWithHooks.meta[TEST_STATUS], 'fail') + assert.strictEqual(quarantinedTestWithHooks.meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') + assert.strictEqual(quarantinedTestWithHooks.meta[TEST_FINAL_STATUS], 'skip') + + const passingTestWithHooks = tests.find( + test => test.meta[TEST_NAME] === 'test management with hooks can pass normally with hooks' + ) + assert.ok(passingTestWithHooks, 'Expected to find the passing test with hooks') + assert.strictEqual(passingTestWithHooks.meta[TEST_STATUS], 'pass') + assert.strictEqual(passingTestWithHooks.meta[TEST_FINAL_STATUS], 'pass') + + // With hooks where afterEach throws: test body passes but hook causes failure — still skip + const quarantinedTestFailingAfterEach = tests.find( + test => test.meta[TEST_NAME] === + 'quarantine tests with failing afterEach can quarantine a test whose afterEach hook fails' + ) + assert.ok(quarantinedTestFailingAfterEach, 'Expected to find the quarantined test with failing afterEach') + assert.strictEqual(quarantinedTestFailingAfterEach.meta[TEST_STATUS], 'fail') + assert.strictEqual(quarantinedTestFailingAfterEach.meta[TEST_MANAGEMENT_IS_QUARANTINED], 'true') + assert.strictEqual(quarantinedTestFailingAfterEach.meta[TEST_FINAL_STATUS], 'skip') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/' + + '{test-quarantine,hooks-test-management,hooks-test-quarantine-failing-after-each}.mjs', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init --no-warnings', + }, + } + ) + + await Promise.all([once(childProcess, 'exit'), eventsPromise]) + }) + } + }) }) }) diff --git a/nyc.config.js b/nyc.config.js index 3ff611feb5b..9f1ef320263 100644 --- a/nyc.config.js +++ b/nyc.config.js @@ -30,6 +30,7 @@ module.exports = { ], exclude: [ '**/.bun/**', + '**/*-browser-scripts.js', // Serialized 
into browsers; coverage counters would ReferenceError. '**/*.spec.*', '**/fixtures/**', '**/integration-tests/**', diff --git a/package.json b/package.json index 48bf3f3ea6c..0544c6911df 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.98.0", + "version": "5.99.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts", @@ -153,11 +153,11 @@ "@datadog/wasm-js-rewriter": "5.0.1", "@opentelemetry/api": ">=1.0.0 <1.10.0", "@opentelemetry/api-logs": "<1.0.0", - "oxc-parser": "^0.121.0" + "oxc-parser": "^0.126.0" }, "devDependencies": { "@actions/core": "^3.0.0", - "@actions/github": "^9.0.0", + "@actions/github": "^9.1.0", "@babel/helpers": "^7.29.2", "@eslint/eslintrc": "^3.3.5", "@eslint/js": "^9.39.2", @@ -167,14 +167,14 @@ "@stylistic/eslint-plugin": "^5.10.0", "@types/mocha": "^10.0.10", "@types/node": "^18.19.106", - "@types/sinon": "^21.0.0", + "@types/sinon": "^21.0.1", "axios": "^1.15.0", "benchmark": "^2.1.4", "body-parser": "^2.2.2", - "bun": "1.3.11", + "bun": "1.3.12", "codeowners-audit": "^2.9.0", "eslint": "^9.39.2", - "eslint-plugin-cypress": "^6.2.2", + "eslint-plugin-cypress": "^6.3.1", "eslint-plugin-import": "^2.32.0", "eslint-plugin-jsdoc": "^62.9.0", "eslint-plugin-mocha": "^11.2.0", @@ -200,10 +200,10 @@ "retry": "^0.13.1", "semifies": "^1.0.0", "semver": "^7.7.2", - "sinon": "^21.0.3", + "sinon": "^21.1.2", "tiktoken": "^1.0.21", - "typescript": "^6.0.2", - "workerpool": "^10.0.0", + "typescript": "^6.0.3", + "workerpool": "^10.0.2", "yaml": "^2.8.3", "yarn-deduplicate": "^6.0.2" } diff --git a/packages/datadog-instrumentations/src/cypress-config.js b/packages/datadog-instrumentations/src/cypress-config.js index 0773c8928e8..13a573bc055 100644 --- a/packages/datadog-instrumentations/src/cypress-config.js +++ b/packages/datadog-instrumentations/src/cypress-config.js @@ -229,29 +229,84 @@ function wrapConfig (config) { return config } +/** + * 
Returns `true` if the nearest package.json walking up from `filePath` + * sets `"type": "module"`. Used to decide whether ambiguous extensions + * (`.js`, `.ts`) are loaded as ESM or CJS. + * + * @param {string} filePath absolute path to a file under the project + * @returns {boolean} + */ +function isUnderEsmPackage (filePath) { + let dir = path.dirname(filePath) + while (true) { + const candidate = path.join(dir, 'package.json') + try { + const pkg = JSON.parse(fs.readFileSync(candidate, 'utf8')) + return pkg && pkg.type === 'module' + } catch { /* no package.json at this level */ } + const parent = path.dirname(dir) + if (parent === dir) return false + dir = parent + } +} + /** * @param {string} originalConfigFile absolute path to the original config file * @returns {string} path to the generated wrapper file */ function createConfigWrapper (originalConfigFile) { + // Decide the wrapper's module mode (ESM vs CJS). It must match how + // Cypress would interpret the user's original config so that (1) Cypress + // keeps the loader it would have used (notably the ts-node registration + // for `.ts` configs), and (2) the wrapper body parses in that mode. + const originalExt = path.extname(originalConfigFile) + const isEsm = originalExt === '.mjs' || originalExt === '.mts' || + (originalExt !== '.cjs' && originalExt !== '.cts' && isUnderEsmPackage(originalConfigFile)) + + // Preserve `.ts`/`.cts`/`.mts` so Cypress keeps ts-node registered for + // the wrapper. For plain JS originals, pick the extension that encodes + // the chosen module mode directly. + let wrapperExt + if (originalExt === '.ts' || originalExt === '.cts' || originalExt === '.mts') { + wrapperExt = originalExt + } else { + wrapperExt = isEsm ? 
'.mjs' : '.cjs' + } + const wrapperFile = path.join( path.dirname(originalConfigFile), - `.dd-cypress-config-${process.pid}.mjs` + `.dd-cypress-config-${process.pid}${wrapperExt}` ) const cypressConfigPath = require.resolve('./cypress-config') - // Always use ESM: it can import both CJS and ESM configs, so it works - // regardless of the original file's extension or "type": "module" in package.json. - // Import cypress-config.js directly (CJS default = module.exports object). - fs.writeFileSync(wrapperFile, [ - `import originalConfig from ${JSON.stringify(pathToFileURL(originalConfigFile).href)}`, - `import cypressConfig from ${JSON.stringify(pathToFileURL(cypressConfigPath).href)}`, - '', - 'export default cypressConfig.wrapConfig(originalConfig)', - '', - ].join('\n')) - + // ESM body: `import` default-interops a CJS module (cypress-config.js) + // by exposing its `module.exports` as the default binding, and handles + // both CJS and ESM user configs transparently. + // CJS body: avoids top-level `import` — older Cypress transpiles `.ts` + // configs through CJS ts-node, where `require('file://...')` is not + // supported. Guards against ES-module-default shape so TS-authored + // configs using `export default` still work. + const body = isEsm + ? [ + `import originalConfig from ${JSON.stringify(pathToFileURL(originalConfigFile).href)}`, + `import cypressConfig from ${JSON.stringify(pathToFileURL(cypressConfigPath).href)}`, + '', + 'export default cypressConfig.wrapConfig(originalConfig)', + '', + ].join('\n') + : [ + `const cypressConfig = require(${JSON.stringify(cypressConfigPath)})`, + `const originalExports = require(${JSON.stringify(originalConfigFile)})`, + 'const originalConfig = originalExports && originalExports.__esModule', + ' ? 
originalExports.default', + ' : originalExports', + 'module.exports = cypressConfig.wrapConfig(originalConfig)', + '', + ].join('\n') + + fs.writeFileSync(wrapperFile, body) return wrapperFile } @@ -291,10 +346,7 @@ function wrapCliConfigFileOptions (options) { } } - // Skip .ts files — Cypress transpiles them internally via its own loader. - // The ESM wrapper can't import .ts directly. The defineConfig shimmer - // handles .ts configs since they're transpiled to CJS by Cypress. - if (!configFilePath || !fs.existsSync(configFilePath) || path.extname(configFilePath) === '.ts') return noop + if (!configFilePath || !fs.existsSync(configFilePath)) return noop try { const wrapperFile = createConfigWrapper(configFilePath) diff --git a/packages/datadog-instrumentations/src/helpers/check-require-cache.js b/packages/datadog-instrumentations/src/helpers/check-require-cache.js index f12f19258fd..25cb7c0f538 100644 --- a/packages/datadog-instrumentations/src/helpers/check-require-cache.js +++ b/packages/datadog-instrumentations/src/helpers/check-require-cache.js @@ -97,7 +97,10 @@ module.exports.checkForPotentialConflicts = function () { } module.exports.flushStartupLogs = function (log) { + // Some callers pass `./log/writer` (simple pass-through) while others pass the main `./log` + // module (which supports lazy delegate functions). Invoke closures here so both work. while (warnings.length) { - log.warn(warnings.shift()) + const entry = warnings.shift() + log.warn(typeof entry === 'function' ? 
entry() : entry) } } diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index 3d648a03ffa..f1495d32ba7 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -17,6 +17,7 @@ module.exports = { '@apollo/server': () => require('../apollo-server'), '@apollo/gateway': () => require('../apollo'), '@langchain/langgraph': { esmFirst: true, fn: () => require('../langgraph') }, + '@modelcontextprotocol/sdk': () => require('../modelcontextprotocol-sdk'), 'apollo-server-core': () => require('../apollo-server-core'), '@aws-sdk/smithy-client': () => require('../aws-sdk'), '@azure/event-hubs': () => require('../azure-event-hubs'), diff --git a/packages/datadog-instrumentations/src/helpers/rewriter/compiler.js b/packages/datadog-instrumentations/src/helpers/rewriter/compiler.js index 88c7504a751..4230ffdcd94 100644 --- a/packages/datadog-instrumentations/src/helpers/rewriter/compiler.js +++ b/packages/datadog-instrumentations/src/helpers/rewriter/compiler.js @@ -1,5 +1,13 @@ 'use strict' +/** + * This file is meant to be only thin wrappers over core + * parsing/traversing/generating functionality with the goal to eventually move + * them out of the project. No other code should be added to this file such as + * helpers etc, and the API should be kept exactly as an external API would be + * expected to be. + */ + const log = require('../../../../dd-trace/src/log') // eslint-disable-next-line camelcase, no-undef @@ -24,14 +32,13 @@ const compiler = { } catch (e) { log.error(e) - // Fallback for when OXC is not available. 
const meriyah = require('../../../../../vendor/dist/meriyah') - compiler.parse = (sourceText, { range, sourceType } = {}) => { + compiler.parse = (sourceText, { range, isModule } = {}) => { return meriyah.parse(sourceText.toString(), { loc: range, ranges: range, - module: sourceType === 'module', + module: isModule, }) } } diff --git a/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/index.js b/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/index.js index 28b13f15191..9a67278604a 100644 --- a/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/index.js +++ b/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/index.js @@ -5,4 +5,5 @@ module.exports = [ ...require('./bullmq'), ...require('./langchain'), ...require('./langgraph'), + ...require('./modelcontextprotocol-sdk'), ] diff --git a/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/modelcontextprotocol-sdk.js b/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/modelcontextprotocol-sdk.js new file mode 100644 index 00000000000..d5474bdee12 --- /dev/null +++ b/packages/datadog-instrumentations/src/helpers/rewriter/instrumentations/modelcontextprotocol-sdk.js @@ -0,0 +1,59 @@ +'use strict' + +// NOTE: Protocol.request (dist/esm|cjs/shared/protocol.js) is intentionally not instrumented here. +// It will be used for distributed tracing header injection when server-side coverage is added. 
+ +module.exports = [ + { + module: { + name: '@modelcontextprotocol/sdk', + versionRange: '>=1.27.1', + filePath: 'dist/esm/client/index.js', + }, + functionQuery: { + methodName: 'callTool', + className: 'Client', + kind: 'Async', + }, + channelName: 'Client_callTool', + }, + { + module: { + name: '@modelcontextprotocol/sdk', + versionRange: '>=1.27.1', + filePath: 'dist/cjs/client/index.js', + }, + functionQuery: { + methodName: 'callTool', + className: 'Client', + kind: 'Async', + }, + channelName: 'Client_callTool', + }, + { + module: { + name: '@modelcontextprotocol/sdk', + versionRange: '>=1.27.1', + filePath: 'dist/esm/client/index.js', + }, + functionQuery: { + methodName: 'listTools', + className: 'Client', + kind: 'Async', + }, + channelName: 'Client_listTools', + }, + { + module: { + name: '@modelcontextprotocol/sdk', + versionRange: '>=1.27.1', + filePath: 'dist/cjs/client/index.js', + }, + functionQuery: { + methodName: 'listTools', + className: 'Client', + kind: 'Async', + }, + channelName: 'Client_listTools', + }, +] diff --git a/packages/datadog-instrumentations/src/helpers/rewriter/transforms.js b/packages/datadog-instrumentations/src/helpers/rewriter/transforms.js index 9ed00064a0a..619fb8930f7 100644 --- a/packages/datadog-instrumentations/src/helpers/rewriter/transforms.js +++ b/packages/datadog-instrumentations/src/helpers/rewriter/transforms.js @@ -14,11 +14,13 @@ const transforms = module.exports = { if (node.body.some(tracingChannelPredicate)) return const index = node.body.findIndex(child => child.directive === 'use strict') - const code = sourceType === 'module' + const code = isModuleSourceType(sourceType) ? 
`import { tracingChannel as tr_ch_apm_tracingChannel } from "${dcModule}"` : `const {tracingChannel: tr_ch_apm_tracingChannel} = require("${dcModule}")` - node.body.splice(index + 1, 0, parse(code, { sourceType }).body[0]) + node.body.splice(index + 1, 0, parse(code, { + isModule: isModuleSourceType(sourceType), + }).body[0]) }, tracingChannelDeclaration (state, node) { @@ -51,6 +53,13 @@ function traceAny (state, node, _parent, ancestry) { } } +/** + * @param {string} sourceType + */ +function isModuleSourceType (sourceType) { + return sourceType === 'module' || sourceType === 'esm' +} + function traceFunction (state, node, program) { transforms.tracingChannelDeclaration(state, program) diff --git a/packages/datadog-instrumentations/src/modelcontextprotocol-sdk.js b/packages/datadog-instrumentations/src/modelcontextprotocol-sdk.js new file mode 100644 index 00000000000..ba41d514361 --- /dev/null +++ b/packages/datadog-instrumentations/src/modelcontextprotocol-sdk.js @@ -0,0 +1,7 @@ +'use strict' + +const { addHook, getHooks } = require('./helpers/instrument') + +for (const hook of getHooks('@modelcontextprotocol/sdk')) { + addHook(hook, exports => exports) +} diff --git a/packages/datadog-instrumentations/src/playwright-browser-scripts.js b/packages/datadog-instrumentations/src/playwright-browser-scripts.js new file mode 100644 index 00000000000..c60001f0bfa --- /dev/null +++ b/packages/datadog-instrumentations/src/playwright-browser-scripts.js @@ -0,0 +1,27 @@ +'use strict' + +// Serialized into chromium via Playwright's `page.evaluate`. Excluded from coverage by filename. +// Rename only if you update that glob too. + +/** @returns {{ isRumInstrumented: boolean, isRumActive: boolean, rumSamplingRate: number | null }} */ +function detectRum () { + const isRumInstrumented = !!window.DD_RUM + const isRumActive = window.DD_RUM && window.DD_RUM.getInternalContext + ? 
!!window.DD_RUM.getInternalContext() + : false + const rumSamplingRate = window.DD_RUM && window.DD_RUM.getInitConfiguration + ? window.DD_RUM.getInitConfiguration().sessionSampleRate + : null + return { isRumInstrumented, isRumActive, rumSamplingRate } +} + +/** @returns {boolean} */ +function stopRumSession () { + if (window.DD_RUM && window.DD_RUM.stopSession) { + window.DD_RUM.stopSession() + return true + } + return false +} + +module.exports = { detectRum, stopRumSession } diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index 31b04eee3a6..85031f2fb0e 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -52,6 +52,9 @@ let applyRepeatEachIndex = null let startedSuites = [] +// Browser-side callbacks live in a coverage-excluded file so coverage counters can't reach chromium. +const { detectRum, stopRumSession } = require('./playwright-browser-scripts') + const STATUS_TO_TEST_STATUS = { passed: 'pass', failed: 'fail', @@ -1117,16 +1120,7 @@ addHook({ try { if (page) { - const { isRumInstrumented, isRumActive, rumSamplingRate } = await page.evaluate(() => { - const isRumInstrumented = !!window.DD_RUM - const isRumActive = window.DD_RUM && window.DD_RUM.getInternalContext - ? !!window.DD_RUM.getInternalContext() - : false - const rumSamplingRate = window.DD_RUM && window.DD_RUM.getInitConfiguration - ? 
window.DD_RUM.getInitConfiguration().sessionSampleRate - : null - return { isRumInstrumented, isRumActive, rumSamplingRate } - }) + const { isRumInstrumented, isRumActive, rumSamplingRate } = await page.evaluate(detectRum) if (isRumInstrumented && rumSamplingRate < 100 && !isRumActive) { log.debug("RUM was detected on the page, but it isn't active because the sampling rate is below 100%") } @@ -1209,13 +1203,7 @@ addHook({ fn: async function ({ page }) { try { if (page) { - const isRumActive = await page.evaluate(() => { - if (window.DD_RUM && window.DD_RUM.stopSession) { - window.DD_RUM.stopSession() - return true - } - return false - }) + const isRumActive = await page.evaluate(stopRumSession) if (isRumActive) { // Give some time RUM to flush data, similar to what we do in selenium diff --git a/packages/datadog-instrumentations/src/vitest.js b/packages/datadog-instrumentations/src/vitest.js index a83093973f3..15d4ae1fc7a 100644 --- a/packages/datadog-instrumentations/src/vitest.js +++ b/packages/datadog-instrumentations/src/vitest.js @@ -866,6 +866,9 @@ function wrapVitestTestRunner (VitestTestRunner) { } else { testPassCh.publish({ task, ...ctx.currentStore }) } + if (shouldFlipStatus) { + task.result.state = 'pass' + } } const isRetryReasonAtr = numAttempt > 0 && @@ -1174,7 +1177,12 @@ addHook({ }) } else if (state === 'pass' && !isSwitchedStatus) { if (testCtx) { - testPassCh.publish({ task, ...testCtx.currentStore }) + testPassCh.publish({ + task, + finalStatus: + disabledTasks.has(task) || quarantinedTasks.has(task) ? 
'skip' : 'pass', + ...testCtx.currentStore, + }) } } else if (state === 'fail' || isSwitchedStatus) { let testError @@ -1197,7 +1205,9 @@ addHook({ // Check if all EFD retries failed const providedContext = getProvidedContext() - if (providedContext.isEarlyFlakeDetectionEnabled && (newTasks.has(task) || modifiedTasks.has(task))) { + const isEfdRetry = + providedContext.isEarlyFlakeDetectionEnabled && (newTasks.has(task) || modifiedTasks.has(task)) + if (isEfdRetry) { const statuses = taskToStatuses.get(task) // statuses only includes repetitions (not the initial run), so we check against numRepeats (not +1) if (statuses && statuses.length === providedContext.numRepeats && @@ -1207,8 +1217,9 @@ addHook({ } // ATR: set hasFailedAllRetries when all auto test retries were exhausted and every attempt failed - if (providedContext.isFlakyTestRetriesEnabled && !attemptToFixTasks.has(task) && - !newTasks.has(task) && !modifiedTasks.has(task)) { + const isAtrRetry = providedContext.isFlakyTestRetriesEnabled && !attemptToFixTasks.has(task) && + !newTasks.has(task) && !modifiedTasks.has(task) + if (isAtrRetry) { const maxRetries = providedContext.flakyTestRetriesCount ?? 0 if (maxRetries > 0 && task.result?.retryCount === maxRetries) { hasFailedAllRetries = true @@ -1218,11 +1229,28 @@ addHook({ if (testCtx) { const isRetry = task.result?.retryCount > 0 // `duration` is the duration of all the retries, so it can't be used if there are retries + + let finalStatus + if (isSwitchedStatus) { + if (disabledTasks.has(task) || quarantinedTasks.has(task)) { + finalStatus = 'skip' + } else if (isAtrRetry || isEfdRetry) { + finalStatus = hasFailedAllRetries ? 'fail' : 'pass' + } else if (attemptToFixTasks.has(task)) { + finalStatus = attemptToFixFailed ? 'fail' : 'pass' + } else { + finalStatus = undefined + } + } else { + finalStatus = 'fail' + } + testErrorCh.publish({ duration: isRetry ? 
undefined : duration, error: testError, hasFailedAllRetries, attemptToFixFailed, + finalStatus, ...testCtx.currentStore, }) } diff --git a/packages/datadog-instrumentations/test/helpers/rewriter/index.spec.js b/packages/datadog-instrumentations/test/helpers/rewriter/index.spec.js index f95e8b3c937..69ba6da15a9 100644 --- a/packages/datadog-instrumentations/test/helpers/rewriter/index.spec.js +++ b/packages/datadog-instrumentations/test/helpers/rewriter/index.spec.js @@ -515,4 +515,15 @@ describe('check-require-cache', () => { assert.ok(subs.start.called) }) + + it('should use import when rewriting esm modules', () => { + const filename = resolve(__dirname, 'node_modules', 'test', 'trace-generator-async.js') + + content = readFileSync(filename, 'utf8') + content = rewriter.rewrite(content, filename, 'module') + + assert.match(content, /\bimport\s+.+\s+from\s+"/) + assert.match(content, /tr_ch_apm_tracingChannel/) + assert.doesNotMatch(content, /require\("/) + }) }) diff --git a/packages/datadog-instrumentations/test/light-my-request.spec.js b/packages/datadog-instrumentations/test/light-my-request.spec.js index fdc2345ce4f..664fbb94e9d 100644 --- a/packages/datadog-instrumentations/test/light-my-request.spec.js +++ b/packages/datadog-instrumentations/test/light-my-request.spec.js @@ -6,8 +6,9 @@ const { describe, it, before, after, beforeEach, afterEach } = require('mocha') const sinon = require('sinon') const agent = require('../../dd-trace/test/plugins/agent') +const { withVersions } = require('../../dd-trace/test/setup/mocha') -describe('light-my-request instrumentation', () => { +withVersions('light-my-request', 'light-my-request', version => describe('light-my-request instrumentation', () => { const startServerCh = dc.channel('apm:http:server:request:start') const exitServerCh = dc.channel('apm:http:server:request:exit') const finishServerCh = dc.channel('apm:http:server:request:finish') @@ -18,8 +19,8 @@ describe('light-my-request instrumentation', () => { 
before(async () => { await agent.load(['http', 'fastify', 'light-my-request'], { client: false }) - inject = require('light-my-request') - Fastify = require('fastify') + inject = require(`../../../versions/light-my-request@${version}`).get() + Fastify = require('../../../versions/fastify').get() }) after(() => { @@ -275,4 +276,4 @@ describe('light-my-request instrumentation', () => { }) }) }) -}) +})) diff --git a/packages/datadog-plugin-aws-sdk/src/base.js b/packages/datadog-plugin-aws-sdk/src/base.js index b82595b6510..ca27a267b5b 100644 --- a/packages/datadog-plugin-aws-sdk/src/base.js +++ b/packages/datadog-plugin-aws-sdk/src/base.js @@ -197,8 +197,7 @@ class BaseAwsSdkPlugin extends ClientPlugin { isEnabled (request) { const serviceId = this.serviceIdentifier.toUpperCase() - const envVarValue = getValueFromEnvSources(`DD_TRACE_AWS_SDK_${serviceId}_ENABLED`) - return envVarValue ? isTrue(envVarValue) : true + return this._tracerConfig[`DD_TRACE_AWS_SDK_${serviceId}_ENABLED`] ?? true } addResponseTags (span, response) { diff --git a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js index b9bf5ecb86e..78c0be1f481 100644 --- a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js @@ -332,7 +332,7 @@ describe('Plugin', () => { }) total++ - }).catch(() => {}, { timeoutMs: 100 }) + }, { timeoutMs: 100 }).catch(() => {}) agent.assertSomeTraces(traces => { const span = sort(traces[0])[0] @@ -344,7 +344,7 @@ describe('Plugin', () => { }) total++ - }).catch((e) => {}, { timeoutMs: 100 }) + }, { timeoutMs: 100 }).catch((e) => {}) s3.listBuckets({}, () => {}) sqs.listQueues({}, () => {}) @@ -395,9 +395,9 @@ describe('Plugin', () => { describe('with env variable _BATCH_PROPAGATION_ENABLED configuration', () => { before(() => { - process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED = true - process.env.DD_TRACE_AWS_SDK_KINESIS_BATCH_PROPAGATION_ENABLED = 
false - process.env.DD_TRACE_AWS_SDK_SQS_BATCH_PROPAGATION_ENABLED = true + process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED = 'true' + process.env.DD_TRACE_AWS_SDK_KINESIS_BATCH_PROPAGATION_ENABLED = 'false' + process.env.DD_TRACE_AWS_SDK_SQS_BATCH_PROPAGATION_ENABLED = 'true' return agent.load(['aws-sdk']) }) diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js index a8e7595c5a7..70c8a99d2d0 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js @@ -168,12 +168,21 @@ describe('Kinesis', function () { }) describe('Disabled', () => { + let savedKinesisEnv + before(() => { + savedKinesisEnv = process.env.DD_TRACE_AWS_SDK_KINESIS_ENABLED process.env.DD_TRACE_AWS_SDK_KINESIS_ENABLED = 'false' + agent.wipe() }) after(() => { - delete process.env.DD_TRACE_AWS_SDK_KINESIS_ENABLED + if (savedKinesisEnv === undefined) { + delete process.env.DD_TRACE_AWS_SDK_KINESIS_ENABLED + } else { + process.env.DD_TRACE_AWS_SDK_KINESIS_ENABLED = savedKinesisEnv + } + agent.wipe() }) it('skip injects trace context to Kinesis putRecord when disabled', done => { diff --git a/packages/datadog-plugin-azure-event-hubs/src/producer.js b/packages/datadog-plugin-azure-event-hubs/src/producer.js index d5437ec7962..7c1536a3fdf 100644 --- a/packages/datadog-plugin-azure-event-hubs/src/producer.js +++ b/packages/datadog-plugin-azure-event-hubs/src/producer.js @@ -1,6 +1,5 @@ 'use strict' -const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') const ProducerPlugin = require('../../dd-trace/src/plugins/producer') const spanContexts = new WeakMap() @@ -11,8 +10,9 @@ class AzureEventHubsProducerPlugin extends ProducerPlugin { static get prefix () { return 'tracing:apm:azure-event-hubs:send' } bindStart (ctx) { + const batchLinksEnabled = this._tracerConfig.DD_TRACE_AZURE_EVENTHUBS_BATCH_LINKS_ENABLED // we do not want to make these spans 
when batch linking is disabled. - if (!batchLinksAreEnabled() && ctx.functionName === 'tryAdd') { + if (!batchLinksEnabled && ctx.functionName === 'tryAdd') { return ctx.currentStore } @@ -37,7 +37,7 @@ class AzureEventHubsProducerPlugin extends ProducerPlugin { span.setTag('message.id', ctx.eventData.messageID) } - if (batchLinksAreEnabled()) { + if (batchLinksEnabled) { const spanContext = spanContexts.get(ctx.batch) if (spanContext) { spanContext.push(span.context()) @@ -58,13 +58,11 @@ class AzureEventHubsProducerPlugin extends ProducerPlugin { for (const event of eventData) { injectTraceContext(this.tracer, span, event) } - } else { - if (batchLinksAreEnabled()) { - const contexts = spanContexts.get(eventData) - if (contexts) { - for (const spanContext of contexts) { - span.addLink(spanContext) - } + } else if (batchLinksEnabled) { + const contexts = spanContexts.get(eventData) + if (contexts) { + for (const spanContext of contexts) { + span.addLink(spanContext) } } } @@ -88,9 +86,4 @@ function injectTraceContext (tracer, span, event) { tracer.inject(span, 'text_map', event.properties) } -function batchLinksAreEnabled () { - const eh = getValueFromEnvSources('DD_TRACE_AZURE_EVENTHUBS_BATCH_LINKS_ENABLED') - return eh !== 'false' -} - module.exports = AzureEventHubsProducerPlugin diff --git a/packages/datadog-plugin-azure-service-bus/src/producer.js b/packages/datadog-plugin-azure-service-bus/src/producer.js index 75161af4734..c3d78d83b2b 100644 --- a/packages/datadog-plugin-azure-service-bus/src/producer.js +++ b/packages/datadog-plugin-azure-service-bus/src/producer.js @@ -1,6 +1,5 @@ 'use strict' -const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') const ProducerPlugin = require('../../dd-trace/src/plugins/producer') const spanContexts = new WeakMap() @@ -10,8 +9,9 @@ class AzureServiceBusProducerPlugin extends ProducerPlugin { static get prefix () { return 'tracing:apm:azure-service-bus:send' } bindStart (ctx) { + const 
batchLinksEnabled = this._tracerConfig.DD_TRACE_AZURE_SERVICEBUS_BATCH_LINKS_ENABLED // we do not want to make these spans when batch linking is disabled. - if (!batchLinksAreEnabled() && ctx.functionName === 'tryAddMessage') { + if (!batchLinksEnabled && ctx.functionName === 'tryAddMessage') { return ctx.currentStore } @@ -36,7 +36,7 @@ class AzureServiceBusProducerPlugin extends ProducerPlugin { span.setTag('message.id', ctx.msg) } - if (batchLinksAreEnabled()) { + if (batchLinksEnabled) { const spanContext = spanContexts.get(ctx.batch) if (spanContext) { spanContext.push(span.context()) @@ -52,7 +52,7 @@ class AzureServiceBusProducerPlugin extends ProducerPlugin { const isBatch = messages.constructor?.name === 'ServiceBusMessageBatchImpl' if (isBatch) { span.setTag('messaging.batch.message_count', messages.count) - if (batchLinksAreEnabled()) { + if (batchLinksEnabled) { const contexts = spanContexts.get(messages) if (contexts) { for (const spanContext of contexts) { @@ -89,9 +89,4 @@ function injectTraceContext (tracer, span, msg) { tracer.inject(span, 'text_map', msg.applicationProperties) } -function batchLinksAreEnabled () { - const sb = getValueFromEnvSources('DD_TRACE_AZURE_SERVICEBUS_BATCH_LINKS_ENABLED') - return sb !== 'false' -} - module.exports = AzureServiceBusProducerPlugin diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index c4f299103ca..54f5ec4290e 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -6,7 +6,7 @@ const realSetTimeout = setTimeout const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') -const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper') +const { getEnvironmentVariable } = require('../../dd-trace/src/config/helper') const { addIntelligentTestRunnerSpanTags, @@ -117,7 +117,7 @@ class CucumberPlugin extends 
CiPlugin { finishAllTraceSpans(this.testSessionSpan) this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName, - autoInjected: !!getValueFromEnvSources('DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER'), + autoInjected: this._tracerConfig.DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER, }) this.libraryConfig = null diff --git a/packages/datadog-plugin-http/src/server.js b/packages/datadog-plugin-http/src/server.js index 71a499e18a7..a4c99c0b87e 100644 --- a/packages/datadog-plugin-http/src/server.js +++ b/packages/datadog-plugin-http/src/server.js @@ -45,9 +45,8 @@ class HttpServerPlugin extends ServerPlugin { context.parentStore = store } - // Only AppSec needs the request scope to be active for any async work that - // may be scheduled after the synchronous `request` event returns (e.g. - // Fastify). + // AppSec, IAST, and AI Guard need req/res on the store so downstream + // subscribers can access them from the async context. if (incomingHttpRequestStart.hasSubscribers) { store = { ...store, req, res } } diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index e73767b6a33..84d2136fee5 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -5,7 +5,7 @@ const realSetTimeout = setTimeout const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') -const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper') +const { getEnvironmentVariable } = require('../../dd-trace/src/config/helper') const { appClosing: appClosingTelemetry } = require('../../dd-trace/src/telemetry') const { @@ -166,7 +166,7 @@ class JestPlugin extends CiPlugin { this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName, - autoInjected: !!getValueFromEnvSources('DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER'), + autoInjected: 
this._tracerConfig.DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER, }) appClosingTelemetry() diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index 4a108ecdd29..b9887c10c5c 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -5,7 +5,6 @@ const realDateNow = Date.now.bind(Date) const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') -const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') const { TEST_STATUS, @@ -406,7 +405,7 @@ class MochaPlugin extends CiPlugin { finishAllTraceSpans(this.testSessionSpan) this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName, - autoInjected: !!getValueFromEnvSources('DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER'), + autoInjected: this._tracerConfig.DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER, }) } this.libraryConfig = null diff --git a/packages/datadog-plugin-modelcontextprotocol-sdk/src/index.js b/packages/datadog-plugin-modelcontextprotocol-sdk/src/index.js new file mode 100644 index 00000000000..5291ff99036 --- /dev/null +++ b/packages/datadog-plugin-modelcontextprotocol-sdk/src/index.js @@ -0,0 +1,24 @@ +'use strict' + +const CompositePlugin = require('../../dd-trace/src/plugins/composite') +const mcpLLMObsPlugins = require('../../dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk') +const tracingPlugins = require('./tracing') + +const plugins = {} + +// CRITICAL: LLMObs plugins MUST come first +for (const Plugin of mcpLLMObsPlugins) { + plugins[Plugin.id] = Plugin +} + +// Tracing plugins second +for (const Plugin of tracingPlugins) { + plugins[Plugin.id] = Plugin +} + +class ModelcontextprotocolSdkPlugin extends CompositePlugin { + static id = 'modelcontextprotocol-sdk' + static plugins = plugins +} + +module.exports = ModelcontextprotocolSdkPlugin diff --git 
a/packages/datadog-plugin-modelcontextprotocol-sdk/src/tracing.js b/packages/datadog-plugin-modelcontextprotocol-sdk/src/tracing.js new file mode 100644 index 00000000000..863da22f71b --- /dev/null +++ b/packages/datadog-plugin-modelcontextprotocol-sdk/src/tracing.js @@ -0,0 +1,55 @@ +'use strict' + +const TracingPlugin = require('../../dd-trace/src/plugins/tracing') + +class McpToolCallPlugin extends TracingPlugin { + static id = 'modelcontextprotocol_client' + static prefix = 'tracing:orchestrion:@modelcontextprotocol/sdk:Client_callTool' + + bindStart (ctx) { + const params = ctx.arguments?.[0] + const toolName = params?.name + + this.startSpan('mcp.client.tool.call', { + resource: toolName, + type: 'mcp', + kind: 'client', + }, ctx) + + return ctx.currentStore + } + + asyncEnd (ctx) { + const result = ctx.result + if (result?.isError) { + const span = ctx.currentStore?.span + const errorText = result.content?.find?.(c => c.type === 'text')?.text || 'Tool call returned isError: true' + span?.setTag('error', new Error(errorText)) + } + super.finish(ctx) + } +} + +class McpListToolsPlugin extends TracingPlugin { + static id = 'modelcontextprotocol_list_tools' + static prefix = 'tracing:orchestrion:@modelcontextprotocol/sdk:Client_listTools' + + bindStart (ctx) { + this.startSpan('mcp.tools.list', { + resource: 'tools/list', + type: 'mcp', + kind: 'client', + }, ctx) + + return ctx.currentStore + } + + asyncEnd (ctx) { + super.finish(ctx) + } +} + +module.exports = [ + McpToolCallPlugin, + McpListToolsPlugin, +] diff --git a/packages/datadog-plugin-modelcontextprotocol-sdk/test/index.spec.js b/packages/datadog-plugin-modelcontextprotocol-sdk/test/index.spec.js new file mode 100644 index 00000000000..0075039f91d --- /dev/null +++ b/packages/datadog-plugin-modelcontextprotocol-sdk/test/index.spec.js @@ -0,0 +1,87 @@ +'use strict' + +const assert = require('node:assert/strict') +const { createIntegrationTestSuite } = 
require('../../dd-trace/test/setup/helpers/plugin-test-helpers') +const { expectSomeSpan } = require('../../dd-trace/test/plugins/helpers') +const TestSetup = require('./test-setup') + +const testSetup = new TestSetup() + +createIntegrationTestSuite('modelcontextprotocol-sdk', '@modelcontextprotocol/sdk', { + subModule: '@modelcontextprotocol/sdk/client', +}, (meta) => { + const { agent } = meta + + before(async () => { + await testSetup.setup(meta.mod, meta.versionMod) + }) + + after(async () => { + await testSetup.teardown() + }) + + describe('Client.callTool() - mcp.client.tool.call', () => { + it('should generate span with correct tags (happy path)', async () => { + const traceAssertion = expectSomeSpan(agent, { + name: 'mcp.client.tool.call', + type: 'mcp', + resource: 'test-tool', + meta: { + component: 'modelcontextprotocol_client', + '_dd.integration': 'modelcontextprotocol_client', + 'span.kind': 'client', + }, + }) + + const result = await testSetup.clientCallTool() + assert.ok(result.content, 'callTool should return a result with content') + assert.equal(result.content.length, 1) + assert.equal(result.content[0].type, 'text') + assert.equal(result.content[0].text, 'Result from test-tool') + + return traceAssertion + }) + + it('should generate span with error tags (error path)', async () => { + const traceAssertion = expectSomeSpan(agent, { + name: 'mcp.client.tool.call', + type: 'mcp', + resource: 'error-tool', + error: 1, + meta: { + component: 'modelcontextprotocol_client', + '_dd.integration': 'modelcontextprotocol_client', + 'span.kind': 'client', + }, + }) + + // In MCP SDK 1.27+, tool errors are returned as isError:true results, not thrown exceptions + const result = await testSetup.clientCallToolError() + assert.ok(result.isError, 'callTool result should have isError: true') + assert.ok(result.content?.[0]?.text?.includes('Intentional test error'), 'error text should be in content') + + return traceAssertion + }) + }) + + 
describe('Client.listTools() - mcp.tools.list', () => { + it('should generate span with correct tags (happy path)', async () => { + const traceAssertion = expectSomeSpan(agent, { + name: 'mcp.tools.list', + type: 'mcp', + resource: 'tools/list', + meta: { + component: 'modelcontextprotocol_list_tools', + '_dd.integration': 'modelcontextprotocol_list_tools', + 'span.kind': 'client', + }, + }) + + const result = await testSetup.clientListTools() + assert.ok(result.tools, 'listTools should return tools array') + assert.equal(result.tools.length, 2) + + return traceAssertion + }) + }) +}) diff --git a/packages/datadog-plugin-modelcontextprotocol-sdk/test/test-setup.js b/packages/datadog-plugin-modelcontextprotocol-sdk/test/test-setup.js new file mode 100644 index 00000000000..c4d1091011b --- /dev/null +++ b/packages/datadog-plugin-modelcontextprotocol-sdk/test/test-setup.js @@ -0,0 +1,68 @@ +'use strict' + +class ModelcontextprotocolSdkTestSetup { + async setup (clientModule, versionMod) { + const path = require('path') + const { Client } = clientModule + // Use versionMod.getPath to resolve the SDK root since the package exports map + // remaps @modelcontextprotocol/sdk/package.json to dist/cjs/package.json + const clientEntryPath = versionMod.getPath('@modelcontextprotocol/sdk/client') + const sdkDir = path.resolve(path.dirname(clientEntryPath), '..', '..', '..') + const { McpServer } = require(path.join(sdkDir, 'dist/cjs/server/mcp.js')) + const { InMemoryTransport } = versionMod.get('@modelcontextprotocol/sdk/inMemory.js') + + this._InMemoryTransport = InMemoryTransport + + this._server = new McpServer({ name: 'test-server', version: '1.0.0' }) + + this._server.registerTool( + 'test-tool', + { description: 'A test tool', inputSchema: {} }, + async () => ({ + content: [{ type: 'text', text: 'Result from test-tool' }], + }) + ) + + this._server.registerTool( + 'error-tool', + { description: 'A tool that errors', inputSchema: {} }, + async () => { + throw new 
Error('Intentional test error') + } + ) + + const [clientTransport, serverTransport] = this._InMemoryTransport.createLinkedPair() + + await this._server.connect(serverTransport) + + this._client = new Client( + { name: 'test-client', version: '1.0.0' } + ) + await this._client.connect(clientTransport) + } + + async teardown () { + if (this._client) { + await this._client.close() + } + if (this._server) { + await this._server.close() + } + this._client = null + this._server = null + } + + async clientCallTool () { + return this._client.callTool({ name: 'test-tool', arguments: {} }) + } + + async clientCallToolError () { + return this._client.callTool({ name: 'error-tool', arguments: {} }) + } + + async clientListTools () { + return this._client.listTools() + } +} + +module.exports = ModelcontextprotocolSdkTestSetup diff --git a/packages/datadog-plugin-mongodb-core/src/index.js b/packages/datadog-plugin-mongodb-core/src/index.js index 3a4c7acce13..22e017d2112 100644 --- a/packages/datadog-plugin-mongodb-core/src/index.js +++ b/packages/datadog-plugin-mongodb-core/src/index.js @@ -1,8 +1,6 @@ 'use strict' -const { isTrue } = require('../../dd-trace/src/util') const DatabasePlugin = require('../../dd-trace/src/plugins/database') -const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') class MongodbCorePlugin extends DatabasePlugin { static id = 'mongodb-core' @@ -20,11 +18,8 @@ class MongodbCorePlugin extends DatabasePlugin { configure (config) { super.configure(config) - const heartbeatFromEnv = getValueFromEnvSources('DD_TRACE_MONGODB_HEARTBEAT_ENABLED') - this.config.heartbeatEnabled = config.heartbeatEnabled ?? - (heartbeatFromEnv && isTrue(heartbeatFromEnv)) ?? 
- true + this._tracerConfig.DD_TRACE_MONGODB_HEARTBEAT_ENABLED } bindStart (ctx) { diff --git a/packages/datadog-plugin-mongodb-core/test/mongodb.spec.js b/packages/datadog-plugin-mongodb-core/test/mongodb.spec.js index 4a16cc291ae..a2848cc2466 100644 --- a/packages/datadog-plugin-mongodb-core/test/mongodb.spec.js +++ b/packages/datadog-plugin-mongodb-core/test/mongodb.spec.js @@ -777,13 +777,22 @@ describe('Plugin', () => { }) describe('when heartbeat tracing is disabled via env var', () => { - before(() => { + let savedHeartbeatEnv + + before(async () => { + savedHeartbeatEnv = process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED = 'false' - return agent.load('mongodb-core', {}) + agent.wipe() + await agent.load('mongodb-core', {}) }) - after(() => { - return agent.close({ ritmReset: false }) + after(async () => { + if (savedHeartbeatEnv === undefined) { + delete process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED + } else { + process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED = savedHeartbeatEnv + } + await agent.close({ ritmReset: false, wipe: true }) }) beforeEach(async () => { @@ -817,13 +826,22 @@ describe('Plugin', () => { }) describe('when heartbeat tracing is enabled via env var', () => { - before(() => { + let savedHeartbeatEnv + + before(async () => { + savedHeartbeatEnv = process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED = 'true' - return agent.load('mongodb-core', {}) + agent.wipe() + await agent.load('mongodb-core', {}) }) - after(() => { - return agent.close({ ritmReset: false }) + after(async () => { + if (savedHeartbeatEnv === undefined) { + delete process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED + } else { + process.env.DD_TRACE_MONGODB_HEARTBEAT_ENABLED = savedHeartbeatEnv + } + await agent.close({ ritmReset: false, wipe: true }) }) beforeEach(async () => { diff --git a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index 
d7b4b5b798d..6dcecf669e0 100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -3,7 +3,6 @@ const { storage } = require('../../datadog-core') const id = require('../../dd-trace/src/id') const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') -const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') const { finishAllTraceSpans, @@ -108,7 +107,7 @@ class PlaywrightPlugin extends CiPlugin { finishAllTraceSpans(this.testSessionSpan) this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName, - autoInjected: !!getValueFromEnvSources('DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER'), + autoInjected: this._tracerConfig.DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER, }) appClosingTelemetry() this.tracer._exporter.flush(onDone) @@ -420,7 +419,7 @@ class PlaywrightPlugin extends CiPlugin { span.finish() finishAllTraceSpans(span) - if (getValueFromEnvSources('DD_PLAYWRIGHT_WORKER')) { + if (this._tracerConfig.DD_PLAYWRIGHT_WORKER) { this.tracer._exporter.flush(onDone) } }) diff --git a/packages/datadog-plugin-vitest/src/index.js b/packages/datadog-plugin-vitest/src/index.js index 363d9aa2dc6..e0daf97b9aa 100644 --- a/packages/datadog-plugin-vitest/src/index.js +++ b/packages/datadog-plugin-vitest/src/index.js @@ -2,7 +2,6 @@ const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') -const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') const { TEST_STATUS, @@ -34,6 +33,7 @@ const { isModifiedTest, TEST_IS_MODIFIED, TEST_HAS_DYNAMIC_NAME, + TEST_FINAL_STATUS, } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const { @@ -213,10 +213,13 @@ class VitestPlugin extends CiPlugin { return ctx.currentStore }) - this.addSub('ci:vitest:test:pass', ({ span, task }) => { + this.addSub('ci:vitest:test:pass', ({ span, task, finalStatus }) 
=> { if (span) { this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', this.getTestTelemetryTags(span)) span.setTag(TEST_STATUS, 'pass') + if (finalStatus) { + span.setTag(TEST_FINAL_STATUS, finalStatus) + } span.finish(this.taskToFinishTime.get(task)) finishAllTraceSpans(span) } @@ -230,6 +233,7 @@ class VitestPlugin extends CiPlugin { promises, hasFailedAllRetries, attemptToFixFailed, + finalStatus, }) => { if (!span) { return @@ -255,6 +259,9 @@ class VitestPlugin extends CiPlugin { if (attemptToFixFailed) { span.setTag(TEST_MANAGEMENT_ATTEMPT_TO_FIX_PASSED, 'false') } + if (finalStatus) { + span.setTag(TEST_FINAL_STATUS, finalStatus) + } if (duration) { span.finish(span._startTime + duration - MILLISECONDS_TO_SUBTRACT_FROM_FAILED_TEST_DURATION) // milliseconds } else { @@ -273,6 +280,7 @@ class VitestPlugin extends CiPlugin { [TEST_SOURCE_FILE]: testSuite, [TEST_SOURCE_START]: 1, // we can't get the proper start line in vitest [TEST_STATUS]: 'skip', + [TEST_FINAL_STATUS]: 'skip', ...(isDisabled ? { [TEST_MANAGEMENT_IS_DISABLED]: 'true' } : {}), ...(isNew ? { [TEST_IS_NEW]: 'true' } : {}), } @@ -285,12 +293,12 @@ class VitestPlugin extends CiPlugin { const { testSuiteAbsolutePath, frameworkVersion } = ctx // TODO: Handle case where the command is not set - this.command = getValueFromEnvSources('DD_CIVISIBILITY_TEST_COMMAND') + this.command = this._tracerConfig.DD_CIVISIBILITY_TEST_COMMAND this.frameworkVersion = frameworkVersion const testSessionSpanContext = this.tracer.extract('text_map', { // TODO: Handle case where the session ID or module ID is not set - 'x-datadog-trace-id': getValueFromEnvSources('DD_CIVISIBILITY_TEST_SESSION_ID'), - 'x-datadog-parent-id': getValueFromEnvSources('DD_CIVISIBILITY_TEST_MODULE_ID'), + 'x-datadog-trace-id': this._tracerConfig.DD_CIVISIBILITY_TEST_SESSION_ID, + 'x-datadog-parent-id': this._tracerConfig.DD_CIVISIBILITY_TEST_MODULE_ID, }) const trimmedCommand = DD_MAJOR < 6 ? 
this.command : 'vitest run' @@ -415,7 +423,7 @@ class VitestPlugin extends CiPlugin { finishAllTraceSpans(this.testSessionSpan) this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName, - autoInjected: !!getValueFromEnvSources('DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER'), + autoInjected: this._tracerConfig.DD_CIVISIBILITY_AUTO_INSTRUMENTATION_PROVIDER, }) this.tracer._exporter.flush(onFinish) }) diff --git a/packages/datadog-plugin-ws/src/close.js b/packages/datadog-plugin-ws/src/close.js index 90ea38dc380..49a387eb6cf 100644 --- a/packages/datadog-plugin-ws/src/close.js +++ b/packages/datadog-plugin-ws/src/close.js @@ -57,11 +57,13 @@ class WSClosePlugin extends TracingPlugin { } bindAsyncStart (ctx) { + if (!ctx.span) return ctx.parentStore if (!ctx.isPeerClose) ctx.span.finish() return ctx.parentStore } asyncStart (ctx) { + if (!ctx.span) return ctx.span.finish() } diff --git a/packages/datadog-plugin-ws/src/producer.js b/packages/datadog-plugin-ws/src/producer.js index 146e56f301c..4a5647d8d8a 100644 --- a/packages/datadog-plugin-ws/src/producer.js +++ b/packages/datadog-plugin-ws/src/producer.js @@ -46,11 +46,13 @@ class WSProducerPlugin extends TracingPlugin { } bindAsyncStart (ctx) { + if (!ctx.span) return ctx.parentStore ctx.span.finish() return ctx.parentStore } asyncStart (ctx) { + if (!ctx.span) return ctx.span.finish() } diff --git a/packages/datadog-plugin-ws/src/receiver.js b/packages/datadog-plugin-ws/src/receiver.js index cb278f94764..ad65501875a 100644 --- a/packages/datadog-plugin-ws/src/receiver.js +++ b/packages/datadog-plugin-ws/src/receiver.js @@ -62,6 +62,7 @@ class WSReceiverPlugin extends TracingPlugin { } asyncStart (ctx) { + if (!ctx.span) return ctx.span.finish() } diff --git a/packages/datadog-plugin-ws/test/index.spec.js b/packages/datadog-plugin-ws/test/index.spec.js index 563dcb4fcfc..91f0ecdcc0a 100644 --- a/packages/datadog-plugin-ws/test/index.spec.js +++ b/packages/datadog-plugin-ws/test/index.spec.js @@ 
-31,6 +31,27 @@ describe('Plugin', () => { WebSocket = require(`../../../versions/ws@${version}`).get() }) + it('should not crash when sending on a socket without spanContext', async () => { + const server = new WebSocket.Server({ port: 16015 }) + const connectionPromise = once(server, 'connection') + + const socket = new WebSocket('ws://localhost:16015') + const [serverSocket] = await connectionPromise + await once(socket, 'open') + + assert.strictEqual(socket.spanContext, undefined) + + const messagePromise = once(serverSocket, 'message') + await new Promise((resolve, reject) => { + socket.send('test message', {}, (err) => err ? reject(err) : resolve()) + }) + await messagePromise + + socket.close() + await once(socket, 'close') + server.close() + }) + it('should emit original error in case close is called before connection is established', async () => { const socket = new WebSocket('wss://localhost:12345') diff --git a/packages/dd-trace/src/aiguard/channels.js b/packages/dd-trace/src/aiguard/channels.js new file mode 100644 index 00000000000..62299198987 --- /dev/null +++ b/packages/dd-trace/src/aiguard/channels.js @@ -0,0 +1,8 @@ +'use strict' + +const dc = require('dc-polyfill') + +module.exports = { + aiguardChannel: dc.channel('dd-trace:ai:aiguard'), + incomingHttpRequestStart: dc.channel('dd-trace:incomingHttpRequestStart'), +} diff --git a/packages/dd-trace/src/aiguard/index.js b/packages/dd-trace/src/aiguard/index.js index d3a523ec98b..dd26baa6bd1 100644 --- a/packages/dd-trace/src/aiguard/index.js +++ b/packages/dd-trace/src/aiguard/index.js @@ -1,15 +1,17 @@ 'use strict' -const { channel } = require('dc-polyfill') const log = require('../log') +const { incomingHttpRequestStart, aiguardChannel } = require('./channels') const AIGuard = require('./sdk') -const aiguardChannel = channel('dd-trace:ai:aiguard') - let isEnabled = false let aiguard let block +function onIncomingHttpRequestStart () { + // No-op: subscribing ensures the HTTP plugin spreads req 
onto the store +} + function enable (tracer, config) { if (isEnabled) return @@ -17,6 +19,7 @@ function enable (tracer, config) { aiguard = new AIGuard(tracer, config) block = config.experimental?.aiguard?.block !== false + incomingHttpRequestStart.subscribe(onIncomingHttpRequestStart) aiguardChannel.subscribe(onEvaluate) isEnabled = true @@ -29,6 +32,7 @@ function enable (tracer, config) { function disable () { if (!isEnabled) return + incomingHttpRequestStart.unsubscribe(onIncomingHttpRequestStart) aiguardChannel.unsubscribe(onEvaluate) aiguard = undefined diff --git a/packages/dd-trace/src/aiguard/sdk.js b/packages/dd-trace/src/aiguard/sdk.js index f817856bfc4..47dd18356e1 100644 --- a/packages/dd-trace/src/aiguard/sdk.js +++ b/packages/dd-trace/src/aiguard/sdk.js @@ -1,7 +1,10 @@ 'use strict' const rfdc = require('../../../../vendor/dist/rfdc')({ proto: false, circles: false }) +const { HTTP_CLIENT_IP, NETWORK_CLIENT_IP } = require('../../../../ext/tags') +const { storage } = require('../../../datadog-core') const log = require('../log') +const { extractIp } = require('../plugins/util/ip_extractor') const telemetryMetrics = require('../telemetry/metrics') const tracerVersion = require('../../../../package.json').version const { keepTrace } = require('../priority_sampler') @@ -13,6 +16,7 @@ const { AI_GUARD_TARGET_TAG_KEY, AI_GUARD_REASON_TAG_KEY, AI_GUARD_ACTION_TAG_KEY, + AI_GUARD_EVENT_TAG_KEY, AI_GUARD_BLOCKED_TAG_KEY, AI_GUARD_META_STRUCT_KEY, AI_GUARD_TOOL_NAME_TAG_KEY, @@ -57,6 +61,7 @@ class AIGuard extends NoopAIGuard { #maxMessagesLength #maxContentSize #meta + #config /** * @param {import('../tracer')} tracer - Tracer instance @@ -84,6 +89,7 @@ class AIGuard extends NoopAIGuard { this.#maxMessagesLength = config.experimental.aiguard.maxMessagesLength this.#maxContentSize = config.experimental.aiguard.maxContentSize this.#meta = { service: config.service, env: config.env } + this.#config = config this.#initialized = true } @@ -139,6 +145,42 @@ class 
AIGuard extends NoopAIGuard { return null } + #setRootSpanClientIpTags (rootSpan) { + if (!rootSpan) return + + const currentTags = rootSpan.context()._tags + const needsHttpClientIp = !Object.hasOwn(currentTags, HTTP_CLIENT_IP) + const needsNetworkClientIp = !Object.hasOwn(currentTags, NETWORK_CLIENT_IP) + + if (!needsHttpClientIp && !needsNetworkClientIp) return + + const req = storage('legacy').getStore()?.req + + if (!req) return + + const newTags = {} + + if (needsHttpClientIp) { + const clientIp = extractIp(this.#config, req) + + if (clientIp) { + newTags[HTTP_CLIENT_IP] = clientIp + } + } + + if (needsNetworkClientIp) { + const networkClientIp = req.socket?.remoteAddress + + if (networkClientIp) { + newTags[NETWORK_CLIENT_IP] = networkClientIp + } + } + + if (Object.keys(newTags).length > 0) { + rootSpan.addTags(newTags) + } + } + evaluate (messages, opts) { if (!this.#initialized) { return super.evaluate(messages, opts) @@ -162,9 +204,11 @@ class AIGuard extends NoopAIGuard { } const rootSpan = span.context()?._trace?.started?.[0] if (rootSpan) { + this.#setRootSpanClientIpTags(rootSpan) // keepTrace must be called before executeRequest so the sampling decision // is propagated correctly to outgoing HTTP client calls. 
keepTrace(rootSpan, AI_GUARD) + rootSpan.setTag(AI_GUARD_EVENT_TAG_KEY, 'true') } let response try { diff --git a/packages/dd-trace/src/aiguard/tags.js b/packages/dd-trace/src/aiguard/tags.js index 847d1c6edea..9192b0b2a8e 100644 --- a/packages/dd-trace/src/aiguard/tags.js +++ b/packages/dd-trace/src/aiguard/tags.js @@ -7,6 +7,7 @@ module.exports = { AI_GUARD_ACTION_TAG_KEY: 'ai_guard.action', AI_GUARD_REASON_TAG_KEY: 'ai_guard.reason', AI_GUARD_BLOCKED_TAG_KEY: 'ai_guard.blocked', + AI_GUARD_EVENT_TAG_KEY: 'ai_guard.event', AI_GUARD_META_STRUCT_KEY: 'ai_guard', AI_GUARD_TELEMETRY_REQUESTS: 'ai_guard.requests', diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index 9fc1b798fe4..4f4a7b3f0d7 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -3,6 +3,7 @@ const zlib = require('zlib') const dc = require('dc-polyfill') +const { NETWORK_CLIENT_IP } = require('../../../../ext/tags') const { storage } = require('../../../datadog-core') const web = require('../plugins/util/web') const { ipHeaderList } = require('../plugins/util/ip_extractor') @@ -363,7 +364,7 @@ function reportAttack ({ events: attackData, actions }, req) { : '{"triggers":' + attackDataStr + '}' if (req.socket) { - newTags['network.client.ip'] = req.socket.remoteAddress + newTags[NETWORK_CLIENT_IP] = req.socket.remoteAddress } rootSpan.addTags(newTags) diff --git a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js index 5aa9cfb070f..968a5c5244b 100644 --- a/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js +++ b/packages/dd-trace/src/ci-visibility/early-flake-detection/get-known-tests.js @@ -1,9 +1,9 @@ 'use strict' +const getConfig = require('../../config') const request = require('../requests/request') const id = require('../../id') const log = require('../../log') -const { 
getValueFromEnvSources } = require('../../config/helper') const { incrementCountMetric, @@ -151,7 +151,7 @@ function fetchFromApi ({ options.path = `${evpProxyPrefix}/api/v2/ci/libraries/tests` options.headers['X-Datadog-EVP-Subdomain'] = 'api' } else { - const apiKey = getValueFromEnvSources('DD_API_KEY') + const { apiKey } = getConfig() if (!apiKey) { return done(new Error('Known tests were not fetched because Datadog API key is not defined.')) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js index 940f8f41f89..c43f260436e 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js @@ -1,8 +1,8 @@ 'use strict' +const getConfig = require('../../../config') const request = require('../../../exporters/common/request') const log = require('../../../log') const { safeJSONStringify } = require('../../../exporters/common/util') -const { getValueFromEnvSources } = require('../../../config/helper') const { CoverageCIVisibilityEncoder } = require('../../../encode/coverage-ci-visibility') const BaseWriter = require('../../../exporters/common/writer') @@ -29,7 +29,7 @@ class Writer extends BaseWriter { path: '/api/v2/citestcov', method: 'POST', headers: { - 'dd-api-key': getValueFromEnvSources('DD_API_KEY'), + 'dd-api-key': getConfig().apiKey, ...form.getHeaders(), }, timeout: 15_000, diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/di-logs-writer.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/di-logs-writer.js index 4d272fddfe3..2b797f27044 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/di-logs-writer.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/di-logs-writer.js @@ -1,9 +1,9 @@ 'use strict' +const getConfig = require('../../../config') const request = 
require('../../../exporters/common/request') const log = require('../../../log') const { safeJSONStringify } = require('../../../exporters/common/util') const { JSONEncoder } = require('../../encode/json-encoder') -const { getValueFromEnvSources } = require('../../../config/helper') const { DEBUGGER_INPUT_V1 } = require('../../../debugger/constants') const BaseWriter = require('../../../exporters/common/writer') @@ -26,7 +26,7 @@ class DynamicInstrumentationLogsWriter extends BaseWriter { path: '/api/v2/logs', method: 'POST', headers: { - 'dd-api-key': getValueFromEnvSources('DD_API_KEY'), + 'dd-api-key': getConfig().apiKey, 'Content-Type': 'application/json', }, timeout: this.timeout, diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js index 13981ca27c2..c092e1f1a10 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js @@ -1,8 +1,8 @@ 'use strict' +const getConfig = require('../../../config') const request = require('../../../exporters/common/request') const { safeJSONStringify } = require('../../../exporters/common/util') const log = require('../../../log') -const { getValueFromEnvSources } = require('../../../config/helper') const { AgentlessCiVisibilityEncoder } = require('../../../encode/agentless-ci-visibility') const BaseWriter = require('../../../exporters/common/writer') @@ -30,7 +30,7 @@ class Writer extends BaseWriter { path: '/api/v2/citestcycle', method: 'POST', headers: { - 'dd-api-key': getValueFromEnvSources('DD_API_KEY'), + 'dd-api-key': getConfig().apiKey, 'Content-Type': 'application/msgpack', }, timeout: 15_000, diff --git a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js index 68e79d64949..f8feb908a24 100644 --- 
a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +++ b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js @@ -3,12 +3,11 @@ const fs = require('fs') const path = require('path') +const getConfig = require('../../../config') const FormData = require('../../../exporters/common/form-data') const request = require('../../../exporters/common/request') -const { getValueFromEnvSources } = require('../../../config/helper') const log = require('../../../log') -const { isFalse } = require('../../../util') const { getLatestCommits, getRepositoryUrl, @@ -51,7 +50,7 @@ function getCommonRequestOptions (url) { return { method: 'POST', headers: { - 'dd-api-key': getValueFromEnvSources('DD_API_KEY'), + 'dd-api-key': getConfig().apiKey, }, timeout: 15_000, url, @@ -288,7 +287,7 @@ function sendGitMetadata (url, { isEvpProxy, evpProxyPrefix }, configRepositoryU } // Otherwise we unshallow and get commits to upload again log.debug('It is shallow clone, unshallowing...') - if (!isFalse(getValueFromEnvSources('DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED'))) { + if (getConfig().DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED) { unshallowRepository(false) } diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index 29a154ae2c1..153fdfcbc39 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -1,8 +1,8 @@ 'use strict' +const getConfig = require('../../config') const request = require('../requests/request') const log = require('../../log') -const { getValueFromEnvSources } = require('../../config/helper') const { incrementCountMetric, distributionMetric, @@ -120,7 +120,7 @@ function fetchFromApi ({ options.path = `${evpProxyPrefix}/api/v2/ci/tests/skippable` options.headers['X-Datadog-EVP-Subdomain'] = 'api' } 
else { - const apiKey = getValueFromEnvSources('DD_API_KEY') + const { apiKey } = getConfig() if (!apiKey) { return done(new Error('Skippable suites were not fetched because Datadog API key is not defined.')) } diff --git a/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js b/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js index 330337af257..852fcf4127e 100644 --- a/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js +++ b/packages/dd-trace/src/ci-visibility/log-submission/log-submission-plugin.js @@ -2,26 +2,25 @@ const Plugin = require('../../plugins/plugin') const log = require('../../log') -const { getValueFromEnvSources } = require('../../config/helper') function getWinstonLogSubmissionParameters (config) { - const { site, service } = config + const { site, service, apiKey, DD_AGENTLESS_LOG_SUBMISSION_URL } = config const defaultParameters = { host: `http-intake.logs.${site}`, path: `/api/v2/logs?ddsource=winston&service=${service}`, ssl: true, headers: { - 'DD-API-KEY': getValueFromEnvSources('DD_API_KEY'), + 'DD-API-KEY': apiKey, }, } - if (!getValueFromEnvSources('DD_AGENTLESS_LOG_SUBMISSION_URL')) { + if (!DD_AGENTLESS_LOG_SUBMISSION_URL) { return defaultParameters } try { - const url = new URL(getValueFromEnvSources('DD_AGENTLESS_LOG_SUBMISSION_URL')) + const url = new URL(DD_AGENTLESS_LOG_SUBMISSION_URL) return { host: url.hostname, port: url.port, diff --git a/packages/dd-trace/src/ci-visibility/requests/fs-cache.js b/packages/dd-trace/src/ci-visibility/requests/fs-cache.js index 443bb3ea6e6..fc1e029dad2 100644 --- a/packages/dd-trace/src/ci-visibility/requests/fs-cache.js +++ b/packages/dd-trace/src/ci-visibility/requests/fs-cache.js @@ -5,8 +5,8 @@ const path = require('node:path') const { createHash } = require('node:crypto') const { tmpdir } = require('node:os') +const getConfig = require('../../config') const log = require('../../log') -const { getValueFromEnvSources } = 
require('../../config/helper') const CACHE_TTL_MS = 30 * 60 * 1000 // 30 minutes const CACHE_LOCK_POLL_MS = 500 @@ -14,13 +14,12 @@ const CACHE_LOCK_TIMEOUT_MS = 120_000 // 2 minutes const CACHE_LOCK_HEARTBEAT_MS = 30_000 // 30 seconds /** - * Returns whether the filesystem cache is enabled via the env var. + * Returns whether the filesystem cache is enabled via config. * * @returns {boolean} */ function isCacheEnabled () { - const { isTrue } = require('../../util') - return isTrue(getValueFromEnvSources('DD_EXPERIMENTAL_TEST_REQUESTS_FS_CACHE')) + return getConfig().DD_EXPERIMENTAL_TEST_REQUESTS_FS_CACHE } /** diff --git a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js index 96a126f7af1..816a61d83eb 100644 --- a/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js +++ b/packages/dd-trace/src/ci-visibility/requests/get-library-configuration.js @@ -1,8 +1,8 @@ 'use strict' +const getConfig = require('../../config') const id = require('../../id') const log = require('../../log') -const { getValueFromEnvSources } = require('../../config/helper') const { incrementCountMetric, distributionMetric, @@ -36,6 +36,7 @@ function getLibraryConfiguration ({ custom, tag, }, done) { + const config = getConfig() const options = { path: '/api/v2/libraries/tests/services/setting', method: 'POST', @@ -50,11 +51,10 @@ function getLibraryConfiguration ({ options.path = `${evpProxyPrefix}/api/v2/libraries/tests/services/setting` options.headers['X-Datadog-EVP-Subdomain'] = 'api' } else { - const apiKey = getValueFromEnvSources('DD_API_KEY') - if (!apiKey) { + if (!config.apiKey) { return done(new Error('Request to settings endpoint was not done because Datadog API key is not defined.')) } - options.headers['dd-api-key'] = apiKey + options.headers['dd-api-key'] = config.apiKey } const data = JSON.stringify({ @@ -132,11 +132,11 @@ function getLibraryConfiguration ({ 
log.debug('Remote settings: %j', settings) - if (getValueFromEnvSources('DD_CIVISIBILITY_DANGEROUSLY_FORCE_COVERAGE')) { + if (config.DD_CIVISIBILITY_DANGEROUSLY_FORCE_COVERAGE) { settings.isCodeCoverageEnabled = true log.debug('Dangerously set code coverage to true') } - if (getValueFromEnvSources('DD_CIVISIBILITY_DANGEROUSLY_FORCE_TEST_SKIPPING')) { + if (config.DD_CIVISIBILITY_DANGEROUSLY_FORCE_TEST_SKIPPING) { settings.isSuitesSkippingEnabled = true log.debug('Dangerously set test skipping to true') } diff --git a/packages/dd-trace/src/ci-visibility/requests/upload-coverage-report.js b/packages/dd-trace/src/ci-visibility/requests/upload-coverage-report.js index 67f6d946904..b7d16c1af76 100644 --- a/packages/dd-trace/src/ci-visibility/requests/upload-coverage-report.js +++ b/packages/dd-trace/src/ci-visibility/requests/upload-coverage-report.js @@ -3,10 +3,10 @@ const { readFileSync } = require('node:fs') const { gzipSync } = require('node:zlib') +const getConfig = require('../../config') const FormData = require('../../exporters/common/form-data') const request = require('../../exporters/common/request') const log = require('../../log') -const { getValueFromEnvSources } = require('../../config/helper') const { incrementCountMetric, distributionMetric, @@ -34,7 +34,7 @@ function uploadCoverageReport ( { filePath, format, testEnvironmentMetadata, url, isEvpProxy, evpProxyPrefix }, callback ) { - const apiKey = getValueFromEnvSources('DD_API_KEY') + const apiKey = getConfig().apiKey if (!apiKey && !isEvpProxy) { return callback(new Error('DD_API_KEY is required for coverage report upload')) diff --git a/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js b/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js index f1220bc0111..7191c2f0588 100644 --- a/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js +++ 
b/packages/dd-trace/src/ci-visibility/test-management/get-test-management-tests.js @@ -1,8 +1,8 @@ 'use strict' +const getConfig = require('../../config') const request = require('../requests/request') const id = require('../../id') -const { getValueFromEnvSources } = require('../../config/helper') const log = require('../../log') const { @@ -121,7 +121,7 @@ function fetchFromApi ({ options.path = `${evpProxyPrefix}/api/v2/test/libraries/test-management/tests` options.headers['X-Datadog-EVP-Subdomain'] = 'api' } else { - const apiKey = getValueFromEnvSources('DD_API_KEY') + const { apiKey } = getConfig() if (!apiKey) { return done(new Error('Test management tests were not fetched because Datadog API key is not defined.')) } diff --git a/packages/dd-trace/src/config/config-types.d.ts b/packages/dd-trace/src/config/config-types.d.ts index 6a83baf4630..e56472dd4b8 100644 --- a/packages/dd-trace/src/config/config-types.d.ts +++ b/packages/dd-trace/src/config/config-types.d.ts @@ -10,11 +10,8 @@ export interface ConfigProperties extends GeneratedConfig { } commitSHA: string | undefined debug: boolean - gcpPubSubPushSubscriptionEnabled: boolean instrumentationSource: 'manual' | 'ssi' - isAzureFunction: boolean isCiVisibility: boolean - isGCPFunction: boolean isServiceNameInferred: boolean isServiceUserProvided: boolean logger: import('../../../../index').TracerOptions['logger'] | undefined @@ -22,7 +19,6 @@ export interface ConfigProperties extends GeneratedConfig { readonly parsedDdTags: Record plugins: boolean repositoryUrl: string | undefined - rules: import('../../../../index').SamplingRule[] sampler: { rateLimit: number rules: import('../../../../index').SamplingRule[] diff --git a/packages/dd-trace/src/config/defaults.js b/packages/dd-trace/src/config/defaults.js index ac40d35c87e..90f127208f6 100644 --- a/packages/dd-trace/src/config/defaults.js +++ b/packages/dd-trace/src/config/defaults.js @@ -82,10 +82,6 @@ for (const [name, value] of Object.entries(defaults)) 
{ */ function generateTelemetry (value = null, origin, optionName) { const { type, canonicalName = optionName } = configurationsTable[optionName] ?? { type: typeof value } - // TODO: Consider adding a preParser hook to the parsers object. - if (canonicalName === 'OTEL_RESOURCE_ATTRIBUTES') { - value = telemetryTransformers.MAP(value) - } // TODO: Should we not send defaults to telemetry to reduce size? // TODO: How to handle aliases/actual names in the future? Optional fields? Normalize the name at intake? // TODO: Validate that space separated tags are parsed by the backend. Optimizations would be possible with that. diff --git a/packages/dd-trace/src/config/generated-config-types.d.ts b/packages/dd-trace/src/config/generated-config-types.d.ts index 51568eb8b0b..cca3ff0d084 100644 --- a/packages/dd-trace/src/config/generated-config-types.d.ts +++ b/packages/dd-trace/src/config/generated-config-types.d.ts @@ -297,6 +297,7 @@ export interface GeneratedConfig { DD_TRACE_MIDDIE_ENABLED: boolean; DD_TRACE_MOCHA_EACH_ENABLED: boolean; DD_TRACE_MOCHA_ENABLED: boolean; + DD_TRACE_MODELCONTEXTPROTOCOL_SDK_ENABLED: boolean; DD_TRACE_MOLECULER_ENABLED: boolean; DD_TRACE_MONGODB_CORE_ENABLED: boolean; DD_TRACE_MONGODB_ENABLED: boolean; @@ -478,36 +479,36 @@ export interface GeneratedConfig { }; openAiLogsEnabled: boolean; OTEL_EXPORTER_OTLP_ENDPOINT: string | undefined; + OTEL_EXPORTER_OTLP_HEADERS: Record | undefined; + OTEL_EXPORTER_OTLP_LOGS_HEADERS: Record | undefined; + OTEL_EXPORTER_OTLP_METRICS_HEADERS: Record | undefined; + OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: string; + OTEL_EXPORTER_OTLP_TRACES_HEADERS: Record | undefined; + OTEL_EXPORTER_OTLP_TRACES_PROTOCOL: "http/json"; + OTEL_EXPORTER_OTLP_TRACES_TIMEOUT: number; OTEL_LOGS_EXPORTER: "none" | "otlp" | undefined; OTEL_METRICS_EXPORTER: "none" | "otlp" | undefined; OTEL_RESOURCE_ATTRIBUTES: Record; OTEL_SDK_DISABLED: boolean; - OTEL_TRACES_EXPORTER: "none" | "otlp"; + OTEL_TRACES_EXPORTER: "none" | "otlp" | 
undefined; OTEL_TRACES_SAMPLER: "always_on" | "always_off" | "traceidratio" | "parentbased_always_on" | "parentbased_always_off" | "parentbased_traceidratio"; OTEL_TRACES_SAMPLER_ARG: number | undefined; otelBatchTimeout: number; - otelHeaders: string | undefined; otelLogsEnabled: boolean; - otelLogsHeaders: string | undefined; otelLogsProtocol: string; otelLogsTimeout: number; - otelLogsUrl: string | undefined; + otelLogsUrl: string; otelMaxExportBatchSize: number; otelMaxQueueSize: number; otelMetricsEnabled: boolean; otelMetricsExportInterval: number; otelMetricsExportTimeout: number; - otelMetricsHeaders: string | undefined; otelMetricsProtocol: string; otelMetricsTemporalityPreference: "DELTA" | "CUMULATIVE" | "LOWMEMORY"; otelMetricsTimeout: number; - otelMetricsUrl: string | undefined; + otelMetricsUrl: string; otelProtocol: string; otelTimeout: number; - otelTracesHeaders: Record | undefined; - otelTracesProtocol: string; - otelTracesTimeout: number; - otelTracesUrl: string | undefined; peerServiceMapping: Record; port: string | number; profiling: { @@ -536,7 +537,7 @@ export interface GeneratedConfig { sampleRate: number | undefined; samplingRules: import('../../../../index').SamplingRule[]; scope: string | undefined; - service: string | undefined; + service: string; serviceMapping: Record; site: string; spanAttributeSchema: "v0" | "v1"; diff --git a/packages/dd-trace/src/config/index.js b/packages/dd-trace/src/config/index.js index 02e246c0b98..90a4c9afad9 100644 --- a/packages/dd-trace/src/config/index.js +++ b/packages/dd-trace/src/config/index.js @@ -358,21 +358,9 @@ class Config extends ConfigBase { setAndTrack(this, 'otelMetricsEnabled', false) } - const otelTracesEnabled = trackedConfigOrigins.has('OTEL_TRACES_EXPORTER') && - this.OTEL_TRACES_EXPORTER === 'otlp' - if (this.protocolVersion && this.protocolVersion !== '0.4' && otelTracesEnabled) { + if (this.OTEL_TRACES_EXPORTER === 'otlp' && this.protocolVersion && this.protocolVersion !== '0.4') { 
log.warn('DD_TRACE_AGENT_PROTOCOL_VERSION is set, disabling OTLP traces export') - setAndTrack(this, 'otelTracesEnabled', false) - } else { - setAndTrack(this, 'otelTracesEnabled', otelTracesEnabled) - } - - if (this.otelTracesProtocol && this.otelTracesProtocol !== 'http/json') { - log.warn( - 'OTEL_EXPORTER_OTLP_TRACES_PROTOCOL=%s is not yet supported; only http/json is currently implemented', - this.otelTracesProtocol - ) - setAndTrack(this, 'otelTracesProtocol', 'http/json') + setAndTrack(this, 'OTEL_TRACES_EXPORTER', 'none') } if (this.telemetry.heartbeatInterval) { @@ -441,16 +429,13 @@ class Config extends ConfigBase { setAndTrack(this, 'runtimeMetrics.enabled', false) } - if (!trackedConfigOrigins.has('sampleRate')) { - const effectiveSampler = (trackedConfigOrigins.has('OTEL_TRACES_EXPORTER') && - this.OTEL_TRACES_EXPORTER === 'otlp' && - !trackedConfigOrigins.has('OTEL_TRACES_SAMPLER')) - ? 'parentbased_always_on' - : this.OTEL_TRACES_SAMPLER - if (effectiveSampler && (trackedConfigOrigins.has('OTEL_TRACES_SAMPLER') || - trackedConfigOrigins.has('OTEL_TRACES_EXPORTER'))) { - setAndTrack(this, 'sampleRate', getFromOtelSamplerMap(effectiveSampler, this.OTEL_TRACES_SAMPLER_ARG)) - } + // Apply the OTel sampler when the user opted into OTel traces or explicitly set the sampler. + // OTEL_TRACES_SAMPLER has `default: parentbased_always_on` (per OTel spec), so opt-in users + // that don't set the sampler still get parent-based sampling. 
+ if (!trackedConfigOrigins.has('sampleRate') && + (trackedConfigOrigins.has('OTEL_TRACES_SAMPLER') || this.OTEL_TRACES_EXPORTER === 'otlp')) { + setAndTrack(this, 'sampleRate', + getFromOtelSamplerMap(this.OTEL_TRACES_SAMPLER, this.OTEL_TRACES_SAMPLER_ARG)) } if (this.DD_SPAN_SAMPLING_RULES_FILE) { @@ -605,19 +590,18 @@ class Config extends ConfigBase { } } - const DEFAULT_OTLP_PORT = '4318' + // TODO: This could likely be moved to the base class and allow easier GRPC handling + // Default OTLP endpoints follow the configured agent host so users who point DD at a custom + // agent (DD_AGENT_HOST / DD_TRACE_AGENT_URL) also reach OTLP on that host. + const defaultOtlpBase = this.OTEL_EXPORTER_OTLP_ENDPOINT?.replace(/\/$/, '') ?? `http://${agentHostname}:4318` if (!this.otelLogsUrl) { - setAndTrack(this, 'otelLogsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}`) + setAndTrack(this, 'otelLogsUrl', `${defaultOtlpBase}/v1/logs`) } if (!this.otelMetricsUrl) { - setAndTrack(this, 'otelMetricsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics`) - } - if (!trackedConfigOrigins.has('otelTracesUrl') && this.OTEL_EXPORTER_OTLP_ENDPOINT) { - // Generic OTLP endpoint: per spec, append /v1/traces signal-specific subpath - setAndTrack(this, 'otelTracesUrl', this.OTEL_EXPORTER_OTLP_ENDPOINT.replace(/\/$/, '') + '/v1/traces') - } else if (!this.otelTracesUrl) { - const tracesHostname = agentHostname === '127.0.0.1' ? 
'localhost' : agentHostname - setAndTrack(this, 'otelTracesUrl', `http://${tracesHostname}:${DEFAULT_OTLP_PORT}/v1/traces`) + setAndTrack(this, 'otelMetricsUrl', `${defaultOtlpBase}/v1/metrics`) + } + if (!this.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) { + setAndTrack(this, 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT', `${defaultOtlpBase}/v1/traces`) } if (process.platform === 'win32') { diff --git a/packages/dd-trace/src/config/parsers.js b/packages/dd-trace/src/config/parsers.js index 7ddd29b24a3..bfa89e115ac 100644 --- a/packages/dd-trace/src/config/parsers.js +++ b/packages/dd-trace/src/config/parsers.js @@ -13,6 +13,15 @@ const VALID_PROPAGATION_STYLES = new Set([ 'datadog', 'tracecontext', 'b3', 'b3 single header', 'b3multi', 'baggage', 'none', ]) +const RENAMED_OTEL_TAGS = new Map( + [ + ['deployment.environment.name', 'env'], + ['deployment.environment', 'env'], + ['service.name', 'service'], + ['service.version', 'version'], + ] +) + function toCase (value, methodName) { if (Array.isArray(value)) { return value.map(item => { @@ -66,12 +75,12 @@ const transformers = { } return value }, - parseOtelTags (value, optionName) { - return parsers.MAP(value - ?.replace(/(^|,)deployment\.environment=/, '$1env:') - .replace(/(^|,)service\.name=/, '$1service:') - .replace(/(^|,)service\.version=/, '$1version:') - .replaceAll('=', ':'), optionName) + parseOtelTags (object) { + const tags = {} + for (const [key, value] of Object.entries(object)) { + tags[RENAMED_OTEL_TAGS.get(key) ?? key] = value + } + return tags }, normalizeProfilingEnabled (configValue) { if (configValue == null) { @@ -225,11 +234,19 @@ const parsers = { if (!raw) { return entries } - // DD_TAGS is a special case. It may be a map of key-value pairs separated by spaces. 
- if (optionName === 'DD_TAGS' && !raw.includes(',')) { + let valueSeparator = ':' + if (optionName.startsWith('OTEL_')) { + // OTEL spec uses `key=value,key=value` + // (https://opentelemetry.io/docs/specs/otel/protocol/exporter/#specifying-headers-via-environment-variables), + // while DD uses `key:value,key:value`. Parse OTEL-prefixed options with `=` so downstream code + // receives a proper map and telemetry reports the parsed entries. The char-by-char loop + // avoids the allocations that `split(',')` + `indexOf('=')` do per pair. + valueSeparator = '=' + } else if (optionName === 'DD_TAGS' && !raw.includes(',')) { + // DD_TAGS is a special case. It may be a map of key-value pairs separated by spaces. raw = raw.replaceAll(/\s+/g, ',') } - tagger.add(entries, raw) + tagger.add(entries, raw, valueSeparator) return entries }, JSON (raw) { diff --git a/packages/dd-trace/src/config/supported-configurations.json b/packages/dd-trace/src/config/supported-configurations.json index d5f6f3e0d7a..9c709d2be9f 100644 --- a/packages/dd-trace/src/config/supported-configurations.json +++ b/packages/dd-trace/src/config/supported-configurations.json @@ -179,7 +179,8 @@ { "implementation": "A", "type": "int", - "default": "100" + "default": "100", + "allowed": "\\d+" } ], "DD_APM_TRACING_ENABLED": [ @@ -3110,6 +3111,13 @@ "default": "true" } ], + "DD_TRACE_MODELCONTEXTPROTOCOL_SDK_ENABLED": [ + { + "implementation": "A", + "type": "boolean", + "default": "true" + } + ], "DD_TRACE_MOLECULER_ENABLED": [ { "implementation": "A", @@ -3896,18 +3904,16 @@ ], "OTEL_EXPORTER_OTLP_HEADERS": [ { - "implementation": "C", - "type": "string", - "default": null, - "internalPropertyName": "otelHeaders" + "implementation": "B", + "type": "map", + "default": null } ], "OTEL_EXPORTER_OTLP_TRACES_ENDPOINT": [ { "implementation": "A", "type": "string", - "default": null, - "internalPropertyName": "otelTracesUrl" + "default": null } ], "OTEL_EXPORTER_OTLP_TRACES_HEADERS": [ @@ -3915,7 +3921,6 @@ 
"implementation": "B", "type": "map", "default": null, - "internalPropertyName": "otelTracesHeaders", "aliases": [ "OTEL_EXPORTER_OTLP_HEADERS" ] @@ -3926,7 +3931,7 @@ "implementation": "B", "type": "string", "default": "http/json", - "internalPropertyName": "otelTracesProtocol", + "allowed": "http/json", "aliases": [ "OTEL_EXPORTER_OTLP_PROTOCOL" ] @@ -3936,7 +3941,6 @@ { "implementation": "B", "type": "int", - "internalPropertyName": "otelTracesTimeout", "default": "10000", "aliases": [ "OTEL_EXPORTER_OTLP_TIMEOUT" @@ -3948,18 +3952,14 @@ "implementation": "A", "type": "string", "default": null, - "internalPropertyName": "otelLogsUrl", - "aliases": [ - "OTEL_EXPORTER_OTLP_ENDPOINT" - ] + "internalPropertyName": "otelLogsUrl" } ], "OTEL_EXPORTER_OTLP_LOGS_HEADERS": [ { - "implementation": "A", - "type": "string", + "implementation": "B", + "type": "map", "default": null, - "internalPropertyName": "otelLogsHeaders", "aliases": [ "OTEL_EXPORTER_OTLP_HEADERS" ] @@ -3993,18 +3993,14 @@ "implementation": "A", "type": "string", "default": null, - "internalPropertyName": "otelMetricsUrl", - "aliases": [ - "OTEL_EXPORTER_OTLP_ENDPOINT" - ] + "internalPropertyName": "otelMetricsUrl" } ], "OTEL_EXPORTER_OTLP_METRICS_HEADERS": [ { - "implementation": "B", - "type": "string", + "implementation": "C", + "type": "map", "default": null, - "internalPropertyName": "otelMetricsHeaders", "aliases": [ "OTEL_EXPORTER_OTLP_HEADERS" ] @@ -4098,8 +4094,8 @@ ], "OTEL_RESOURCE_ATTRIBUTES": [ { - "implementation": "B", - "type": "string", + "implementation": "A", + "type": "map", "default": "", "transform": "parseOtelTags" } @@ -4113,9 +4109,9 @@ ], "OTEL_TRACES_EXPORTER": [ { - "implementation": "F", + "implementation": "H", "type": "string", - "default": "otlp", + "default": null, "allowed": "none|otlp", "transform": "toLowerCase" } diff --git a/packages/dd-trace/src/debugger/config.js b/packages/dd-trace/src/debugger/config.js index 0aed8f2aa46..96636dfcc36 100644 --- 
a/packages/dd-trace/src/debugger/config.js +++ b/packages/dd-trace/src/debugger/config.js @@ -5,6 +5,7 @@ module.exports = function getDebuggerConfig (config, inputPath) { commitSHA: config.commitSHA, debug: config.debug, dynamicInstrumentation: config.dynamicInstrumentation, + env: config.env, hostname: config.hostname, logLevel: config.logLevel, port: config.port, @@ -13,6 +14,7 @@ module.exports = function getDebuggerConfig (config, inputPath) { runtimeId: config.tags['runtime-id'], service: config.service, url: config.url?.toString(), + version: config.version, inputPath, } } diff --git a/packages/dd-trace/src/debugger/devtools_client/send.js b/packages/dd-trace/src/debugger/devtools_client/send.js index eabe952fe60..81c9f159a28 100644 --- a/packages/dd-trace/src/debugger/devtools_client/send.js +++ b/packages/dd-trace/src/debugger/devtools_client/send.js @@ -6,7 +6,6 @@ const { stringify } = require('querystring') const { version } = require('../../../../../package.json') const request = require('../../exporters/common/request') const { GIT_COMMIT_SHA, GIT_REPOSITORY_URL } = require('../../plugins/util/tags') -const { getValueFromEnvSources } = require('../../config/helper') const { DEBUGGER_DIAGNOSTICS_V1, DEBUGGER_INPUT_V2 } = require('../constants') const log = require('./log') const JSONBuffer = require('./json-buffer') @@ -23,14 +22,14 @@ const ddsource = 'dd_debugger' const hostname = getHostname() const service = config.service -const ddtags = [ - ['env', getValueFromEnvSources('DD_ENV')], - ['version', getValueFromEnvSources('DD_VERSION')], +const ddtags = buildTags([ + ['env', config.env], + ['version', config.version], ['debugger_version', version], ['host_name', hostname], [GIT_COMMIT_SHA, config.commitSHA], [GIT_REPOSITORY_URL, config.repositoryUrl], -].filter(([, value]) => value !== undefined).map((pair) => pair.join(':')).join(',') +]) let path setInputPath(config.inputPath) @@ -136,3 +135,24 @@ function setInputPath (newPath) { config.inputPath 
= newPath path = `${newPath}?${stringify({ ddtags })}` } + +/** + * @param {Array<[string, unknown]>} tags - The tags to serialize. + * @returns {string} The serialized tags. + */ +function buildTags (tags) { + const serializedTags = [] + + for (const [key, rawValue] of tags) { + if (rawValue === undefined) continue + + if (String(rawValue).includes(',')) { + log.warn('[debugger:devtools_client] Skipping invalid tag value for %s', key) + continue + } + + serializedTags.push(`${key}:${rawValue}`) + } + + return serializedTags.join(',') +} diff --git a/packages/dd-trace/src/encode/0.4.js b/packages/dd-trace/src/encode/0.4.js index f50b57d1f41..1a5f829a40e 100644 --- a/packages/dd-trace/src/encode/0.4.js +++ b/packages/dd-trace/src/encode/0.4.js @@ -1,9 +1,8 @@ 'use strict' +const getConfig = require('../config') const { MsgpackChunk, MsgpackEncoder } = require('../msgpack') const log = require('../log') -const { isTrue } = require('../util') -const { getValueFromEnvSources } = require('../config/helper') const { truncateSpan, normalizeSpan } = require('./tags-processors') const SOFT_LIMIT = 8 * 1024 * 1024 // 8MB @@ -12,7 +11,7 @@ function formatSpan (span, config) { span = normalizeSpan(truncateSpan(span, false)) if (span.span_events) { // ensure span events are encoded as tags if agent doesn't support native top level span events - if (config?.trace?.nativeSpanEvents) { + if (config.trace.nativeSpanEvents) { formatSpanEvents(span) } else { span.meta.events = JSON.stringify(span.span_events) @@ -30,8 +29,8 @@ class AgentEncoder { this._stringBytes = new MsgpackChunk() this._writer = writer this._reset() - this._debugEncoding = isTrue(getValueFromEnvSources('DD_TRACE_ENCODING_DEBUG')) - this._config = this._writer?._config + this._config = getConfig() + this._debugEncoding = this._config.DD_TRACE_ENCODING_DEBUG } count () { diff --git a/packages/dd-trace/src/exporters/agent/index.js b/packages/dd-trace/src/exporters/agent/index.js index becba84eb29..28a26713ed5 
100644 --- a/packages/dd-trace/src/exporters/agent/index.js +++ b/packages/dd-trace/src/exporters/agent/index.js @@ -24,7 +24,6 @@ class AgentExporter { lookup, protocolVersion, headers, - config, }) globalThis[Symbol.for('dd-trace')].beforeExitHandlers.add(this.flush.bind(this)) diff --git a/packages/dd-trace/src/exporters/agent/writer.js b/packages/dd-trace/src/exporters/agent/writer.js index f880b2ba1be..aca885bae9e 100644 --- a/packages/dd-trace/src/exporters/agent/writer.js +++ b/packages/dd-trace/src/exporters/agent/writer.js @@ -20,14 +20,13 @@ class AgentWriter extends BaseWriter { ...args[0], beforeFirstFlush: () => firstFlushChannel.publish(), }) - const { prioritySampler, lookup, protocolVersion, headers, config = {} } = args[0] + const { prioritySampler, lookup, protocolVersion, headers } = args[0] const AgentEncoder = getEncoder(protocolVersion) this._prioritySampler = prioritySampler this._lookup = lookup this._protocolVersion = protocolVersion this._headers = headers - this._config = config this._encoder = new AgentEncoder(this) } diff --git a/packages/dd-trace/src/exporters/agentless/writer.js b/packages/dd-trace/src/exporters/agentless/writer.js index bfff722b784..10ca6c0477b 100644 --- a/packages/dd-trace/src/exporters/agentless/writer.js +++ b/packages/dd-trace/src/exporters/agentless/writer.js @@ -1,6 +1,6 @@ 'use strict' -const { getValueFromEnvSources } = require('../../config/helper') +const getConfig = require('../../config') const log = require('../../log') const request = require('../common/request') const tracerVersion = require('../../../../../package.json').version @@ -39,7 +39,7 @@ class AgentlessWriter extends BaseWriter { } } - if (!getValueFromEnvSources('DD_API_KEY')) { + if (!getConfig().apiKey) { this.#apiKeyMissing = true log.error('DD_API_KEY is required for agentless trace intake. Set DD_API_KEY. 
Traces will not be sent.') } @@ -108,7 +108,7 @@ class AgentlessWriter extends BaseWriter { return } - const apiKey = getValueFromEnvSources('DD_API_KEY') + const apiKey = getConfig().apiKey if (!apiKey) { if (!this.#apiKeyMissing) { this.#apiKeyMissing = true diff --git a/packages/dd-trace/src/exporters/common/util.js b/packages/dd-trace/src/exporters/common/util.js index da0337b52b0..7d1099d5ee7 100644 --- a/packages/dd-trace/src/exporters/common/util.js +++ b/packages/dd-trace/src/exporters/common/util.js @@ -1,12 +1,12 @@ 'use strict' -const { getValueFromEnvSources } = require('../../config/helper') +const getConfig = require('../../config') function safeJSONStringify (value) { return JSON.stringify( value, (key, value) => key === 'dd-api-key' ? undefined : value, - getValueFromEnvSources('DD_TRACE_BEAUTIFUL_LOGS') ? 2 : undefined + getConfig().DD_TRACE_BEAUTIFUL_LOGS ? 2 : undefined ) } diff --git a/packages/dd-trace/src/id.js b/packages/dd-trace/src/id.js index caa7f03bea9..288d7cae1c7 100644 --- a/packages/dd-trace/src/id.js +++ b/packages/dd-trace/src/id.js @@ -245,3 +245,5 @@ function writeUInt32BE (buffer, value, offset) { module.exports = function createIdentifier (value, radix) { return new Identifier(value ?? 
'', radix) } + +module.exports.Identifier = Identifier diff --git a/packages/dd-trace/src/lambda/handler.js b/packages/dd-trace/src/lambda/handler.js index 58a5e8c8a7c..5af9ca8bd68 100644 --- a/packages/dd-trace/src/lambda/handler.js +++ b/packages/dd-trace/src/lambda/handler.js @@ -3,7 +3,6 @@ const log = require('../log') const { channel } = require('../../../datadog-instrumentations/src/helpers/instrument') const { ERROR_MESSAGE, ERROR_TYPE } = require('../constants') -const { getValueFromEnvSources } = require('../config/helper') const { ImpendingTimeout } = require('./runtime/errors') const { extractContext } = require('./context') @@ -27,8 +26,7 @@ let __lambdaTimeout function checkTimeout (context) { const remainingTimeInMillis = context.getRemainingTimeInMillis() - let apmFlushDeadline = Number.parseInt(getValueFromEnvSources('DD_APM_FLUSH_DEADLINE_MILLISECONDS')) || 100 - apmFlushDeadline = apmFlushDeadline < 0 ? 100 : apmFlushDeadline + const apmFlushDeadline = tracer._config.DD_APM_FLUSH_DEADLINE_MILLISECONDS __lambdaTimeout = setTimeout(() => { timeoutChannel.publish() diff --git a/packages/dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk/index.js b/packages/dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk/index.js new file mode 100644 index 00000000000..2c9b48dce94 --- /dev/null +++ b/packages/dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk/index.js @@ -0,0 +1,68 @@ +'use strict' + +const LLMObsPlugin = require('../base') +const { formatInput, formatOutput } = require('./utils') + +class McpToolCallLLMObsPlugin extends LLMObsPlugin { + static id = 'llmobs_mcp_tool_call' + static integration = 'modelcontextprotocol-sdk' + static prefix = 'tracing:orchestrion:@modelcontextprotocol/sdk:Client_callTool' + + getLLMObsSpanRegisterOptions (ctx) { + const params = ctx.arguments?.[0] + const toolName = params?.name || 'unknown_tool' + + return { + kind: 'tool', + name: `MCP Client Tool Call: ${toolName}`, + } + } + + setLLMObsTags (ctx) { + const 
span = ctx.currentStore?.span + if (!span) return + + const params = ctx.arguments?.[0] + const toolName = params?.name + const toolArguments = params?.arguments + + const spanTags = { mcp_tool_kind: 'client' } + + const serverVersion = ctx.self?.getServerVersion?.() + if (serverVersion) { + if (serverVersion.name) spanTags.mcp_server_name = serverVersion.name + if (serverVersion.version) spanTags.mcp_server_version = serverVersion.version + if (serverVersion.title) spanTags.mcp_server_title = serverVersion.title + } + + this._tagger.tagSpanTags(span, spanTags) + + const hasError = ctx.error || ctx.result?.isError + const input = formatInput(toolName, toolArguments) + const output = hasError ? undefined : formatOutput(ctx.result) + + this._tagger.tagTextIO(span, input, output) + } +} + +class McpListToolsLLMObsPlugin extends LLMObsPlugin { + static id = 'llmobs_mcp_list_tools' + static integration = 'modelcontextprotocol-sdk' + static prefix = 'tracing:orchestrion:@modelcontextprotocol/sdk:Client_listTools' + + getLLMObsSpanRegisterOptions () { + return { + kind: 'task', + name: 'MCP Client List Tools', + } + } + + setLLMObsTags (ctx) { + const span = ctx.currentStore?.span + if (!span || ctx.error) return + + this._tagger.tagTextIO(span, null, JSON.stringify(ctx.result)) + } +} + +module.exports = [McpToolCallLLMObsPlugin, McpListToolsLLMObsPlugin] diff --git a/packages/dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk/utils.js b/packages/dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk/utils.js new file mode 100644 index 00000000000..3a0e55cb5ea --- /dev/null +++ b/packages/dd-trace/src/llmobs/plugins/modelcontextprotocol-sdk/utils.js @@ -0,0 +1,57 @@ +'use strict' + +/** + * Formats tool call input as a JSON string. 
+ * @param {string} toolName - The name of the tool being called + * @param {object} toolArguments - The arguments passed to the tool + * @returns {string} Formatted input string + */ +function formatInput (toolName, toolArguments) { + if (!toolName && !toolArguments) return '' + + if (toolArguments === undefined || toolArguments === null) { + return toolName || '' + } + + try { + return JSON.stringify({ name: toolName, arguments: toolArguments }) + } catch { + return toolName || '' + } +} + +/** + * Formats MCP tool call result as a structured object matching Python's output format. + * MCP tool results contain a `content` array with items like: + * `[{ type: 'text', text: '...' }, { type: 'image', data: '...', mimeType: '...' }]` + * @param {object} result - The MCP CallToolResult + * @returns {string} JSON string of `{ content: Array<{type, text, annotations, meta}>, isError: boolean }` + */ +function formatOutput (result) { + if (!result) return '' + + const content = result.content + const isError = result.isError || false + + const processed = [] + if (Array.isArray(content)) { + for (const item of content) { + if (item.type !== 'text') continue + const contentBlock = { + type: item.type, + text: item.text || '', + annotations: item.annotations || {}, + meta: item._meta || {}, + } + processed.push(contentBlock) + } + } + + try { + return JSON.stringify({ content: processed, isError }) + } catch { + return '' + } +} + +module.exports = { formatInput, formatOutput } diff --git a/packages/dd-trace/src/llmobs/sdk.js b/packages/dd-trace/src/llmobs/sdk.js index 8149b02ac77..b6af86efcda 100644 --- a/packages/dd-trace/src/llmobs/sdk.js +++ b/packages/dd-trace/src/llmobs/sdk.js @@ -2,7 +2,7 @@ const { channel } = require('dc-polyfill') -const { isTrue, isError } = require('../util') +const { isError, isTrue } = require('../util') const tracerVersion = require('../../../../package.json').version const logger = require('../log') const { getValueFromEnvSources } = 
require('../config/helper') @@ -427,7 +427,7 @@ class LLMObs extends NoopLLMObs { } // When OTel tracing is enabled, add source:otel tag to allow backend to wait for OTel span conversion - if (isTrue(getValueFromEnvSources('DD_TRACE_OTEL_ENABLED'))) { + if (this._config.DD_TRACE_OTEL_ENABLED) { evaluationTags.source = 'otel' } diff --git a/packages/dd-trace/src/opentelemetry/logs/index.js b/packages/dd-trace/src/opentelemetry/logs/index.js index a36446d7dbe..c9e59929611 100644 --- a/packages/dd-trace/src/opentelemetry/logs/index.js +++ b/packages/dd-trace/src/opentelemetry/logs/index.js @@ -61,7 +61,7 @@ function initializeOpenTelemetryLogs (config) { // Create OTLP exporter using resolved config values const exporter = new OtlpHttpLogExporter( config.otelLogsUrl, - config.otelLogsHeaders, + config.OTEL_EXPORTER_OTLP_LOGS_HEADERS, config.otelLogsTimeout, config.otelLogsProtocol, resourceAttributes diff --git a/packages/dd-trace/src/opentelemetry/logs/otlp_http_log_exporter.js b/packages/dd-trace/src/opentelemetry/logs/otlp_http_log_exporter.js index ac20ccfe1a5..d01b0cec5c2 100644 --- a/packages/dd-trace/src/opentelemetry/logs/otlp_http_log_exporter.js +++ b/packages/dd-trace/src/opentelemetry/logs/otlp_http_log_exporter.js @@ -22,13 +22,14 @@ class OtlpHttpLogExporter extends OtlpHttpExporterBase { * Creates a new OtlpHttpLogExporter instance. * * @param {string} url - OTLP endpoint URL - * @param {string} headers - Additional HTTP headers as comma-separated key=value string + * @param {Record|undefined} headers - Additional HTTP headers parsed from the + * corresponding `OTEL_EXPORTER_OTLP_*_HEADERS` env by the MAP parser. 
* @param {number} timeout - Request timeout in milliseconds * @param {string} protocol - OTLP protocol (http/protobuf or http/json) * @param {Resource} resource - Resource attributes */ constructor (url, headers, timeout, protocol, resource) { - super(url, headers, timeout, protocol, '/v1/logs', 'logs') + super(url, headers, timeout, protocol, 'logs') this.transformer = new OtlpTransformer(resource, protocol) } diff --git a/packages/dd-trace/src/opentelemetry/metrics/index.js b/packages/dd-trace/src/opentelemetry/metrics/index.js index 914baeee330..eb158e2a383 100644 --- a/packages/dd-trace/src/opentelemetry/metrics/index.js +++ b/packages/dd-trace/src/opentelemetry/metrics/index.js @@ -58,7 +58,7 @@ function initializeOpenTelemetryMetrics (config) { const exporter = new OtlpHttpMetricExporter( config.otelMetricsUrl, - config.otelMetricsHeaders, + config.OTEL_EXPORTER_OTLP_METRICS_HEADERS, config.otelMetricsTimeout, config.otelMetricsProtocol, resourceAttributes diff --git a/packages/dd-trace/src/opentelemetry/metrics/otlp_http_metric_exporter.js b/packages/dd-trace/src/opentelemetry/metrics/otlp_http_metric_exporter.js index 6abfdb5b5af..8af42b70854 100644 --- a/packages/dd-trace/src/opentelemetry/metrics/otlp_http_metric_exporter.js +++ b/packages/dd-trace/src/opentelemetry/metrics/otlp_http_metric_exporter.js @@ -18,13 +18,14 @@ class OtlpHttpMetricExporter extends OtlpHttpExporterBase { * Creates a new OtlpHttpMetricExporter instance. * * @param {string} url - OTLP endpoint URL - * @param {string} headers - Additional HTTP headers as comma-separated key=value string + * @param {Record|undefined} headers - Additional HTTP headers parsed from the + * corresponding `OTEL_EXPORTER_OTLP_*_HEADERS` env by the MAP parser. 
* @param {number} timeout - Request timeout in milliseconds * @param {string} protocol - OTLP protocol (http/protobuf or http/json) * @param {Resource} resource - Resource attributes */ constructor (url, headers, timeout, protocol, resource) { - super(url, headers, timeout, protocol, '/v1/metrics', 'metrics') + super(url, headers, timeout, protocol, 'metrics') this.transformer = new OtlpTransformer(resource, protocol) } diff --git a/packages/dd-trace/src/opentelemetry/otlp/otlp_http_exporter_base.js b/packages/dd-trace/src/opentelemetry/otlp/otlp_http_exporter_base.js index 5f041b95569..06cad1e0d2e 100644 --- a/packages/dd-trace/src/opentelemetry/otlp/otlp_http_exporter_base.js +++ b/packages/dd-trace/src/opentelemetry/otlp/otlp_http_exporter_base.js @@ -19,34 +19,31 @@ class OtlpHttpExporterBase { /** * Creates a new OtlpHttpExporterBase instance. * - * @param {string} url - OTLP endpoint URL - * @param {string|undefined} headers - Additional HTTP headers as comma-separated key=value string + * @param {string} url - OTLP endpoint URL (callers are expected to supply the full signal URL) + * @param {Record|undefined} headers - Additional HTTP headers parsed from the + * corresponding `OTEL_EXPORTER_OTLP_*_HEADERS` env by the MAP parser. * @param {number} timeout - Request timeout in milliseconds * @param {string} protocol - OTLP protocol (http/protobuf or http/json) - * @param {string} defaultPath - Default path to use if URL has no path * @param {string} signalType - Signal type for error messages (e.g., 'logs', 'metrics') */ - constructor (url, headers, timeout, protocol, defaultPath, signalType) { - const parsedUrl = new URL(url) - + constructor (url, headers, timeout, protocol, signalType) { this.protocol = protocol this.signalType = signalType - // If no path is provided, use default path - const path = parsedUrl.pathname === '/' ? 
defaultPath : parsedUrl.pathname const isJson = protocol === 'http/json' + // Initialize fields setUrl doesn't touch; it fills in hostname/port/path below. this.options = { - hostname: parsedUrl.hostname, - port: parsedUrl.port, - path: path + parsedUrl.search, method: 'POST', timeout, headers: { 'Content-Type': isJson ? 'application/json' : 'application/x-protobuf', - ...this.#parseAdditionalHeaders(headers), + ...headers, }, } + + this.setUrl(url) + this.telemetryTags = [ 'protocol:http', `encoding:${isJson ? 'json' : 'protobuf'}`, @@ -61,6 +58,7 @@ class OtlpHttpExporterBase { * @protected */ recordTelemetry (metricName, count, additionalTags) { + // @ts-expect-error - additionalTags is optional and can be undefined if (additionalTags?.length > 0) { tracerMetrics.count(metricName, [...this.telemetryTags, ...additionalTags || []]).inc(count) } else { @@ -91,6 +89,7 @@ class OtlpHttpExporterBase { }) res.once('end', () => { + // @ts-expect-error - res.statusCode can be undefined if (res.statusCode >= 200 && res.statusCode < 300) { resultCallback({ code: 0 }) } else { @@ -116,61 +115,15 @@ class OtlpHttpExporterBase { } /** - * Parses additional HTTP headers from a comma-separated string or pre-parsed map. - * @param {string|Record} [headersString=''] - Comma-separated key=value pairs or map - * @returns {Record} Parsed headers object + * Updates the target URL used by this exporter. The URL is used as-is per the OTel spec: the + * caller is responsible for including the signal-specific path (`/v1/traces` etc.). + * @param {string} url - New OTLP endpoint URL */ - #parseAdditionalHeaders (headersString = '') { - if (headersString !== null && typeof headersString === 'object') { - // The config MAP parser uses tagger.add (which splits on ':'), so OTEL-format - // headers ('key=value') arrive with the full 'key=value' string as the map key - // and an empty string as the value. Re-split on '=' to get the correct pairs. 
- const result = {} - for (const [k, v] of Object.entries(headersString)) { - if (v === '' && k.includes('=')) { - const idx = k.indexOf('=') - result[k.slice(0, idx).trim()] = k.slice(idx + 1).trim() - } else { - result[k] = v - } - } - return result - } - const headers = {} - let key = '' - let value = '' - let readingKey = true - - for (const char of headersString) { - if (readingKey) { - if (char === '=') { - readingKey = false - key = key.trim() - } else { - key += char - } - } else if (char === ',') { - value = value.trim() - if (key && value) { - headers[key] = value - } - key = '' - value = '' - readingKey = true - } else { - value += char - } - } - - // Add the last pair if present - if (!readingKey) { - value = value.trim() - if (value) { - headers[key] = value - } - } - - return headers + setUrl (url) { + const parsedUrl = new URL(url) + this.options.hostname = parsedUrl.hostname + this.options.port = parsedUrl.port + this.options.path = parsedUrl.pathname + parsedUrl.search } /** diff --git a/packages/dd-trace/src/opentelemetry/trace/index.js b/packages/dd-trace/src/opentelemetry/trace/index.js index 92f73f9ea0e..91144d6cb9b 100644 --- a/packages/dd-trace/src/opentelemetry/trace/index.js +++ b/packages/dd-trace/src/opentelemetry/trace/index.js @@ -4,7 +4,7 @@ const { VERSION } = require('../../../../../version') const OtlpHttpTraceExporter = require('./otlp_http_trace_exporter') /** - * @typedef {import('../../config')} Config + * @typedef {import('../../config/config-base')} Config * @typedef {import('../../opentracing/tracer')} DatadogTracer */ @@ -33,21 +33,17 @@ const OtlpHttpTraceExporter = require('./otlp_http_trace_exporter') */ function buildResourceAttributes (config) { const resourceAttributes = { - 'service.name': config.service || config.tags.service, + 'service.name': config.service, 'telemetry.sdk.name': 'datadog', 'telemetry.sdk.version': VERSION, 'telemetry.sdk.language': 'nodejs', } - const env = config.env || config.tags.env - if (env) 
resourceAttributes['deployment.environment'] = env - const version = config.version || config.tags.version - if (version) resourceAttributes['service.version'] = version + if (config.env) resourceAttributes['deployment.environment.name'] = config.env + if (config.version) resourceAttributes['service.version'] = config.version - if (config.tags) { - const { service, version, env, ...filteredTags } = config.tags - Object.assign(resourceAttributes, filteredTags) - } + const { service, version, env, ...filteredTags } = config.tags + Object.assign(resourceAttributes, filteredTags) return resourceAttributes } @@ -56,15 +52,14 @@ function buildResourceAttributes (config) { * Creates the OTLP HTTP/JSON trace exporter. * * @param {Config} config - Tracer configuration instance - * @param {import('@opentelemetry/api').Attributes} resourceAttributes - Resource attributes * @returns {OtlpHttpTraceExporter} The OTLP HTTP/JSON exporter */ -function createOtlpTraceExporter (config, resourceAttributes) { +function createOtlpTraceExporter (config) { return new OtlpHttpTraceExporter( - config.otelTracesUrl, - config.otelTracesHeaders, - config.otelTracesTimeout, - resourceAttributes + config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT, + config.OTEL_EXPORTER_OTLP_TRACES_HEADERS, + config.OTEL_EXPORTER_OTLP_TRACES_TIMEOUT, + buildResourceAttributes(config) ) } diff --git a/packages/dd-trace/src/opentelemetry/trace/otlp_http_trace_exporter.js b/packages/dd-trace/src/opentelemetry/trace/otlp_http_trace_exporter.js index 6bdfc110f80..8c4256b14d3 100644 --- a/packages/dd-trace/src/opentelemetry/trace/otlp_http_trace_exporter.js +++ b/packages/dd-trace/src/opentelemetry/trace/otlp_http_trace_exporter.js @@ -8,13 +8,20 @@ const OtlpTraceTransformer = require('./otlp_transformer') /** * OtlpHttpTraceExporter exports DD-formatted spans via OTLP over HTTP/JSON. 
* - * This implementation follows the OTLP HTTP v1.7.0 specification: + * This implementation follows the OTLP HTTP specification: * https://opentelemetry.io/docs/specs/otlp/#otlphttp * * It receives DD-formatted spans (from span_format.js), transforms them * to OTLP ExportTraceServiceRequest JSON format, and sends them to the * configured OTLP endpoint via HTTP POST. * + * TODO: Add batch handling similar to the OpenTelemetry SDK Batch Processor + * (https://opentelemetry.io/docs/specs/otel/trace/sdk/#batching-processor). + * Currently each finished trace is sent as its own HTTP request, which is + * unsuitable for high-traffic production environments. The config values + * `otelBatchTimeout`, `otelMaxExportBatchSize`, and `otelMaxQueueSize` + * (OTEL_BSP_*) are already defined and should drive that implementation. + * * @class OtlpHttpTraceExporter * @augments OtlpHttpExporterBase */ @@ -25,12 +32,13 @@ class OtlpHttpTraceExporter extends OtlpHttpExporterBase { * Creates a new OtlpHttpTraceExporter instance. * * @param {string} url - OTLP endpoint URL - * @param {string} headers - Additional HTTP headers as comma-separated key=value string + * @param {Record|undefined} headers - Additional HTTP headers parsed from the + * corresponding `OTEL_EXPORTER_OTLP_*_HEADERS` env by the MAP parser. 
* @param {number} timeout - Request timeout in milliseconds * @param {import('@opentelemetry/api').Attributes} resourceAttributes - Resource attributes */ constructor (url, headers, timeout, resourceAttributes) { - super(url, headers, timeout, 'http/json', '/v1/traces', 'traces') + super(url, headers, timeout, 'http/json', 'traces') this.#transformer = new OtlpTraceTransformer(resourceAttributes) } diff --git a/packages/dd-trace/src/opentelemetry/trace/otlp_transformer.js b/packages/dd-trace/src/opentelemetry/trace/otlp_transformer.js index 11d4710ca9e..950e6ac4b04 100644 --- a/packages/dd-trace/src/opentelemetry/trace/otlp_transformer.js +++ b/packages/dd-trace/src/opentelemetry/trace/otlp_transformer.js @@ -3,6 +3,7 @@ const OtlpTransformerBase = require('../otlp/otlp_transformer_base') const { getProtobufTypes } = require('../otlp/protobuf_loader') const { VERSION } = require('../../../../../version') +const id = require('../../id') const { protoSpanKind } = getProtobufTypes() const SPAN_KIND_UNSPECIFIED = protoSpanKind.values.SPAN_KIND_UNSPECIFIED @@ -12,11 +13,28 @@ const SPAN_KIND_CLIENT = protoSpanKind.values.SPAN_KIND_CLIENT const SPAN_KIND_PRODUCER = protoSpanKind.values.SPAN_KIND_PRODUCER const SPAN_KIND_CONSUMER = protoSpanKind.values.SPAN_KIND_CONSUMER +// Cached zero Identifier used to detect zero IDs without re-allocating per span. 
+const ZERO_ID = id('0') + /** + * @typedef {import('../../id').Identifier} Identifier + * + * @typedef {object} DDSpanLink + * @property {string} trace_id - Hex-encoded trace ID + * @property {string} span_id - Hex-encoded span ID + * @property {Record} [attributes] - Link attributes + * @property {number} [flags] - Trace flags + * @property {string} [tracestate] - W3C trace state + * + * @typedef {object} DDSpanEvent + * @property {string} name - Event name + * @property {number} time_unix_nano - Event time in nanoseconds since epoch + * @property {Record} [attributes] - Event attributes + * * @typedef {object} DDFormattedSpan - * @property {import('../../id')} trace_id - DD Identifier for trace ID - * @property {import('../../id')} span_id - DD Identifier for span ID - * @property {import('../../id')} parent_id - DD Identifier for parent span ID + * @property {Identifier} trace_id - DD Identifier for trace ID + * @property {Identifier} span_id - DD Identifier for span ID + * @property {Identifier} parent_id - DD Identifier for parent span ID * @property {string} name - Span operation name * @property {string} resource - Resource name * @property {string} [service] - Service name @@ -24,9 +42,10 @@ const SPAN_KIND_CONSUMER = protoSpanKind.values.SPAN_KIND_CONSUMER * @property {number} error - Error flag (0 or 1) * @property {{[key: string]: string}} meta - String key-value tags * @property {{[key: string]: number}} metrics - Numeric key-value tags + * @property {{[key: string]: object}} [meta_struct] - Structured tags (JSON-serialized, bytes in protobuf) * @property {number} start - Start time in nanoseconds since epoch * @property {number} duration - Duration in nanoseconds - * @property {object[]} [span_events] - Span events + * @property {DDSpanEvent[]} [span_events] - Span events */ // Map DD span.kind string values to OTLP SpanKind numeric values @@ -51,7 +70,7 @@ const EXCLUDED_META_KEYS = new Set([ /** * OtlpTraceTransformer transforms DD-formatted spans 
to OTLP trace JSON format. * - * This implementation follows the OTLP Trace v1.7.0 Data Model specification: + * This implementation follows the OTLP trace data model: * https://opentelemetry.io/docs/specs/otlp/#trace-data-model * * It receives DD-formatted spans (from span_format.js) and produces @@ -119,7 +138,7 @@ class OtlpTraceTransformer extends OtlpTransformerBase { return { traceId: this.#idToBytes(span.trace_id, 16), spanId: this.#idToBytes(span.span_id, 8), - parentSpanId: (parentId && !this.#isZeroId(parentId)) ? this.#idToBytes(parentId, 8) : undefined, + parentSpanId: (parentId && !parentId.equals(ZERO_ID)) ? this.#idToBytes(parentId, 8) : undefined, name: span.resource, kind: this.#mapSpanKind(span.meta?.['span.kind']), startTimeUnixNano: span.start, @@ -178,7 +197,10 @@ class OtlpTraceTransformer extends OtlpTransformerBase { } } - // Add meta_struct as bytesValue attributes (JSON-serialized, base64-encoded per proto JSON mapping) + // TODO: meta_struct values are logically raw bytes. The OTLP http/json spec encodes the bytesValue + // field as base64, but when http/protobuf or gRPC support is added the payload should be sent as + // raw bytes directly (no JSON.stringify + base64). The backend decoding side will need to be + // updated in parallel to accept the unencoded bytes. if (span.meta_struct) { for (const [key, value] of Object.entries(span.meta_struct)) { const bytes = Buffer.from(JSON.stringify(value)) @@ -202,33 +224,40 @@ class OtlpTraceTransformer extends OtlpTransformerBase { /** * Maps DD span error state to an OTLP Status object. + * Combines error.type and error.message when both are present so error type + * information is preserved on the OTel side. 
* * @param {DDFormattedSpan} span - DD-formatted span * @returns {object} OTLP Status object with code and message */ #mapStatus (span) { - if (span.error === 1) { - return { - code: STATUS_CODE_ERROR, - message: span.meta?.['error.message'] || '', - } + if (span.error !== 1) { + return { code: STATUS_CODE_UNSET, message: '' } + } + const errorType = span.meta?.['error.type'] + const errorMessage = span.meta?.['error.message'] + let message = '' + if (errorType && errorMessage) { + message = `${errorType}: ${errorMessage}` + } else if (errorType) { + message = errorType + } else if (errorMessage) { + message = errorMessage } - return { code: STATUS_CODE_UNSET, message: '' } + return { code: STATUS_CODE_ERROR, message } } /** * Transforms a DD span event to an OTLP Event object. * - * @param {object} event - DD span event with name, time_unix_nano, and attributes + * @param {DDSpanEvent} event - DD span event * @returns {object} OTLP Event object */ #transformEvent (event) { return { timeUnixNano: event.time_unix_nano, name: event.name || '', - attributes: event.attributes && Object.keys(event.attributes).length > 0 - ? this.transformAttributes(event.attributes) - : [], + attributes: this.transformAttributes(event.attributes ?? {}), droppedAttributesCount: 0, } } @@ -257,7 +286,7 @@ class OtlpTraceTransformer extends OtlpTransformerBase { /** * Transforms a single DD span link to an OTLP Link object. * - * @param {object} link - DD span link with trace_id, span_id, attributes, flags, tracestate + * @param {DDSpanLink} link - DD span link * @returns {object} OTLP Link object */ #transformLink (link) { @@ -265,9 +294,7 @@ class OtlpTraceTransformer extends OtlpTransformerBase { traceId: this.#hexToBytes(link.trace_id, 16), spanId: this.#hexToBytes(link.span_id, 8), traceState: link.tracestate || '', - attributes: link.attributes && Object.keys(link.attributes).length > 0 - ? 
this.transformAttributes(link.attributes) - : [], + attributes: this.transformAttributes(link.attributes ?? {}), droppedAttributesCount: 0, flags: link.flags, } @@ -278,7 +305,7 @@ class OtlpTraceTransformer extends OtlpTransformerBase { * Pads with leading zeros if the identifier buffer is shorter than the target. * Per the OTLP http/json spec, trace-ids and span-ids must be hex-encoded strings. * - * @param {object} identifier - DD Identifier object with toBuffer() method + * @param {Identifier} identifier - DD Identifier * @param {number} targetLength - Target byte length (16 for trace ID, 8 for span ID) * @returns {string} Hex-encoded string of the specified length */ @@ -290,29 +317,12 @@ class OtlpTraceTransformer extends OtlpTransformerBase { if (buffer.length > targetLength) { return Buffer.from(buffer.slice(buffer.length - targetLength)).toString('hex') } - // Pad with leading zeros to reach target length + // Pad with leading zeros to reach target length. const result = Buffer.alloc(targetLength) - const offset = targetLength - buffer.length - for (let i = 0; i < buffer.length; i++) { - result[offset + i] = buffer[i] - } + Buffer.from(buffer).copy(result, targetLength - buffer.length) return result.toString('hex') } - /** - * Checks if a DD Identifier represents a zero ID (all bytes are 0). - * - * @param {object} identifier - DD Identifier object with toBuffer() method - * @returns {boolean} True if the identifier is all zeros - */ - #isZeroId (identifier) { - const buffer = identifier.toBuffer() - for (let i = 0; i < buffer.length; i++) { - if (buffer[i] !== 0) return false - } - return true - } - /** * Normalizes a hex string to the specified byte length. * Pads with leading zeros if the hex string is shorter than expected. 
diff --git a/packages/dd-trace/src/opentelemetry/tracer.js b/packages/dd-trace/src/opentelemetry/tracer.js index 2272894f233..4cecdce7a4b 100644 --- a/packages/dd-trace/src/opentelemetry/tracer.js +++ b/packages/dd-trace/src/opentelemetry/tracer.js @@ -81,10 +81,10 @@ class Tracer { } _convertOtelContextToDatadog (traceId, spanId, traceFlag, ts, meta = {}) { - const origin = null + let origin = null let samplingPriority = traceFlag - ts = ts?.traceparent || null + ts = ts?.traceparent if (ts) { // Use TraceState.fromString to parse the tracestate header @@ -101,19 +101,17 @@ class Tracer { // Assuming ddTraceStateData is now a Map or similar structure containing Datadog trace state data // Extract values as needed, similar to the original logic const samplingPriorityTs = ddTraceStateData.get('s') - const origin = ddTraceStateData.get('o') + origin = ddTraceStateData.get('o') ?? null // Convert Map to object for meta const otherPropagatedTags = Object.fromEntries(ddTraceStateData.entries()) // Update meta and samplingPriority based on extracted values Object.assign(meta, otherPropagatedTags) - samplingPriority = TextMapPropagator._getSamplingPriority( - traceFlag, - Number.parseInt(samplingPriorityTs, 10), - origin - ) + // Guard against an undefined/empty `s:` field that would result in NaN. + const tracestateSamplingPriority = samplingPriorityTs ? 
Math.trunc(samplingPriorityTs) : undefined + samplingPriority = TextMapPropagator._getSamplingPriority(traceFlag, tracestateSamplingPriority, origin) } else { - log.debug('no dd list member in tracestate from incoming request:', ts) + log.debug('No dd list member in tracestate from incoming request:', ts) } } @@ -121,8 +119,8 @@ class Tracer { traceId: id(traceId, 16), spanId: id(), tags: meta, parentId: id(spanId, 16), }) - spanContext._sampling = { priority: samplingPriority } - spanContext._trace = { origin } + spanContext._ddContext._sampling = { priority: samplingPriority } + spanContext._ddContext._trace = { ...spanContext._ddContext._trace, origin } return spanContext } diff --git a/packages/dd-trace/src/opentracing/propagation/text_map.js b/packages/dd-trace/src/opentracing/propagation/text_map.js index 50efb42c9e0..41055cdf31b 100644 --- a/packages/dd-trace/src/opentracing/propagation/text_map.js +++ b/packages/dd-trace/src/opentracing/propagation/text_map.js @@ -802,18 +802,25 @@ class TextMapPropagator { return spanContext._traceId.toString(16) } - static _getSamplingPriority (traceparentSampled, tracestateSamplingPriority, origin = null) { + /** + * @param {number} traceparentSampled + * @param {number|undefined} tracestateSamplingPriority + * @param {string|null} origin + * @returns {import('../../priority_sampler').SamplingPriority} + */ + static _getSamplingPriority (traceparentSampled, tracestateSamplingPriority, origin) { const fromRumWithoutPriority = !tracestateSamplingPriority && origin === 'rum' - let samplingPriority - if (!fromRumWithoutPriority && traceparentSampled === 0 && - (!tracestateSamplingPriority || tracestateSamplingPriority >= 0)) { - samplingPriority = 0 - } else if (!fromRumWithoutPriority && traceparentSampled === 1 && - (!tracestateSamplingPriority || tracestateSamplingPriority < 0)) { - samplingPriority = 1 - } else { - samplingPriority = tracestateSamplingPriority + let samplingPriority = + /** @type 
{import('../../priority_sampler').SamplingPriority} */ (tracestateSamplingPriority ?? AUTO_KEEP) + if (!fromRumWithoutPriority) { + if (traceparentSampled === 0 && + (!tracestateSamplingPriority || tracestateSamplingPriority >= 0)) { + samplingPriority = AUTO_REJECT + } else if (traceparentSampled === 1 && + (!tracestateSamplingPriority || tracestateSamplingPriority < 0)) { + samplingPriority = AUTO_KEEP + } } return samplingPriority diff --git a/packages/dd-trace/src/opentracing/span.js b/packages/dd-trace/src/opentracing/span.js index 9d1e95b9a19..af2201c071a 100644 --- a/packages/dd-trace/src/opentracing/span.js +++ b/packages/dd-trace/src/opentracing/span.js @@ -149,7 +149,7 @@ class DatadogSpan { } /** - * @returns {import('../priority_sampler').DatadogSpanContext} + * @returns {import('./span_context')} */ context () { return this._spanContext diff --git a/packages/dd-trace/src/opentracing/tracer.js b/packages/dd-trace/src/opentracing/tracer.js index a4c04bbd7e5..53ced38fd48 100644 --- a/packages/dd-trace/src/opentracing/tracer.js +++ b/packages/dd-trace/src/opentracing/tracer.js @@ -20,7 +20,7 @@ const REFERENCE_CHILD_OF = 'child_of' const REFERENCE_FOLLOWS_FROM = 'follows_from' class DatadogTracer { - constructor (config, prioritySampler, exporter) { + constructor (config, prioritySampler) { this._config = config this._service = config.service this._version = config.version @@ -29,8 +29,15 @@ class DatadogTracer { this._debug = config.debug this._prioritySampler = prioritySampler ?? new PrioritySampler(config.env, config.sampler) - if (exporter) { - this._exporter = exporter + // OTEL_TRACES_EXPORTER=otlp should not replace the Test Optimization + // exporter when the tracer is running in Test Optimization mode. Test spans + // (test_session/test_module/ test_suite/test) belong on the citestcycle + // endpoint, not on an OTLP traces endpoint — otherwise users with OTEL_* + // vars set in their environment (e.g. 
for a separate telemetry integration) + // silently lose all test spans. + if (config.OTEL_TRACES_EXPORTER === 'otlp' && !config.isCiVisibility) { + const { createOtlpTraceExporter } = require('../opentelemetry/trace') + this._exporter = createOtlpTraceExporter(config) } else { const Exporter = getExporter(config.experimental.exporter) this._exporter = new Exporter(config, this._prioritySampler) @@ -43,7 +50,7 @@ class DatadogTracer { this._propagators = { [formats.TEXT_MAP]: new TextMapPropagator(config), [formats.HTTP_HEADERS]: new HttpPropagator(config), - [formats.BINARY]: new BinaryPropagator(config), + [formats.BINARY]: new BinaryPropagator(), [formats.LOG]: new LogPropagator(config), [formats.TEXT_MAP_DSM]: new DSMTextMapPropagator(config), } @@ -116,7 +123,7 @@ class DatadogTracer { * Get the span context from a span or a span context. * * @param {Span|SpanContext} spanContext - * @returns {SpanContext} + * @returns {SpanContext|null} */ function getContext (spanContext) { if (spanContext instanceof Span) { diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js index 44d84e67324..d6c2ef7df70 100644 --- a/packages/dd-trace/src/plugins/index.js +++ b/packages/dd-trace/src/plugins/index.js @@ -6,6 +6,7 @@ const plugins = { get '@aws-sdk/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') }, get '@azure/event-hubs' () { return require('../../../datadog-plugin-azure-event-hubs/src') }, get '@azure/functions' () { return require('../../../datadog-plugin-azure-functions/src') }, + get '@modelcontextprotocol/sdk' () { return require('../../../datadog-plugin-modelcontextprotocol-sdk/src') }, get 'durable-functions' () { return require('../../../datadog-plugin-azure-durable-functions/src') }, get '@azure/service-bus' () { return require('../../../datadog-plugin-azure-service-bus/src') }, get '@cucumber/cucumber' () { return require('../../../datadog-plugin-cucumber/src') }, diff --git 
a/packages/dd-trace/src/plugins/util/url.js b/packages/dd-trace/src/plugins/util/url.js index e3ee483e260..3eea3b7759d 100644 --- a/packages/dd-trace/src/plugins/util/url.js +++ b/packages/dd-trace/src/plugins/util/url.js @@ -31,7 +31,8 @@ function extractURL (req) { } function getProtocol (req) { - return (req.socket?.encrypted || req.connection?.encrypted) ? 'https' : 'http' + // Do not check deprecated `req.connection` property. + return req.socket?.encrypted ? 'https' : 'http' } /** diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index 7360313cf8f..66f0b2737c3 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -1,6 +1,5 @@ 'use strict' -const { getValueFromEnvSources } = require('./config/helper') const NoopProxy = require('./noop/proxy') const DatadogTracer = require('./tracer') const getConfig = require('./config') @@ -213,7 +212,7 @@ class Tracer extends NoopProxy { this._testApiManualPlugin.configure({ ...config, enabled: true }, false) } if (config.ciVisAgentlessLogSubmissionEnabled) { - if (getValueFromEnvSources('DD_API_KEY')) { + if (config.apiKey) { const LogSubmissionPlugin = require('./ci-visibility/log-submission/log-submission-plugin') const automaticLogPlugin = new LogSubmissionPlugin(this) automaticLogPlugin.configure({ ...config, enabled: true }) @@ -279,12 +278,7 @@ class Tracer extends NoopProxy { const prioritySampler = config.apmTracingEnabled === false ? 
require('./standalone').configure(config) : undefined - let otlpExporter - if (config.otelTracesEnabled) { - const { buildResourceAttributes, createOtlpTraceExporter } = require('./opentelemetry/trace') - otlpExporter = createOtlpTraceExporter(config, buildResourceAttributes(config)) - } - this._tracer = new DatadogTracer(config, prioritySampler, otlpExporter) + this._tracer = new DatadogTracer(config, prioritySampler) this.dataStreamsCheckpointer = this._tracer.dataStreamsCheckpointer lazyProxy(this, 'appsec', () => require('./appsec/sdk'), this._tracer, config) lazyProxy(this, 'llmobs', () => require('./llmobs/sdk'), this._tracer, this._modules.llmobs, config) diff --git a/packages/dd-trace/src/service-naming/schemas/v0/web.js b/packages/dd-trace/src/service-naming/schemas/v0/web.js index de40b22e10c..89e379772b2 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/web.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/web.js @@ -35,6 +35,10 @@ const web = { serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService, serviceSource: optionServiceSource, }, + 'modelcontextprotocol-sdk': { + opName: () => 'mcp.tool.call', + serviceName: ({ pluginService, tracerService }) => pluginService || tracerService, + }, aws: { opName: () => 'aws.request', serviceName: awsServiceV0, diff --git a/packages/dd-trace/src/service-naming/schemas/v1/web.js b/packages/dd-trace/src/service-naming/schemas/v1/web.js index 645ae59117b..c6d09566481 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/web.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/web.js @@ -22,6 +22,10 @@ const web = { serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService, serviceSource: optionServiceSource, }, + 'modelcontextprotocol-sdk': { + opName: () => 'mcp.tool.call', + serviceName: identityService, + }, fetch: { opName: () => 'http.client.request', serviceName: httpPluginClientService, diff --git 
a/packages/dd-trace/src/span_processor.js b/packages/dd-trace/src/span_processor.js index 15a6560e490..780a98b057d 100644 --- a/packages/dd-trace/src/span_processor.js +++ b/packages/dd-trace/src/span_processor.js @@ -5,7 +5,6 @@ const spanFormat = require('./span_format') const SpanSampler = require('./span_sampler') const GitMetadataTagger = require('./git_metadata_tagger') const processTags = require('./process-tags') -const { getValueFromEnvSources } = require('./config/helper') const startedSpans = new WeakSet() const finishedSpans = new WeakSet() @@ -88,7 +87,7 @@ class SpanProcessor { } _erase (trace, active) { - if (getValueFromEnvSources('DD_TRACE_EXPERIMENTAL_STATE_TRACKING') === 'true') { + if (this._config.DD_TRACE_EXPERIMENTAL_STATE_TRACKING) { const started = new Set() const startedIds = new Set() const finished = new Set() diff --git a/packages/dd-trace/src/tagger.js b/packages/dd-trace/src/tagger.js index 432962b3ae8..dce0b91a143 100644 --- a/packages/dd-trace/src/tagger.js +++ b/packages/dd-trace/src/tagger.js @@ -8,7 +8,7 @@ function addNonEmpty (carrier, key, value) { } } -function add (carrier, keyValuePairs) { +function add (carrier, keyValuePairs, valueSeparator = ':') { if (!carrier) return if (typeof keyValuePairs === 'string') { @@ -18,7 +18,7 @@ function add (carrier, keyValuePairs) { for (let i = 0; i < keyValuePairs.length; i++) { const char = keyValuePairs[i] - if (char === ':') { + if (char === valueSeparator) { if (valueStart === 0) { valueStart = i } diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js index ef0d86634df..02851a79c45 100644 --- a/packages/dd-trace/src/telemetry/send-data.js +++ b/packages/dd-trace/src/telemetry/send-data.js @@ -2,8 +2,6 @@ const request = require('../exporters/common/request') const log = require('../log') -const { isTrue } = require('../util') -const { getValueFromEnvSources } = require('../config/helper') /** * @typedef {Record} 
TelemetryPayloadObject @@ -139,16 +137,16 @@ function sendData (config, application, host, reqType, payload = {}, cb = () => hostname, port, isCiVisibility, + DD_CIVISIBILITY_AGENTLESS_ENABLED, } = config let url = config.url - const isCiVisibilityAgentlessMode = isCiVisibility && - isTrue(getValueFromEnvSources('DD_CIVISIBILITY_AGENTLESS_ENABLED')) + const isCiVisibilityAgentlessMode = isCiVisibility && DD_CIVISIBILITY_AGENTLESS_ENABLED if (isCiVisibilityAgentlessMode) { try { - url = url || new URL(getAgentlessTelemetryEndpoint(config.site)) + url ||= new URL(getAgentlessTelemetryEndpoint(config.site)) } catch (err) { log.error('Telemetry endpoint url is invalid', err) // No point to do the request if the URL is invalid @@ -178,14 +176,14 @@ function sendData (config, application, host, reqType, payload = {}, cb = () => }) request(data, options, (error) => { - if (error && getValueFromEnvSources('DD_API_KEY') && config.site) { + if (error && config.apiKey && config.site) { if (agentTelemetry) { log.warn('Agent telemetry failed, started agentless telemetry') agentTelemetry = false } // figure out which data center to send to const backendUrl = getAgentlessTelemetryEndpoint(config.site) - const backendHeader = { ...options.headers, 'DD-API-KEY': getValueFromEnvSources('DD_API_KEY') } + const backendHeader = { ...options.headers, 'DD-API-KEY': config.apiKey } const backendOptions = { ...options, url: backendUrl, diff --git a/packages/dd-trace/src/tracer.js b/packages/dd-trace/src/tracer.js index 4e97a20638a..556d18652d6 100644 --- a/packages/dd-trace/src/tracer.js +++ b/packages/dd-trace/src/tracer.js @@ -17,8 +17,8 @@ const SERVICE_NAME = tags.SERVICE_NAME const MEASURED = tags.MEASURED class DatadogTracer extends Tracer { - constructor (config, prioritySampler, exporter) { - super(config, prioritySampler, exporter) + constructor (config, prioritySampler) { + super(config, prioritySampler) this._dataStreamsProcessor = new DataStreamsProcessor(config) 
this._dataStreamsManager = new DataStreamsManager(this._dataStreamsProcessor) this.dataStreamsCheckpointer = new DataStreamsCheckpointer(this) diff --git a/packages/dd-trace/test/aiguard/index.spec.js b/packages/dd-trace/test/aiguard/index.spec.js index 9a4b9197756..be0eaac4227 100644 --- a/packages/dd-trace/test/aiguard/index.spec.js +++ b/packages/dd-trace/test/aiguard/index.spec.js @@ -17,6 +17,7 @@ const telemetryMetrics = require('../../src/telemetry/metrics') const appsecNamespace = telemetryMetrics.manager.namespace('appsec') const { USER_KEEP } = require('../../../../ext/priority') const { SAMPLING_MECHANISM_AI_GUARD, DECISION_MAKER_KEY } = require('../../src/constants') +const { AI_GUARD_EVENT_TAG_KEY } = require('../../src/aiguard/tags') describe('AIGuard SDK', () => { const config = { @@ -468,6 +469,25 @@ describe('AIGuard SDK', () => { assert.strictEqual(result.reason, 'AI Guard is not enabled') }) + it('test ai_guard.event tag on root span', async () => { + mockFetch({ + body: { data: { attributes: { action: 'ALLOW', reason: 'OK', is_blocking_enabled: false } } }, + }) + await tracer.trace('root', async () => { + await aiguard.evaluate(prompt, { block: false }) + }) + await agent.assertSomeTraces(traces => { + assert.ok(traces[0].length === 2, 'Trace should contain two spans root + ai_guard') + for (const span of traces[0]) { + if (span.name === 'root') { + assert.strictEqual(span.meta[AI_GUARD_EVENT_TAG_KEY], 'true') + } else { + assert.ok(!Object.hasOwn(span.meta, AI_GUARD_EVENT_TAG_KEY)) + } + } + }) + }) + const sites = [ { site: 'datad0g.com', endpoint: 'https://app.datad0g.com/api/v2/ai-guard' }, { site: 'datadoghq.com', endpoint: 'https://app.datadoghq.com/api/v2/ai-guard' }, diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mquery.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mquery.plugin.spec.js index 058b75a76d7..db9d1aa22b8 100644 --- 
a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mquery.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mquery.plugin.spec.js @@ -12,7 +12,9 @@ const agent = require('../../../plugins/agent') const { withVersions } = require('../../../setup/mocha') const { prepareTestServerForIastInExpress } = require('../utils') -describe('nosql injection detection with mquery', () => { +// TODO(APPSEC-62431): re-enable once duplicate NOSQL_MONGODB_INJECTION +// detection (N+1 !== N) is fixed +describe.skip('nosql injection detection with mquery', () => { // https://github.com/fiznool/express-mongo-sanitize/issues/200 withVersions('mquery', 'express', '>4.18.0 <5.0.0', expressVersion => { withVersions('mquery', 'mongodb', mongodbVersion => { diff --git a/packages/dd-trace/test/ci-visibility/exporters/agentless/di-logs-writer.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agentless/di-logs-writer.spec.js index 44f60de43d7..a63d8406360 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agentless/di-logs-writer.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agentless/di-logs-writer.spec.js @@ -4,21 +4,25 @@ const assert = require('node:assert/strict') const { describe, it, beforeEach, afterEach } = require('mocha') const context = describe +const proxyquire = require('proxyquire') const sinon = require('sinon') const nock = require('nock') require('../../../../../dd-trace/test/setup/core') -const DynamicInstrumentationLogsWriter = require('../../../../src/ci-visibility/exporters/agentless/di-logs-writer') const log = require('../../../../src/log') +const DynamicInstrumentationLogsWriterWithApiKey = proxyquire( + '../../../../src/ci-visibility/exporters/agentless/di-logs-writer', + { '../../../config': () => ({ apiKey: '1' }) } +) +const DynamicInstrumentationLogsWriter = require('../../../../src/ci-visibility/exporters/agentless/di-logs-writer') + describe('Test 
Visibility DI Writer', () => { beforeEach(() => { nock.cleanAll() - process.env.DD_API_KEY = '1' }) afterEach(() => { - delete process.env.DD_API_KEY sinon.restore() }) @@ -31,7 +35,7 @@ describe('Test Visibility DI Writer', () => { }) .reply(202) - const logsWriter = new DynamicInstrumentationLogsWriter({ url: 'http://www.example.com' }) + const logsWriter = new DynamicInstrumentationLogsWriterWithApiKey({ url: 'http://www.example.com' }) logsWriter.append({ message: 'test' }) logsWriter.append({ message: 'test2' }) @@ -49,7 +53,7 @@ describe('Test Visibility DI Writer', () => { .post('/api/v2/logs') .reply(500) - const logsWriter = new DynamicInstrumentationLogsWriter({ url: 'http://www.example.com' }) + const logsWriter = new DynamicInstrumentationLogsWriterWithApiKey({ url: 'http://www.example.com' }) logsWriter.append({ message: 'test5' }) logsWriter.append({ message: 'test6' }) @@ -64,8 +68,6 @@ describe('Test Visibility DI Writer', () => { context('agent based', () => { it('can send logs to the debugger endpoint in the agent', (done) => { - delete process.env.DD_API_KEY - const scope = nock('http://www.example.com') .post('/debugger/v1/input', body => { assert.deepStrictEqual(body, [{ message: 'test3' }, { message: 'test4' }]) @@ -85,8 +87,6 @@ describe('Test Visibility DI Writer', () => { }) it('logs an error if the request fails', (done) => { - delete process.env.DD_API_KEY - const logErrorSpy = sinon.spy(log, 'error') const scope = nock('http://www.example.com') diff --git a/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js index fccd5e1f418..d2dbbeeba06 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js @@ -5,6 +5,7 @@ const cp = require('node:child_process') const { describe, it, beforeEach, afterEach, before, after } = require('mocha') 
const context = describe +const proxyquire = require('proxyquire').noPreserveCache() const sinon = require('sinon') const nock = require('nock') @@ -12,6 +13,22 @@ require('../../../../../dd-trace/test/setup/core') const AgentlessCiVisibilityExporter = require('../../../../src/ci-visibility/exporters/agentless') const DynamicInstrumentationLogsWriter = require('../../../../src/ci-visibility/exporters/agentless/di-logs-writer') +// Used by the negative "no API key" test to inject a stubbed getConfig singleton into +// the request chain. The stubbed singleton still pulls every other field from the real +// tracer Config so the rest of the exporter behaves normally. +function loadAgentlessExporterWithFakeConfig (fakeConfig) { + const realConfig = require('../../../../src/config')() + const getLibraryConfiguration = proxyquire('../../../../src/ci-visibility/requests/get-library-configuration', { + '../../config': () => ({ ...realConfig, ...fakeConfig }), + }) + const CiVisibilityExporter = proxyquire('../../../../src/ci-visibility/exporters/ci-visibility-exporter', { + '../requests/get-library-configuration': getLibraryConfiguration, + }) + return proxyquire('../../../../src/ci-visibility/exporters/agentless', { + '../ci-visibility-exporter': CiVisibilityExporter, + }) +} + describe('CI Visibility Agentless Exporter', () => { const url = new URL('http://www.example.com') @@ -143,8 +160,8 @@ describe('CI Visibility Agentless Exporter', () => { }) it('will not allow skippable request if ITR configuration fails', (done) => { - // request will fail - delete process.env.DD_API_KEY + // Stub apiKey to be missing so the request is never sent. 
+ const AgentlessCiVisibilityExporter = loadAgentlessExporterWithFakeConfig({ apiKey: undefined }) const scope = nock('http://www.example.com') .post('/api/v2/libraries/tests/services/setting') @@ -162,10 +179,10 @@ describe('CI Visibility Agentless Exporter', () => { url, isGitUploadEnabled: true, isIntelligentTestRunnerEnabled: true, tags: {}, }) agentlessExporter.sendGitMetadata = () => { - return new Promise(resolve => { + return /** @type {Promise} */ (new Promise(resolve => { agentlessExporter._resolveGit() resolve() - }) + })) } agentlessExporter.getLibraryConfiguration({}, (err) => { @@ -176,7 +193,6 @@ describe('CI Visibility Agentless Exporter', () => { ) ) assert.strictEqual(agentlessExporter.shouldRequestSkippableSuites(), false) - process.env.DD_API_KEY = '1' done() }) }) diff --git a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js index 10014b8dc23..cad595ac71d 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js @@ -32,16 +32,14 @@ describe('git_metadata', () => { let generatePackFilesForCommitsStub let isShallowRepositoryStub let unshallowRepositoryStub + let fakeConfig before(() => { - process.env.DD_API_KEY = 'api-key' fs.writeFileSync(temporaryPackFile, '') fs.writeFileSync(secondTemporaryPackFile, '') }) after(() => { - delete process.env.DD_API_KEY - delete process.env.DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED fs.unlinkSync(temporaryPackFile) fs.unlinkSync(secondTemporaryPackFile) }) @@ -55,6 +53,8 @@ describe('git_metadata', () => { generatePackFilesForCommitsStub = sinon.stub().returns([temporaryPackFile]) + fakeConfig = { apiKey: 'api-key', DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED: true } + gitMetadata = proxyquire('../../../../src/ci-visibility/exporters/git/git_metadata', { '../../../plugins/util/git': { getLatestCommits: 
getLatestCommitsStub, @@ -64,6 +64,7 @@ describe('git_metadata', () => { isShallowRepository: isShallowRepositoryStub, unshallowRepository: unshallowRepositoryStub, }, + '../../../config': () => fakeConfig, }) }) @@ -104,7 +105,7 @@ describe('git_metadata', () => { }) it('should not unshallow if the parameter to enable unshallow is false', (done) => { - process.env.DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED = false + fakeConfig.DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED = false const scope = nock('https://api.test.com') .post('/api/v2/git/repository/search_commits') .reply(200, JSON.stringify({ data: [] })) diff --git a/packages/dd-trace/test/config/index.spec.js b/packages/dd-trace/test/config/index.spec.js index c14152b8a77..ef634dac9e9 100644 --- a/packages/dd-trace/test/config/index.spec.js +++ b/packages/dd-trace/test/config/index.spec.js @@ -320,11 +320,13 @@ describe('Config', () => { assertObjectContains(config, { OTEL_EXPORTER_OTLP_ENDPOINT: 'http://collector:4318', - otelLogsUrl: 'http://collector:4318', - otelMetricsUrl: 'http://collector:4318', - otelHeaders: 'x-test=value', - otelLogsHeaders: 'x-test=value', - otelMetricsHeaders: 'x-test=value', + OTEL_EXPORTER_OTLP_TRACES_ENDPOINT: 'http://collector:4318/v1/traces', + otelLogsUrl: 'http://collector:4318/v1/logs', + otelMetricsUrl: 'http://collector:4318/v1/metrics', + OTEL_EXPORTER_OTLP_TRACES_HEADERS: { 'x-test': 'value' }, + OTEL_EXPORTER_OTLP_HEADERS: { 'x-test': 'value' }, + OTEL_EXPORTER_OTLP_LOGS_HEADERS: { 'x-test': 'value' }, + OTEL_EXPORTER_OTLP_METRICS_HEADERS: { 'x-test': 'value' }, otelProtocol: 'grpc', otelLogsProtocol: 'grpc', otelMetricsProtocol: 'grpc', @@ -351,20 +353,32 @@ describe('Config', () => { }) // TODO: update default when adding grpc support - it('should set default otelTracesUrl to localhost', () => { + it('should default OTLP endpoints to the DD agent host with the signal subpath', () => { delete process.env.OTEL_EXPORTER_OTLP_ENDPOINT delete 
process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + delete process.env.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + delete process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT delete process.env.DD_AGENT_HOST const config = getConfig() - assert.strictEqual(config.otelTracesUrl, 'http://localhost:4318/v1/traces') + // Host follows the DD agent (default 127.0.0.1); the signal subpath is baked into the default + // so telemetry reports the full URL users will hit. + assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT, 'http://127.0.0.1:4318/v1/traces') + assert.strictEqual(config.otelMetricsUrl, 'http://127.0.0.1:4318/v1/metrics') + assert.strictEqual(config.otelLogsUrl, 'http://127.0.0.1:4318/v1/logs') }) - it('should set otelTracesUrl using DD_AGENT_HOST', () => { + it('should default OTLP endpoints to the agent host when DD_AGENT_HOST is set', () => { delete process.env.OTEL_EXPORTER_OTLP_ENDPOINT delete process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + delete process.env.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + delete process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT process.env.DD_AGENT_HOST = 'myHostName' const config = getConfig() - assert.strictEqual(config.otelTracesUrl, `http://${process.env.DD_AGENT_HOST}:4318/v1/traces`) + // In the unified-agent model, OTLP lives on the same host as the DD agent (different port), + // so DD_AGENT_HOST drives the default OTLP host too. 
+ assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT, 'http://myHostName:4318/v1/traces') + assert.strictEqual(config.otelMetricsUrl, 'http://myHostName:4318/v1/metrics') + assert.strictEqual(config.otelLogsUrl, 'http://myHostName:4318/v1/logs') }) it('should correctly map OTEL_TRACES_SAMPLER and OTEL_TRACES_SAMPLER_ARG', () => { @@ -437,33 +451,28 @@ describe('Config', () => { assert.strictEqual(config.sampleRate, undefined) }) - it('should enable OTLP traces export when OTEL_TRACES_EXPORTER is set to otlp', () => { + it('should keep OTEL_TRACES_EXPORTER=otlp', () => { process.env.OTEL_TRACES_EXPORTER = 'otlp' const config = getConfig() - assert.strictEqual(config.otelTracesEnabled, true) + assert.strictEqual(config.OTEL_TRACES_EXPORTER, 'otlp') }) - it('should not enable OTLP traces export when OTEL_TRACES_EXPORTER is not set', () => { + it('should default OTEL_TRACES_EXPORTER to undefined when not set (opt-in)', () => { const config = getConfig() - assert.strictEqual(config.otelTracesEnabled, false) + assert.strictEqual(config.OTEL_TRACES_EXPORTER, undefined) }) it('should disable OTLP traces export when DD_TRACE_AGENT_PROTOCOL_VERSION is set', () => { process.env.OTEL_TRACES_EXPORTER = 'otlp' process.env.DD_TRACE_AGENT_PROTOCOL_VERSION = '0.5' const config = getConfig() - assert.strictEqual(config.otelTracesEnabled, false) + assert.strictEqual(config.OTEL_TRACES_EXPORTER, 'none') }) - it('should warn and fall back to http/json when OTEL_EXPORTER_OTLP_TRACES_PROTOCOL is unsupported', () => { + it('should fall back to http/json when OTEL_EXPORTER_OTLP_TRACES_PROTOCOL is unsupported', () => { process.env.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL = 'grpc' const config = getConfig() - assert.strictEqual(config.otelTracesProtocol, 'http/json') - sinon.assert.calledWith( - log.warn, - 'OTEL_EXPORTER_OTLP_TRACES_PROTOCOL=%s is not yet supported; only http/json is currently implemented', - 'grpc' - ) + assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL, 
'http/json') }) it('should not warn when OTEL_EXPORTER_OTLP_TRACES_PROTOCOL is http/json', () => { diff --git a/packages/dd-trace/test/debugger/config.spec.js b/packages/dd-trace/test/debugger/config.spec.js index ad1e9a32666..63665c35f2f 100644 --- a/packages/dd-trace/test/debugger/config.spec.js +++ b/packages/dd-trace/test/debugger/config.spec.js @@ -19,6 +19,7 @@ describe('getDebuggerConfig', function () { 'commitSHA', 'debug', 'dynamicInstrumentation', + 'env', 'hostname', 'logLevel', 'port', @@ -27,11 +28,14 @@ describe('getDebuggerConfig', function () { 'runtimeId', 'service', 'url', + 'version', + 'inputPath', ]) assertObjectContains(config, { commitSHA: tracerConfig.commitSHA, debug: tracerConfig.debug, dynamicInstrumentation: tracerConfig.dynamicInstrumentation, + env: tracerConfig.env, hostname: tracerConfig.hostname, logLevel: tracerConfig.logLevel, port: tracerConfig.port, @@ -39,6 +43,7 @@ describe('getDebuggerConfig', function () { runtimeId: tracerConfig.tags['runtime-id'], service: tracerConfig.service, url: tracerConfig.url.toString(), + version: tracerConfig.version, }) }) diff --git a/packages/dd-trace/test/debugger/devtools_client/send.spec.js b/packages/dd-trace/test/debugger/devtools_client/send.spec.js index 465ca03b3a5..a1d4256c3ad 100644 --- a/packages/dd-trace/test/debugger/devtools_client/send.spec.js +++ b/packages/dd-trace/test/debugger/devtools_client/send.spec.js @@ -8,13 +8,13 @@ const proxyquire = require('proxyquire') const sinon = require('sinon') const JSONBuffer = require('../../../src/debugger/devtools_client/json-buffer') -const { version } = require('../../../../../package.json') +const { version: debuggerVersion } = require('../../../../../package.json') const { getRequestOptions } = require('./utils') require('../../setup/mocha') -process.env.DD_ENV = 'my-env' -process.env.DD_VERSION = 'my-version' +const env = 'my-env' +const version = 'my-version' const service = 'my-service' const commitSHA = 'my-commit-sha' const 
repositoryUrl = 'my-repository-url' @@ -92,9 +92,9 @@ describe('input message http requests', function () { assert.strictEqual(opts.method, 'POST') assert.strictEqual(opts.path, '/debugger/v2/input?ddtags=' + - `env%3A${process.env.DD_ENV}%2C` + - `version%3A${process.env.DD_VERSION}%2C` + - `debugger_version%3A${version}%2C` + + `env%3A${env}%2C` + + `version%3A${version}%2C` + + `debugger_version%3A${debuggerVersion}%2C` + `host_name%3A${hostname}%2C` + `git.commit.sha%3A${commitSHA}%2C` + `git.repository_url%3A${repositoryUrl}`) @@ -102,6 +102,63 @@ describe('input message http requests', function () { done() }) + it('should drop tag values containing commas', function (done) { + const logStub = { + debug: sinon.stub(), + error: sinon.stub(), + warn: sinon.stub(), + '@noCallThru': true, + } + + const sendWithInvalidTag = proxyquire('../../../src/debugger/devtools_client/send', { + './config': createConfigMock({ repositoryUrl: 'my-repository-url,forged:value' }), + './json-buffer': JSONBuffer, + './log': logStub, + '../../exporters/common/request': request, + './snapshot-pruner': { pruneSnapshot: pruneSnapshotStub }, + }) + + sendWithInvalidTag(message, logger, dd, snapshot) + clock.tick(1000) + + sinon.assert.calledOnce(request) + sinon.assert.calledOnceWithExactly(logStub.warn, + '[debugger:devtools_client] Skipping invalid tag value for %s', + 'git.repository_url') + + const opts = getRequestOptions(request) + assert.strictEqual(opts.path, + '/debugger/v2/input?ddtags=' + + `env%3A${env}%2C` + + `version%3A${version}%2C` + + `debugger_version%3A${debuggerVersion}%2C` + + `host_name%3A${hostname}%2C` + + `git.commit.sha%3A${commitSHA}`) + + done() + }) + + it('should coerce non-string tag values to strings', function (done) { + const sendWithNumericTag = proxyquire('../../../src/debugger/devtools_client/send', { + './config': createConfigMock({ commitSHA: 123 }), + './json-buffer': JSONBuffer, + '../../exporters/common/request': request, + './snapshot-pruner': 
{ pruneSnapshot: pruneSnapshotStub }, + }) + + sendWithNumericTag(message, logger, dd, snapshot) + clock.tick(1000) + + sinon.assert.calledOnce(request) + const opts = getRequestOptions(request) + assert.ok( + opts.path.includes('git.commit.sha%3A123'), + `Expected path to include git.commit.sha%3A123 but got ${opts.path}` + ) + + done() + }) + it('should use /debugger/v2/input when configured', function (done) { // Create a new send module with v2 endpoint configured const sendV2 = proxyquire('../../../src/debugger/devtools_client/send', { @@ -313,6 +370,8 @@ function getPayload (_message = message, _snapshot = snapshot) { */ function createConfigMock (overrides = {}) { return { + env, + version, service, commitSHA, repositoryUrl, diff --git a/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js b/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js index e64f980b8f5..d9cd34af1c6 100644 --- a/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js +++ b/packages/dd-trace/test/debugger/devtools_client/snapshot/utils.js @@ -115,6 +115,8 @@ function assertOnBreakpoint (done, snapshotConfig, callback) { if (typeof snapshotConfig === 'function') { callback = snapshotConfig snapshotConfig = DEFAULT_CAPTURE_LIMITS + } else { + snapshotConfig = { ...DEFAULT_CAPTURE_LIMITS, ...snapshotConfig } } session.once('Debugger.paused', ({ params }) => { diff --git a/packages/dd-trace/test/encode/0.4.spec.js b/packages/dd-trace/test/encode/0.4.spec.js index 67143242b16..bad94a8929d 100644 --- a/packages/dd-trace/test/encode/0.4.spec.js +++ b/packages/dd-trace/test/encode/0.4.spec.js @@ -28,8 +28,10 @@ describe('encode', () => { logger = { debug: sinon.stub(), } + const getConfig = () => ({ trace: { nativeSpanEvents: false } }) const { AgentEncoder } = proxyquire('../../src/encode/0.4', { '../log': logger, + '../config': getConfig, }) writer = { flush: sinon.spy() } encoder = new AgentEncoder(writer) @@ -487,10 +489,12 @@ describe('encode', () => { 
debug: sinon.spy(), } + const getConfig = () => ({ trace: { nativeSpanEvents: true } }) const { AgentEncoder } = proxyquire('../../src/encode/0.4', { '../log': logger, + '../config': getConfig, }) - writer = { flush: sinon.spy(), _config: { trace: { nativeSpanEvents: true } } } + writer = { flush: sinon.spy() } encoder = new AgentEncoder(writer) }) diff --git a/packages/dd-trace/test/exporters/agentless/writer.spec.js b/packages/dd-trace/test/exporters/agentless/writer.spec.js index df4e2d51624..8c8aff06299 100644 --- a/packages/dd-trace/test/exporters/agentless/writer.spec.js +++ b/packages/dd-trace/test/exporters/agentless/writer.spec.js @@ -18,7 +18,7 @@ describe('AgentlessWriter', () => { let encoderArgs let url let log - let getValueFromEnvSources + let apiKey beforeEach(() => { request = sinon.stub().yieldsAsync(null, '{}', 200) @@ -38,8 +38,6 @@ describe('AgentlessWriter', () => { error: sinon.spy(), } - getValueFromEnvSources = sinon.stub().returns('test-api-key') - const AgentlessJSONEncoder = function (...args) { encoderArgs = args return encoder @@ -47,12 +45,14 @@ describe('AgentlessWriter', () => { const requestModule = Object.assign(request, { '@global': true }) + apiKey = 'test-api-key' + Writer = proxyquire('../../../src/exporters/agentless/writer', { '../common/request': requestModule, '../../encode/agentless-json': { AgentlessJSONEncoder }, '../../../../../package.json': { version: 'tracerVersion' }, '../../log': log, - '../../config/helper': { getValueFromEnvSources }, + '../../config': () => ({ apiKey }), }) }) @@ -149,7 +149,7 @@ describe('AgentlessWriter', () => { }) it('should log error at startup when API key is missing', () => { - getValueFromEnvSources.returns(undefined) + apiKey = undefined // Error should be logged at constructor time writer = new Writer({ url }) @@ -161,7 +161,7 @@ describe('AgentlessWriter', () => { }) it('should skip sending when API key is missing', (done) => { - getValueFromEnvSources.returns(undefined) + apiKey = 
undefined writer = new Writer({ url }) encoder.count.returns(1) diff --git a/packages/dd-trace/test/lambda/index.spec.js b/packages/dd-trace/test/lambda/index.spec.js index 2cd8aa6aca2..95b2e0d7ec7 100644 --- a/packages/dd-trace/test/lambda/index.spec.js +++ b/packages/dd-trace/test/lambda/index.spec.js @@ -376,7 +376,7 @@ describe('lambda', () => { }, { envVar: 'DD_APM_FLUSH_DEADLINE_MILLISECONDS', - value: '-100', // will default to 0 + value: '-100', // will default to 100 }, { envVar: 'DD_APM_FLUSH_DEADLINE_MILLISECONDS', diff --git a/packages/dd-trace/test/llmobs/plugins/modelcontextprotocol-sdk/index.spec.js b/packages/dd-trace/test/llmobs/plugins/modelcontextprotocol-sdk/index.spec.js new file mode 100644 index 00000000000..6bc1c1c0bdc --- /dev/null +++ b/packages/dd-trace/test/llmobs/plugins/modelcontextprotocol-sdk/index.spec.js @@ -0,0 +1,220 @@ +'use strict' + +const assert = require('node:assert') +const { describe, it, before, after } = require('mocha') +const { withVersions } = require('../../../setup/mocha') + +const { + assertLlmObsSpanEvent, + MOCK_STRING, + useLlmObs, +} = require('../../util') + +describe('integrations', () => { + let Client + let McpServer + let InMemoryTransport + + let client + let server + + describe('modelcontextprotocol-sdk', () => { + const { getEvents } = useLlmObs({ plugin: 'modelcontextprotocol-sdk' }) + + withVersions('modelcontextprotocol-sdk', '@modelcontextprotocol/sdk', (version) => { + before(async () => { + const path = require('path') + const versionModule = require(`../../../../../../versions/@modelcontextprotocol/sdk@${version}`) + + // Require the client submodule first so RITM patches it before the server loads it transitively + Client = versionModule.get('@modelcontextprotocol/sdk/client').Client + + // The package exports map remaps package.json to dist/cjs/package.json, so navigate + // up from the resolved client entry path to find the SDK root directory + const clientEntryPath = 
versionModule.getPath('@modelcontextprotocol/sdk/client') + const sdkDir = path.resolve(path.dirname(clientEntryPath), '..', '..', '..') + McpServer = require(path.join(sdkDir, 'dist/cjs/server/mcp.js')).McpServer + + InMemoryTransport = versionModule.get('@modelcontextprotocol/sdk/inMemory.js').InMemoryTransport + + server = new McpServer({ name: 'test-server', version: '1.0.0' }) + + server.registerTool( + 'test-tool', + { description: 'A test tool', inputSchema: {} }, + async () => ({ + content: [{ type: 'text', text: 'Result from test-tool' }], + }) + ) + + server.registerTool( + 'error-tool', + { description: 'A tool that errors', inputSchema: {} }, + async () => { + throw new Error('Intentional test error') + } + ) + + server.registerTool( + 'multi-content-tool', + { description: 'Returns multiple content parts', inputSchema: {} }, + async () => ({ + content: [ + { type: 'text', text: 'First part' }, + { type: 'text', text: 'Second part' }, + ], + }) + ) + + const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair() + await server.connect(serverTransport) + + client = new Client({ name: 'test-client', version: '1.0.0' }) + await client.connect(clientTransport) + }) + + after(async () => { + if (client) await client.close() + if (server) await server.close() + }) + + describe('Client.callTool', () => { + it('creates a tool span for a basic tool call', async () => { + const result = await client.callTool({ name: 'test-tool', arguments: {} }) + + assert.ok(result.content) + assert.equal(result.content[0].text, 'Result from test-tool') + + const { apmSpans, llmobsSpans } = await getEvents() + + assertLlmObsSpanEvent(llmobsSpans[0], { + span: apmSpans[0], + spanKind: 'tool', + name: 'MCP Client Tool Call: test-tool', + inputValue: JSON.stringify({ name: 'test-tool', arguments: {} }), + outputValue: JSON.stringify({ + content: [{ type: 'text', text: 'Result from test-tool', annotations: {}, meta: {} }], + isError: false, + }), + tags: { + 
ml_app: 'test', + integration: 'modelcontextprotocol-sdk', + mcp_tool_kind: 'client', + mcp_server_name: 'test-server', + mcp_server_version: '1.0.0', + }, + }) + }) + + it('creates a tool span with arguments', async () => { + const result = await client.callTool({ + name: 'test-tool', + arguments: { query: 'hello world', limit: 10 }, + }) + + assert.ok(result.content) + + const { apmSpans, llmobsSpans } = await getEvents() + + assertLlmObsSpanEvent(llmobsSpans[0], { + span: apmSpans[0], + spanKind: 'tool', + name: 'MCP Client Tool Call: test-tool', + inputValue: JSON.stringify({ + name: 'test-tool', + arguments: { query: 'hello world', limit: 10 }, + }), + outputValue: JSON.stringify({ + content: [{ type: 'text', text: 'Result from test-tool', annotations: {}, meta: {} }], + isError: false, + }), + tags: { + ml_app: 'test', + integration: 'modelcontextprotocol-sdk', + mcp_tool_kind: 'client', + mcp_server_name: 'test-server', + mcp_server_version: '1.0.0', + }, + }) + }) + + it('creates a tool span with multi-content response', async () => { + const result = await client.callTool({ name: 'multi-content-tool', arguments: {} }) + + assert.ok(result.content) + assert.equal(result.content.length, 2) + + const { apmSpans, llmobsSpans } = await getEvents() + + assertLlmObsSpanEvent(llmobsSpans[0], { + span: apmSpans[0], + spanKind: 'tool', + name: 'MCP Client Tool Call: multi-content-tool', + inputValue: JSON.stringify({ name: 'multi-content-tool', arguments: {} }), + outputValue: JSON.stringify({ + content: [ + { type: 'text', text: 'First part', annotations: {}, meta: {} }, + { type: 'text', text: 'Second part', annotations: {}, meta: {} }, + ], + isError: false, + }), + tags: { + ml_app: 'test', + integration: 'modelcontextprotocol-sdk', + mcp_tool_kind: 'client', + mcp_server_name: 'test-server', + mcp_server_version: '1.0.0', + }, + }) + }) + + it('creates a tool span with error on failure', async () => { + // In MCP SDK 1.27+, tool errors are returned as 
isError:true results, not thrown exceptions + const result = await client.callTool({ name: 'error-tool', arguments: {} }) + assert.ok(result.isError, 'callTool result should have isError: true') + assert.ok(result.content?.[0]?.text?.includes('Intentional test error')) + + const { apmSpans, llmobsSpans } = await getEvents() + + assertLlmObsSpanEvent(llmobsSpans[0], { + span: apmSpans[0], + spanKind: 'tool', + name: 'MCP Client Tool Call: error-tool', + inputValue: JSON.stringify({ name: 'error-tool', arguments: {} }), + error: { + type: MOCK_STRING, + message: MOCK_STRING, + stack: MOCK_STRING, + }, + tags: { + ml_app: 'test', + integration: 'modelcontextprotocol-sdk', + mcp_tool_kind: 'client', + mcp_server_name: 'test-server', + mcp_server_version: '1.0.0', + }, + }) + }) + }) + + describe('Client.listTools', () => { + it('creates a task span for listing tools', async () => { + const result = await client.listTools() + + assert.ok(result.tools) + assert.equal(result.tools.length, 3) + + const { apmSpans, llmobsSpans } = await getEvents() + + assertLlmObsSpanEvent(llmobsSpans[0], { + span: apmSpans[0], + spanKind: 'task', + name: 'MCP Client List Tools', + outputValue: JSON.stringify(result), + tags: { ml_app: 'test', integration: 'modelcontextprotocol-sdk' }, + }) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/llmobs/sdk/index.spec.js b/packages/dd-trace/test/llmobs/sdk/index.spec.js index ceeec2dd691..f07147b4cfb 100644 --- a/packages/dd-trace/test/llmobs/sdk/index.spec.js +++ b/packages/dd-trace/test/llmobs/sdk/index.spec.js @@ -1222,18 +1222,29 @@ describe('sdk', () => { assert.strictEqual(LLMObsEvalMetricsWriter.prototype.append.getCall(0).args[0].categorical_value, 'foo') }) - it('defaults to the current time if no timestamp is provided', () => { - sinon.stub(Date, 'now').returns(1234) - llmobs.submitEvaluation(spanCtx, { - mlApp: 'test', - label: 'test', - metricType: 'score', - value: 0.6, + describe('with no timestamp provided', () => { + let 
prevTime + + before(() => { + prevTime = clock.now + clock.setSystemTime(1234) + }) + + after(() => { + clock.setSystemTime(prevTime) }) - assert.ok('timestamp_ms' in LLMObsEvalMetricsWriter.prototype.append.getCall(0).args[0]) - assert.strictEqual(LLMObsEvalMetricsWriter.prototype.append.getCall(0).args[0].timestamp_ms, 1234) - Date.now.restore() + it('defaults to the current time', () => { + llmobs.submitEvaluation(spanCtx, { + mlApp: 'test', + label: 'test', + metricType: 'score', + value: 0.6, + }) + + assert.ok('timestamp_ms' in LLMObsEvalMetricsWriter.prototype.append.getCall(0).args[0]) + assert.strictEqual(LLMObsEvalMetricsWriter.prototype.append.getCall(0).args[0].timestamp_ms, 1234) + }) }) it('submits a boolean evaluation metric', () => { @@ -1366,16 +1377,24 @@ describe('sdk', () => { }) describe('with DD_TRACE_OTEL_ENABLED set', () => { + let otelLLMObs + before(() => { + // DD_TRACE_OTEL_ENABLED is a launch-time env var captured when `Config` is built. + // Build a fresh config with the env set, then wire up a sibling LLMObs SDK that uses it. + // The outer `llmobs` is already enabled and its writers are already subscribed to the + // channels, so we only need this SDK to hold a config that reports `enabled` and has + // `DD_TRACE_OTEL_ENABLED` set - no extra enable()/disable() calls (which would trigger + // flush() on the spied writer and pollute unrelated tests). 
process.env.DD_TRACE_OTEL_ENABLED = 'true' - }) - - after(() => { + const config = getConfigFresh({ llmobs: { mlApp: 'mlApp', agentlessEnabled: false } }) delete process.env.DD_TRACE_OTEL_ENABLED + config.llmobs.enabled = true + otelLLMObs = new LLMObsSDK(tracer._tracer, llmobsModule, config) }) it('adds source:otel tag', () => { - llmobs.submitEvaluation(spanCtx, { + otelLLMObs.submitEvaluation(spanCtx, { mlApp: 'test', timestampMs: 1234, label: 'test', diff --git a/packages/dd-trace/test/llmobs/util.js b/packages/dd-trace/test/llmobs/util.js index 6717f06e544..6767d145088 100644 --- a/packages/dd-trace/test/llmobs/util.js +++ b/packages/dd-trace/test/llmobs/util.js @@ -258,6 +258,9 @@ function assertLlmObsSpanEvent (actual, expected) { expectedMeta.input = { documents: inputDocuments } } else if (inputValue) { expectedMeta.input = { value: inputValue } + } else { + // span_processor.js always sets meta.input = {} even when no input is tagged + expectedMeta.input = {} } const expectedSpanEvent = { diff --git a/packages/dd-trace/test/openfeature/flagging_provider.spec.js b/packages/dd-trace/test/openfeature/flagging_provider.spec.js index c628c2b16ef..a749d1942cf 100644 --- a/packages/dd-trace/test/openfeature/flagging_provider.spec.js +++ b/packages/dd-trace/test/openfeature/flagging_provider.spec.js @@ -72,7 +72,7 @@ describe('FlaggingProvider', () => { const provider = new FlaggingProvider(mockTracer, mockConfig) assert.ok(provider) - sinon.assert.calledWith(log.debug, 'FlaggingProvider created with timeout: 30000ms') + sinon.assert.calledWith(log.debug, '%s created with timeout: %dms', 'FlaggingProvider', 30000) }) }) @@ -85,7 +85,7 @@ describe('FlaggingProvider', () => { provider._setConfiguration(ufc) sinon.assert.calledOnceWithExactly(setConfigSpy, ufc) - sinon.assert.calledWith(log.debug, 'FlaggingProvider provider configuration updated') + sinon.assert.calledWith(log.debug, '%s provider configuration updated', 'FlaggingProvider') }) it('should handle 
null/undefined configuration gracefully', () => { diff --git a/packages/dd-trace/test/opentelemetry/logs.spec.js b/packages/dd-trace/test/opentelemetry/logs.spec.js index 33bb110c17f..3e49355ebd5 100644 --- a/packages/dd-trace/test/opentelemetry/logs.spec.js +++ b/packages/dd-trace/test/opentelemetry/logs.spec.js @@ -1,8 +1,5 @@ 'use strict' -// Increase max listeners to avoid warnings in tests -process.setMaxListeners(50) - const assert = require('assert') const os = require('os') const http = require('http') @@ -21,10 +18,26 @@ const { assertObjectContains } = require('../../../../integration-tests/helpers' describe('OpenTelemetry Logs', () => { let originalEnv - function setupTracer (enabled = true, maxExportBatchSize = '1') { + function setupLogs (enabled = true, maxExportBatchSize = '1') { process.env.DD_LOGS_OTEL_ENABLED = enabled ? 'true' : 'false' process.env.OTEL_BSP_MAX_EXPORT_BATCH_SIZE = maxExportBatchSize // Force immediate export + logs.disable() + const config = getConfigFresh() + if (config.otelLogsEnabled) { + const { initializeOpenTelemetryLogs } = + proxyquire.noPreserveCache()('../../src/opentelemetry/logs', {}) + initializeOpenTelemetryLogs(config) + } + return { config, logs, loggerProvider: logs.getLoggerProvider() } + } + + // Full tracer.init() path. Needed for tests that assert on runtime pieces only populated by the + // full pipeline (e.g. the `_dd.rc.client_id` resource attribute added by remote config). 
+ function setupLogsFull (maxExportBatchSize = '1') { + process.env.DD_LOGS_OTEL_ENABLED = 'true' + process.env.OTEL_BSP_MAX_EXPORT_BATCH_SIZE = maxExportBatchSize + const proxy = proxyquire.noPreserveCache()('../../src/proxy', { './config': getConfigFresh, }) @@ -142,7 +155,7 @@ describe('OpenTelemetry Logs', () => { assert.strictEqual(log2.traceId.toString('hex'), '1234567890abcdef1234567890abcdef') assert.strictEqual(log2.spanId.toString('hex'), '1234567890abcdef') }) - setupTracer(true, '2') + setupLogs(true, '2') const spanContext = { traceId: '1234567890abcdef1234567890abcdef', @@ -172,7 +185,7 @@ describe('OpenTelemetry Logs', () => { assert.strictEqual(decoded.resourceLogs[0].scopeLogs[0].logRecords[0].body.stringValue, 'Protobuf format') }) - const { logs } = setupTracer() + const { logs } = setupLogs() logs.getLogger({ name: 'test' }).emit({ severityText: 'INFO', body: 'Protobuf format' }) }) @@ -182,7 +195,7 @@ describe('OpenTelemetry Logs', () => { assert.strictEqual(decoded.resourceLogs[0].scopeLogs[0].logRecords[0].body.stringValue, 'JSON format') }, 'json') - const { logs } = setupTracer() + const { logs } = setupLogs() logs.getLogger('test').emit({ severityText: 'DEBUG', body: 'JSON format' }) }) @@ -193,7 +206,7 @@ describe('OpenTelemetry Logs', () => { assert.strictEqual(decoded.resourceLogs[0].scopeLogs[0].logRecords[0].body.stringValue, 'before shutdown') }) - const { logs, loggerProvider } = setupTracer(true, '2') + const { logs, loggerProvider } = setupLogs(true, '2') const logger1 = logs.getLogger('test-logger') // Emit before shutdown - should work @@ -225,7 +238,7 @@ describe('OpenTelemetry Logs', () => { assert.strictEqual(log.body.stringValue, 'Scope test') }) - const { logs } = setupTracer() + const { logs } = setupLogs() logs.getLogger('test-logger').emit({ body: 'Scope test', instrumentationScope: { name: 'custom-scope', version: '2.0.0' }, @@ -290,7 +303,7 @@ describe('OpenTelemetry Logs', () => { 
assert.strictEqual(capturedHeaders['x-api-key'], 'test123') }) - setupTracer() + setupLogsFull() const spanContext = { traceId: '00000000000000000000000000000001', @@ -332,7 +345,7 @@ describe('OpenTelemetry Logs', () => { assert.strictEqual(scope2.logRecords[0].body.stringValue, 'Message from logger2') }) - setupTracer(true, '2') + setupLogs(true, '2') const spanContext = { traceId: '1234567890abcdef1234567890abcdef', @@ -365,7 +378,7 @@ describe('OpenTelemetry Logs', () => { done() }) - const { logs } = setupTracer() + const { logs } = setupLogs() const logger = logs.getLogger('test-logger') // Emit with an invalid severity number (999) @@ -409,7 +422,7 @@ describe('OpenTelemetry Logs', () => { done() }) - const { logs } = setupTracer(true, '6') + const { logs } = setupLogs(true, '6') const logger = logs.getLogger('test-logger') // Emit logs with different body types @@ -431,7 +444,7 @@ describe('OpenTelemetry Logs', () => { process.env.OTEL_BSP_MAX_EXPORT_BATCH_SIZE = '10' process.env.OTEL_BSP_SCHEDULE_DELAY = '100' // 100ms timeout - const { logs } = setupTracer() + const { logs } = setupLogs() const logger = logs.getLogger('test-logger') logger.emit({ body: 'timeout test' }) @@ -460,13 +473,13 @@ describe('OpenTelemetry Logs', () => { process.env.DD_ENV = 'production' process.env.DD_TRACE_REPORT_HOSTNAME = 'true' - const { logs } = setupTracer() + const { logs } = setupLogs() const logger = logs.getLogger('test-logger') logger.emit({ body: 'test' }) }) it('handles multiple register() calls', () => { - const { logs, loggerProvider } = setupTracer() + const { logs, loggerProvider } = setupLogs() // Calling register again should not throw loggerProvider.register() @@ -481,21 +494,21 @@ describe('OpenTelemetry Logs', () => { it('uses default protobuf protocol', () => { delete process.env.OTEL_EXPORTER_OTLP_LOGS_PROTOCOL delete process.env.OTEL_EXPORTER_OTLP_PROTOCOL - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() 
assert(loggerProvider.processor) assert.strictEqual(loggerProvider.processor.exporter.transformer.protocol, 'http/protobuf') }) it('configures protocol from environment variable', () => { process.env.OTEL_EXPORTER_OTLP_PROTOCOL = 'http/json' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.transformer.protocol, 'http/json') }) it('prioritizes logs-specific protocol over generic protocol', () => { process.env.OTEL_EXPORTER_OTLP_LOGS_PROTOCOL = 'http/json' process.env.OTEL_EXPORTER_OTLP_PROTOCOL = 'http/protobuf' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.transformer.protocol, 'http/json') }) @@ -503,7 +516,7 @@ describe('OpenTelemetry Logs', () => { const logMock = mockLogWarn() process.env.OTEL_EXPORTER_OTLP_LOGS_PROTOCOL = 'grpc' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.transformer.protocol, 'http/protobuf') assert.match(logMock.getMessage(), /OTLP gRPC protocol is not supported/) @@ -512,7 +525,7 @@ describe('OpenTelemetry Logs', () => { it('configures OTLP endpoint from environment variable', () => { process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT = 'http://custom:4321/v1/logs' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.options.path, '/v1/logs') assert.strictEqual(loggerProvider.processor.exporter.options.hostname, 'custom') assert.strictEqual(loggerProvider.processor.exporter.options.port, '4321') @@ -521,21 +534,27 @@ describe('OpenTelemetry Logs', () => { it('prioritizes logs-specific endpoint over generic endpoint', () => { process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT = 'http://custom:4318/v1/logs' process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://generic:4318/v1/logs' - const { loggerProvider } 
= setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.options.path, '/v1/logs') assert.strictEqual(loggerProvider.processor.exporter.options.hostname, 'custom') assert.strictEqual(loggerProvider.processor.exporter.options.port, '4318') }) - it('appends /v1/logs to endpoint if not provided', () => { - process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT = 'http://custom:4318' - const { loggerProvider } = setupTracer() + it('appends /v1/logs when deriving the URL from the generic OTEL_EXPORTER_OTLP_ENDPOINT', () => { + process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://custom:4318' + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.options.path, '/v1/logs') }) + it('uses a signal-specific endpoint as-is without appending /v1/logs', () => { + process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT = 'http://custom:4318' + const { loggerProvider } = setupLogs() + assert.strictEqual(loggerProvider.processor.exporter.options.path, '/') + }) + it('configures OTLP headers from environment variable', () => { process.env.OTEL_EXPORTER_OTLP_HEADERS = 'api-key=secret,env=prod' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() const exporter = loggerProvider.processor.exporter assert.strictEqual(exporter.options.headers['api-key'], 'secret') assert.strictEqual(exporter.options.headers.env, 'prod') @@ -544,7 +563,7 @@ describe('OpenTelemetry Logs', () => { it('prioritizes logs-specific headers over generic OTLP headers', () => { process.env.OTEL_EXPORTER_OTLP_HEADERS = 'generic=value,shared=generic' process.env.OTEL_EXPORTER_OTLP_LOGS_HEADERS = 'logs-specific=value,shared=logs' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() const exporter = loggerProvider.processor.exporter assert.strictEqual(exporter.options.headers['logs-specific'], 'value') assert.strictEqual(exporter.options.headers.shared, 'logs') @@ -553,19 +572,19 @@ 
describe('OpenTelemetry Logs', () => { it('configures OTLP timeout from environment variable', () => { process.env.OTEL_EXPORTER_OTLP_LOGS_TIMEOUT = '1000' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.options.timeout, 1000) }) it('prioritizes logs-specific timeout over generic timeout', () => { process.env.OTEL_EXPORTER_OTLP_LOGS_TIMEOUT = '1000' process.env.OTEL_EXPORTER_OTLP_TIMEOUT = '2000' - const { loggerProvider } = setupTracer() + const { loggerProvider } = setupLogs() assert.strictEqual(loggerProvider.processor.exporter.options.timeout, 1000) }) it('does not initialize when OTEL logs are disabled', () => { - const { loggerProvider } = setupTracer(false) + const { loggerProvider } = setupLogs(false) const { LoggerProvider } = require('../../src/opentelemetry/logs') // Should return no-op provider when disabled, not our custom LoggerProvider @@ -573,25 +592,25 @@ describe('OpenTelemetry Logs', () => { }) it('disables log injection when OTEL logs are enabled', () => { - const { tracer, loggerProvider } = setupTracer() + const { config, loggerProvider } = setupLogs() assert(loggerProvider) - assert.strictEqual(tracer._tracer._config.logInjection, false) + assert.strictEqual(config.logInjection, false) }) it('disables log injection even when DD_LOGS_INJECTION is explicitly set to true', () => { // OTEL logs and DD log injection are mutually exclusive process.env.DD_LOGS_INJECTION = 'true' - const { tracer, loggerProvider } = setupTracer() + const { config, loggerProvider } = setupLogs() assert(loggerProvider) - assert.strictEqual(tracer._tracer._config.logInjection, false) + assert.strictEqual(config.logInjection, false) }) }) describe('Telemetry Metrics', () => { it('tracks telemetry metrics for exported logs', () => { - setupTracer() + setupLogs() const telemetryMetrics = { manager: { namespace: sinon.stub().returns({ count: sinon.stub().returns({ inc: sinon.spy() }) }) 
}, } diff --git a/packages/dd-trace/test/opentelemetry/metrics.spec.js b/packages/dd-trace/test/opentelemetry/metrics.spec.js index 9422dd13fc7..70b9ba4ea76 100644 --- a/packages/dd-trace/test/opentelemetry/metrics.spec.js +++ b/packages/dd-trace/test/opentelemetry/metrics.spec.js @@ -1,7 +1,5 @@ 'use strict' -process.setMaxListeners(50) - const assert = require('assert') const http = require('http') const { format } = require('util') @@ -20,7 +18,7 @@ describe('OpenTelemetry Meter Provider', () => { let originalEnv let httpStub - function setupTracer (envOverrides, setDefaultEnv = true) { + function setupMetrics (envOverrides, setDefaultEnv = true) { if (setDefaultEnv) { process.env.DD_METRICS_OTEL_ENABLED = 'true' process.env.DD_SERVICE = 'test-service' @@ -39,21 +37,14 @@ describe('OpenTelemetry Meter Provider', () => { } } - const dogstatsd = proxyquire.noPreserveCache()('../../src/dogstatsd', {}) - - const proxy = proxyquire.noPreserveCache()('../../src/proxy', { - './config': getConfigFresh, - './dogstatsd': dogstatsd, - }) - const TracerProxy = proxyquire.noPreserveCache()('../../src', { - './proxy': proxy, - }) - const tracer = proxyquire.noPreserveCache()('../../', { - './src': TracerProxy, - }) - tracer._initialized = false - tracer.init() - return { tracer, meterProvider: metrics.getMeterProvider() } + metrics.disable() + const config = getConfigFresh() + if (config.otelMetricsEnabled) { + const { initializeOpenTelemetryMetrics } = + proxyquire.noPreserveCache()('../../src/opentelemetry/metrics', {}) + initializeOpenTelemetryMetrics(config) + } + return { config, meterProvider: metrics.getMeterProvider() } } function mockOtlpExport (validator) { @@ -139,7 +130,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(metrics[0].sum.dataPoints[0].asDouble, 10.3) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const counter = meter.createCounter('requests') counter.add(5.1) @@ -156,7 +147,7 @@ 
describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(histogram.histogram.dataPoints[0].sum, 100) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.createHistogram('duration').record(100) @@ -181,7 +172,7 @@ describe('OpenTelemetry Meter Provider', () => { } }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const hist = meter.createHistogram('size') @@ -213,7 +204,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(gauge.gauge.dataPoints[0].asInt, 75) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const temp = meter.createGauge('temperature') temp.record(72) @@ -230,7 +221,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(updown.sum.dataPoints[0].asInt, 7) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const queue = meter.createUpDownCounter('queue') queue.add(10) @@ -249,7 +240,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(dp.attributes.find(a => a.key === 'type').value.stringValue, 'heap') }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const mem = meter.createObservableGauge('memory') mem.addCallback((result) => result.observe(process.memoryUsage().heapUsed, { type: 'heap' })) @@ -265,7 +256,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(counter.sum.dataPoints[0].asInt, 42) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) const meter = metrics.getMeter('app') const conn = meter.createObservableCounter('connections') conn.addCallback((result) => result.observe(42)) @@ -281,7 +272,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(updown.sum.dataPoints[0].asInt, 15) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) + setupMetrics({ 
OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) const meter = metrics.getMeter('app') const tasks = meter.createObservableUpDownCounter('tasks') tasks.addCallback((result) => result.observe(15)) @@ -299,7 +290,7 @@ describe('OpenTelemetry Meter Provider', () => { assert(dataPoint.timeUnixNano > 0) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.createCounter('test').add(5) @@ -319,7 +310,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(gauge.gauge.dataPoints[0].asInt, 100) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: 'http/json' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: 'http/json' }) const meter = metrics.getMeter('app') meter.createCounter('counter').add(5) meter.createHistogram('histogram').record(10) @@ -340,7 +331,7 @@ describe('OpenTelemetry Meter Provider', () => { assert(attrs['host.name'], 'should include host.name') }) - setupTracer({ DD_SERVICE: 'custom', DD_VERSION: '2.0.0', DD_TRACE_REPORT_HOSTNAME: 'true' }) + setupMetrics({ DD_SERVICE: 'custom', DD_VERSION: '2.0.0', DD_TRACE_REPORT_HOSTNAME: 'true' }) const meter = metrics.getMeter('app') meter.createCounter('test').add(1) @@ -359,7 +350,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(getDp('POST').asInt, 5) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const api = meter.createCounter('api') api.add(10, { method: 'GET' }) @@ -381,7 +372,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(getDp('POST', 200).asInt, 150) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const api = meter.createCounter('api') api.add(10, { method: 'GET', status: 200 }) @@ -412,7 +403,7 @@ describe('OpenTelemetry Meter Provider', () => { validated = true }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.createCounter('test').add(5, { str: 'val', @@ -440,7 +431,7 @@ describe('OpenTelemetry Meter 
Provider', () => { assert.strictEqual(counter.sum.dataPoints[0].asInt, 8) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) const meter = metrics.getMeter('app') const counter = meter.createCounter('test') counter.add(5) @@ -457,7 +448,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(counter.sum.dataPoints[0].asInt, 5) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'delta' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'delta' }) const meter = metrics.getMeter('app') meter.createCounter('test').add(5) @@ -471,7 +462,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(counter.sum.dataPoints[0].asInt, 5) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'LOWMEMORY' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'LOWMEMORY' }) const meter = metrics.getMeter('app') meter.createCounter('sync').add(5) @@ -485,7 +476,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(counter.sum.dataPoints[0].asInt, 10) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'lowmemory' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'lowmemory' }) const meter = metrics.getMeter('app') const obs = meter.createObservableCounter('obs') obs.addCallback((result) => result.observe(10)) @@ -500,7 +491,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(updown.sum.dataPoints[0].asInt, 5) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'DELTA' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'DELTA' }) const meter = metrics.getMeter('app') meter.createUpDownCounter('updown').add(5) @@ -515,7 +506,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(updown.sum.dataPoints[0].asInt, 10) }) - setupTracer({ 
OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'DELTA' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'DELTA' }) const meter = metrics.getMeter('app') const obs = meter.createObservableUpDownCounter('obs.updown') obs.addCallback((result) => result.observe(10)) @@ -531,7 +522,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(histogram.histogram.dataPoints[0].sum, 30) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'DELTA' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'DELTA' }) const meter = metrics.getMeter('app') meter.createHistogram('latency').record(10) meter.createHistogram('latency').record(20) @@ -547,7 +538,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(histogram.histogram.dataPoints[0].sum, 60) }) - setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) + setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: 'CUMULATIVE' }) const meter = metrics.getMeter('app') meter.createHistogram('latency').record(10) meter.createHistogram('latency').record(20) @@ -559,7 +550,7 @@ describe('OpenTelemetry Meter Provider', () => { describe('Case Insensitivity', () => { it('meter names are case-insensitive', () => { - setupTracer() + setupMetrics() const meter1 = metrics.getMeter('MyApp') const meter2 = metrics.getMeter('myapp') assert.strictEqual(meter1, meter2) @@ -572,7 +563,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(counter.sum.dataPoints[0].asInt, 6) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const c1 = meter.createCounter('MyMetric') const c2 = meter.createCounter('mymetric') @@ -599,7 +590,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(histogram.histogram.dataPoints[0].sum, 100) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.createCounter('Test').add(5) 
meter.createHistogram('TEST').record(100) @@ -610,7 +601,7 @@ describe('OpenTelemetry Meter Provider', () => { describe('Lifecycle', () => { it('handles shutdown gracefully', async () => { - setupTracer() + setupMetrics() const provider = metrics.getMeterProvider() await provider.reader.shutdown() await provider.reader.shutdown() // Second shutdown should be safe @@ -621,7 +612,7 @@ describe('OpenTelemetry Meter Provider', () => { assert(decoded.resourceMetrics) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.createCounter('test').add(1) @@ -636,7 +627,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(gauge.gauge.dataPoints[0].asInt, 200) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') const gauge = meter.createObservableGauge('temperature') @@ -661,7 +652,7 @@ describe('OpenTelemetry Meter Provider', () => { assert.strictEqual(idAttr?.value.intValue, 23) }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app', '', { attributes: { username: 'test', id: 23 } }) meter.createCounter('num.monkies').add(1) meter.createCounter('num.baboons').add(2) @@ -675,7 +666,7 @@ describe('OpenTelemetry Meter Provider', () => { const log = require('../../src/log') const warnSpy = sinon.spy(log, 'warn') - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.addBatchObservableCallback(() => {}, []) meter.removeBatchObservableCallback(() => {}, []) @@ -692,7 +683,7 @@ describe('OpenTelemetry Meter Provider', () => { describe('Protocol Configuration', () => { it('uses default protobuf protocol', () => { - const { meterProvider } = setupTracer({ + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: undefined, OTEL_EXPORTER_OTLP_PROTOCOL: undefined, }) @@ -701,12 +692,12 @@ describe('OpenTelemetry Meter Provider', () => { }) it('configures protocol from environment variable', () => { - const { meterProvider } = setupTracer({ 
OTEL_EXPORTER_OTLP_PROTOCOL: 'http/json' }) + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_PROTOCOL: 'http/json' }) assert.strictEqual(meterProvider.reader.exporter.transformer.protocol, 'http/json') }) it('prioritizes metrics-specific protocol over generic protocol', () => { - const { meterProvider } = setupTracer({ + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: 'http/json', OTEL_EXPORTER_OTLP_PROTOCOL: 'http/protobuf', }) @@ -716,7 +707,7 @@ describe('OpenTelemetry Meter Provider', () => { it('logs warning and falls back to protobuf when gRPC protocol is set', () => { const log = require('../../src/log') const warnSpy = sinon.spy(log, 'warn') - const { meterProvider } = setupTracer({ OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: 'grpc' }) + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_PROTOCOL: 'grpc' }) assert.strictEqual(meterProvider.reader.exporter.transformer.protocol, 'http/protobuf') const expectedMsg = 'OTLP gRPC protocol is not supported for metrics. ' + 'Defaulting to http/protobuf. gRPC protobuf support may be added in a future release.' 
@@ -727,7 +718,7 @@ describe('OpenTelemetry Meter Provider', () => { describe('Endpoint Configuration', () => { it('configures OTLP endpoint from environment variable', () => { - const { meterProvider } = setupTracer({ + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: 'http://custom:4321/v1/metrics', }) assert.strictEqual(meterProvider.reader.exporter.options.path, '/v1/metrics') @@ -736,7 +727,7 @@ describe('OpenTelemetry Meter Provider', () => { }) it('prioritizes metrics-specific endpoint over generic endpoint', () => { - const { meterProvider } = setupTracer({ + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_ENDPOINT: 'http://custom:4318/v1/metrics', OTEL_EXPORTER_OTLP_ENDPOINT: 'http://generic:4318/v1/metrics', }) @@ -747,21 +738,21 @@ describe('OpenTelemetry Meter Provider', () => { it('appends /v1/metrics to endpoint if not provided', () => { process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://custom:4318' - const { meterProvider } = setupTracer() + const { meterProvider } = setupMetrics() assert.strictEqual(meterProvider.reader.exporter.options.path, '/v1/metrics') }) }) describe('Headers Configuration', () => { it('configures OTLP headers from environment variable', () => { - const { meterProvider } = setupTracer({ OTEL_EXPORTER_OTLP_HEADERS: 'api-key=secret,env=prod' }) + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_HEADERS: 'api-key=secret,env=prod' }) const exporter = meterProvider.reader.exporter assert.strictEqual(exporter.options.headers['api-key'], 'secret') assert.strictEqual(exporter.options.headers.env, 'prod') }) it('prioritizes metrics-specific headers over generic OTLP headers', () => { - const { meterProvider } = setupTracer({ + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_HEADERS: 'generic=value,shared=generic', OTEL_EXPORTER_OTLP_METRICS_HEADERS: 'metrics-specific=value,shared=metrics', }) @@ -774,24 +765,24 @@ describe('OpenTelemetry Meter Provider', () => { 
describe('Timeout Configuration', () => { it('uses default timeout when not set', () => { - const { meterProvider } = setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: undefined }) + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: undefined }) assert.strictEqual(meterProvider.reader.exporter.options.timeout, 10000) }) it('configures OTLP timeout from environment variable', () => { - const { meterProvider } = setupTracer({ OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: '1000' }) + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: '1000' }) assert.strictEqual(meterProvider.reader.exporter.options.timeout, 1000) }) it('prioritizes metrics-specific timeout over generic timeout', () => { - const { meterProvider } = setupTracer( + const { meterProvider } = setupMetrics( { OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: '1000', OTEL_EXPORTER_OTLP_TIMEOUT: '2000' } ) assert.strictEqual(meterProvider.reader.exporter.options.timeout, 1000) }) it('falls back to generic timeout when metrics-specific not set', () => { - const { meterProvider } = setupTracer({ OTEL_EXPORTER_OTLP_TIMEOUT: '5000' }) + const { meterProvider } = setupMetrics({ OTEL_EXPORTER_OTLP_TIMEOUT: '5000' }) assert.strictEqual(meterProvider.reader.exporter.options.timeout, 5000) }) }) @@ -813,7 +804,7 @@ describe('OpenTelemetry Meter Provider', () => { } it('rejects zero for metrics configs with allowZero=false', () => { - setupTracer({ + setupMetrics({ OTEL_BSP_SCHEDULE_DELAY: '0', OTEL_METRIC_EXPORT_INTERVAL: '0', OTEL_BSP_MAX_QUEUE_SIZE: '0', @@ -832,7 +823,7 @@ describe('OpenTelemetry Meter Provider', () => { }) it('rejects negative values for non-negative integer configs', () => { - setupTracer({ + setupMetrics({ OTEL_EXPORTER_OTLP_TIMEOUT: '-1', OTEL_EXPORTER_OTLP_LOGS_TIMEOUT: '-1', OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: '-1', @@ -853,7 +844,7 @@ describe('OpenTelemetry Meter Provider', () => { }) it('rejects values that are not numbers for integer-based configs', () => { - 
setupTracer({ + setupMetrics({ OTEL_EXPORTER_OTLP_TIMEOUT: 'not a number', OTEL_EXPORTER_OTLP_LOGS_TIMEOUT: 'invalid', OTEL_EXPORTER_OTLP_METRICS_TIMEOUT: 'hi sir', @@ -876,14 +867,14 @@ describe('OpenTelemetry Meter Provider', () => { describe('Initialization', () => { it('does not initialize when OTEL metrics configuration is unset', () => { - const { meterProvider } = setupTracer({ DD_METRICS_OTEL_ENABLED: undefined }) + const { meterProvider } = setupMetrics({ DD_METRICS_OTEL_ENABLED: undefined }) const { MeterProvider } = require('../../src/opentelemetry/metrics') assert.strictEqual(meterProvider instanceof MeterProvider, false) }) it('does not initialize when OTEL metrics are explicitly disabled', () => { - const { meterProvider } = setupTracer({ DD_METRICS_OTEL_ENABLED: 'false' }) + const { meterProvider } = setupMetrics({ DD_METRICS_OTEL_ENABLED: 'false' }) const { MeterProvider } = require('../../src/opentelemetry/metrics') assert.strictEqual(meterProvider instanceof MeterProvider, false) @@ -893,7 +884,7 @@ describe('OpenTelemetry Meter Provider', () => { const log = require('../../src/log') const warnSpy = sinon.spy(log, 'warn') - setupTracer() + setupMetrics() const provider = metrics.getMeterProvider() provider.reader.shutdown() @@ -928,7 +919,7 @@ describe('OpenTelemetry Meter Provider', () => { )) }) - setupTracer( + setupMetrics( { DD_METRICS_OTEL_ENABLED: 'true', OTEL_METRIC_EXPORT_INTERVAL: '100', OTEL_BSP_MAX_QUEUE_SIZE: '3' } , false ) @@ -952,7 +943,7 @@ describe('OpenTelemetry Meter Provider', () => { } }) - setupTracer( + setupMetrics( { DD_METRICS_OTEL_ENABLED: 'true', OTEL_METRIC_EXPORT_INTERVAL: '100', OTEL_BSP_MAX_QUEUE_SIZE: '3' }, false ) @@ -976,7 +967,7 @@ describe('OpenTelemetry Meter Provider', () => { assert(warnSpy.getCalls().some(call => format(...call.args).includes('Metric queue exceeded limit'))) }) - setupTracer( + setupMetrics( { DD_METRICS_OTEL_ENABLED: 'true', OTEL_METRIC_EXPORT_INTERVAL: '100', OTEL_BSP_MAX_QUEUE_SIZE: 
'3' }, false ) @@ -1014,7 +1005,7 @@ describe('OpenTelemetry Meter Provider', () => { })) }) - setupTracer({ DD_METRICS_OTEL_ENABLED: 'true', OTEL_METRIC_EXPORT_INTERVAL: '30000' }, false) + setupMetrics({ DD_METRICS_OTEL_ENABLED: 'true', OTEL_METRIC_EXPORT_INTERVAL: '30000' }, false) const meter = metrics.getMeterProvider().getMeter('test') const counter = meter.createCounter('counter.sync') @@ -1068,7 +1059,7 @@ describe('OpenTelemetry Meter Provider', () => { return mockReq }) - setupTracer() + setupMetrics() const meter = metrics.getMeter('app') meter.createCounter('test1').add(1) diff --git a/packages/dd-trace/test/opentelemetry/tracer.spec.js b/packages/dd-trace/test/opentelemetry/tracer.spec.js index c4bf426028a..fe8f6e5c48a 100644 --- a/packages/dd-trace/test/opentelemetry/tracer.spec.js +++ b/packages/dd-trace/test/opentelemetry/tracer.spec.js @@ -8,6 +8,7 @@ const sinon = require('sinon') const api = require('@opentelemetry/api') const { hrTime, timeInputToHrTime } = require('../../../../vendor/dist/@opentelemetry/core') +const { AUTO_KEEP, AUTO_REJECT, USER_KEEP } = require('../../../../ext/priority') const { storage } = require('../../../datadog-core') require('../setup/core') require('../../').init() @@ -214,6 +215,54 @@ describe('OTel Tracer', () => { }) }) + describe('_convertOtelContextToDatadog (traceparent/tracestate extraction)', () => { + const TRACE_ID = '0123456789abcdef0123456789abcdef' + const SPAN_ID = '0123456789abcdef' + + /** + * @param {number} traceFlag + * @param {string|null} tracestate + */ + function convert (traceFlag, tracestate) { + const otelTracer = new Tracer({}, {}, new TracerProvider()) + return otelTracer._convertOtelContextToDatadog( + TRACE_ID, + SPAN_ID, + traceFlag, + tracestate ? 
{ traceparent: tracestate } : null + ) + } + + it('writes sampling priority onto the wrapped Datadog context', () => { + const spanContext = convert(1, 'other=bleh,dd=s:2;o:synthetics;t.dm:-4') + assert.strictEqual(spanContext._ddContext._sampling.priority, USER_KEEP) + assert.strictEqual(spanContext._ddContext._trace.origin, 'synthetics') + assert.strictEqual(spanContext.traceFlags, 1) + }) + + it('preserves the existing _trace.started/finished/tags when writing origin', () => { + const spanContext = convert(1, 'other=bleh,dd=s:1;o:foo') + assert.deepStrictEqual(spanContext._ddContext._trace.started, []) + assert.deepStrictEqual(spanContext._ddContext._trace.finished, []) + assert.deepStrictEqual(spanContext._ddContext._trace.tags, {}) + assert.strictEqual(spanContext._ddContext._trace.origin, 'foo') + }) + + it('falls back to AUTO_REJECT/AUTO_KEEP when tracestate has no s: field', () => { + const rejected = convert(0, 'other=bleh,dd=o:foo;t.dm:-4') + assert.strictEqual(rejected._ddContext._sampling.priority, AUTO_REJECT) + + const kept = convert(1, 'other=bleh,dd=o:foo;t.dm:-4') + assert.strictEqual(kept._ddContext._sampling.priority, AUTO_KEEP) + }) + + it('falls back to AUTO_KEEP for RUM traces without a priority', () => { + const spanContext = convert(1, 'other=bleh,dd=o:rum') + assert.strictEqual(spanContext._ddContext._sampling.priority, AUTO_KEEP) + assert.strictEqual(spanContext._ddContext._trace.origin, 'rum') + }) + }) + it('test otel context mixed span parenting', () => { const tracerProvider = new TracerProvider() tracerProvider.register() diff --git a/packages/dd-trace/test/opentelemetry/traces.spec.js b/packages/dd-trace/test/opentelemetry/traces.spec.js index a4f1faf8855..22b5e29826b 100644 --- a/packages/dd-trace/test/opentelemetry/traces.spec.js +++ b/packages/dd-trace/test/opentelemetry/traces.spec.js @@ -1,8 +1,5 @@ 'use strict' -// Increase max listeners to avoid warnings in tests -process.setMaxListeners(50) - const assert = require('assert') 
const http = require('http') @@ -13,32 +10,24 @@ const proxyquire = require('proxyquire') require('../setup/core') const { getConfigFresh } = require('../helpers/config') const id = require('../../src/id') +const OtlpHttpTraceExporter = require('../../src/opentelemetry/trace/otlp_http_trace_exporter') +const { createOtlpTraceExporter } = require('../../src/opentelemetry/trace') + +const OTEL_ENV_KEYS = [ + 'OTEL_TRACES_EXPORTER', + 'OTEL_EXPORTER_OTLP_ENDPOINT', + 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT', + 'OTEL_EXPORTER_OTLP_PROTOCOL', + 'OTEL_EXPORTER_OTLP_TRACES_PROTOCOL', + 'OTEL_EXPORTER_OTLP_HEADERS', + 'OTEL_EXPORTER_OTLP_TRACES_HEADERS', + 'OTEL_EXPORTER_OTLP_TIMEOUT', + 'OTEL_EXPORTER_OTLP_TRACES_TIMEOUT', +] describe('OpenTelemetry Traces', () => { let originalEnv - function setupTracer (enabled = true) { - if (enabled) { - process.env.OTEL_TRACES_EXPORTER = 'otlp' - } else { - delete process.env.OTEL_TRACES_EXPORTER - } - - const proxy = proxyquire.noPreserveCache()('../../src/proxy', { - './config': getConfigFresh, - }) - const TracerProxy = proxyquire.noPreserveCache()('../../src', { - './proxy': proxy, - }) - const tracer = proxyquire.noPreserveCache()('../../', { - './src': TracerProxy, - }) - tracer._initialized = false - tracer._tracingInitialized = false - tracer.init() - return tracer - } - /** * Creates a mock DD-formatted span (as produced by span_format.js). 
* @@ -74,7 +63,6 @@ describe('OpenTelemetry Traces', () => { let validatorCalled = false sinon.stub(http, 'request').callsFake((options, callback) => { - // Only intercept OTLP traces requests if (options.path && options.path.includes('/v1/traces')) { capturedHeaders = options.headers const mockReq = { @@ -91,8 +79,6 @@ describe('OpenTelemetry Traces', () => { callback({ statusCode: 200, on: () => {}, once: () => {}, setTimeout: () => {} }) return mockReq } - - // For other requests (remote config, DD agent, etc), return a basic mock const mockReq = { write: () => {}, end: () => {}, @@ -111,19 +97,23 @@ describe('OpenTelemetry Traces', () => { } } + /** + * Builds an OtlpHttpTraceExporter from a fresh config derived from the current + * process.env. Does NOT initialize the full tracer — this avoids leaking + * process-level listeners across tests. + * + * @param {object} [extraEnv] - Extra environment variables for this one build + * @returns {OtlpHttpTraceExporter} + */ + function buildExporter (extraEnv) { + if (extraEnv) Object.assign(process.env, extraEnv) + return createOtlpTraceExporter(getConfigFresh()) + } + beforeEach(() => { originalEnv = { ...process.env } - // Clear OTEL env vars that may be set by the host environment (e.g. Claude Code telemetry) - // to prevent test pollution. afterEach restores the original env. - delete process.env.OTEL_TRACES_EXPORTER - delete process.env.OTEL_EXPORTER_OTLP_ENDPOINT - delete process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - delete process.env.OTEL_EXPORTER_OTLP_PROTOCOL - delete process.env.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL - delete process.env.OTEL_EXPORTER_OTLP_HEADERS - delete process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS - delete process.env.OTEL_EXPORTER_OTLP_TIMEOUT - delete process.env.OTEL_EXPORTER_OTLP_TRACES_TIMEOUT + // Clear OTEL env vars that may be set by the host environment to prevent test pollution. 
+ for (const key of OTEL_ENV_KEYS) delete process.env[key] }) afterEach(() => { @@ -250,6 +240,31 @@ describe('OpenTelemetry Traces', () => { }) }) + it('combines error.type and error.message in status message', () => { + const transformer = new OtlpTraceTransformer({}) + + const span = createMockSpan({ + error: 1, + meta: { 'error.type': 'TypeError', 'error.message': 'cannot read properties' }, + }) + const decoded = decodePayload(transformer.transformSpans([span])) + assert.deepStrictEqual(decoded.resourceSpans[0].scopeSpans[0].spans[0].status, { + code: 2, + message: 'TypeError: cannot read properties', + }) + }) + + it('falls back to error.type when no error.message is present', () => { + const transformer = new OtlpTraceTransformer({}) + + const span = createMockSpan({ error: 1, meta: { 'error.type': 'TypeError' } }) + const decoded = decodePayload(transformer.transformSpans([span])) + assert.deepStrictEqual(decoded.resourceSpans[0].scopeSpans[0].spans[0].status, { + code: 2, + message: 'TypeError', + }) + }) + it('omits parentSpanId for root spans (zero parent ID)', () => { const transformer = new OtlpTraceTransformer({}) const span = createMockSpan({ parent_id: id('0') }) @@ -456,9 +471,7 @@ describe('OpenTelemetry Traces', () => { assert.strictEqual(otlpSpan.name, '/api/test') }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ OTEL_TRACES_EXPORTER: 'otlp' }) const span = createMockSpan({ name: 'http.request' }) exporter.export([span]) @@ -469,69 +482,62 @@ describe('OpenTelemetry Traces', () => { assert.strictEqual(headers['Content-Type'], 'application/json') }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ OTEL_TRACES_EXPORTER: 'otlp' }) exporter.export([createMockSpan()]) }) it('includes custom headers from OTEL_EXPORTER_OTLP_TRACES_HEADERS', () => { 
- process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS = 'x-api-key=secret123' - mockOtlpExport((decoded, headers) => { assert.strictEqual(headers['x-api-key'], 'secret123') }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ + OTEL_TRACES_EXPORTER: 'otlp', + OTEL_EXPORTER_OTLP_TRACES_HEADERS: 'x-api-key=secret123', + }) exporter.export([createMockSpan()]) }) it('includes multiple comma-separated custom headers from OTEL_EXPORTER_OTLP_TRACES_HEADERS', () => { - process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS = 'x-api-key=secret123,other-config-value=value' - mockOtlpExport((decoded, headers) => { assert.strictEqual(headers['x-api-key'], 'secret123') assert.strictEqual(headers['other-config-value'], 'value') }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ + OTEL_TRACES_EXPORTER: 'otlp', + OTEL_EXPORTER_OTLP_TRACES_HEADERS: 'x-api-key=secret123,other-config-value=value', + }) exporter.export([createMockSpan()]) }) it('includes custom headers from OTEL_EXPORTER_OTLP_HEADERS when traces-specific header is not set', () => { - delete process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS - process.env.OTEL_EXPORTER_OTLP_HEADERS = 'x-generic-key=generic-value' - mockOtlpExport((decoded, headers) => { assert.strictEqual(headers['x-generic-key'], 'generic-value') }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ + OTEL_TRACES_EXPORTER: 'otlp', + OTEL_EXPORTER_OTLP_HEADERS: 'x-generic-key=generic-value', + }) exporter.export([createMockSpan()]) }) it('uses OTEL_EXPORTER_OTLP_TRACES_HEADERS over OTEL_EXPORTER_OTLP_HEADERS when both are set', () => { - process.env.OTEL_EXPORTER_OTLP_HEADERS = 'x-generic-key=generic-value' - process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS = 
'x-traces-key=traces-value' - mockOtlpExport((decoded, headers) => { assert.strictEqual(headers['x-traces-key'], 'traces-value') assert.strictEqual(headers['x-generic-key'], undefined) }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ + OTEL_TRACES_EXPORTER: 'otlp', + OTEL_EXPORTER_OTLP_HEADERS: 'x-generic-key=generic-value', + OTEL_EXPORTER_OTLP_TRACES_HEADERS: 'x-traces-key=traces-value', + }) exporter.export([createMockSpan()]) }) @@ -543,9 +549,7 @@ describe('OpenTelemetry Traces', () => { return { write: () => {}, end: () => {}, on: () => {}, once: () => {}, setTimeout: () => {} } }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = new OtlpHttpTraceExporter('http://localhost:4318/v1/traces', {}, 1000, {}) exporter.export([]) assert(!exportCalled, 'No HTTP request should be made for empty span arrays') @@ -558,8 +562,7 @@ describe('OpenTelemetry Traces', () => { return { write: () => {}, end: () => {}, on: () => {}, once: () => {}, setTimeout: () => {} } }) - const tracer = setupTracer() - const exporter = tracer._tracer._processor._exporter + const exporter = new OtlpHttpTraceExporter('http://localhost:4318/v1/traces', {}, 1000, {}) exporter.export([createMockSpan({ metrics: { _sampling_priority_v1: 0 } })]) assert(!exportCalled, 'No HTTP request should be made for rejected traces') @@ -572,22 +575,26 @@ describe('OpenTelemetry Traces', () => { return { write: () => {}, end: () => {}, on: () => {}, once: () => {}, setTimeout: () => {} } }) - const tracer = setupTracer() - const exporter = tracer._tracer._processor._exporter + const exporter = new OtlpHttpTraceExporter('http://localhost:4318/v1/traces', {}, 1000, {}) exporter.export([createMockSpan({ metrics: { _sampling_priority_v1: -1 } })]) assert(!exportCalled, 'No HTTP request should be made for user-rejected traces') 
}) - it('replaces the original DD Agent exporter', () => { - mockOtlpExport(() => {}) - - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + it('DatadogTracer uses the OTLP exporter when OTEL_TRACES_EXPORTER=otlp', () => { + process.env.OTEL_TRACES_EXPORTER = 'otlp' + const DatadogTracer = proxyquire.noPreserveCache()('../../src/opentracing/tracer', {}) + const tracer = new DatadogTracer(getConfigFresh()) + assert(tracer._exporter instanceof OtlpHttpTraceExporter, + 'Exporter should be the OTLP exporter when OTEL_TRACES_EXPORTER=otlp') + }) - const OtlpHttpTraceExporter = require('../../src/opentelemetry/trace/otlp_http_trace_exporter') - assert(exporter instanceof OtlpHttpTraceExporter, 'Exporter should be the OTLP exporter, not a wrapper') + it('DatadogTracer does not use the OTLP exporter when OTEL_TRACES_EXPORTER is not otlp', () => { + delete process.env.OTEL_TRACES_EXPORTER + const DatadogTracer = proxyquire.noPreserveCache()('../../src/opentracing/tracer', {}) + const tracer = new DatadogTracer(getConfigFresh()) + assert(!(tracer._exporter instanceof OtlpHttpTraceExporter), + 'Exporter should not be the OTLP exporter when OTEL_TRACES_EXPORTER is not otlp') }) }) @@ -595,43 +602,28 @@ describe('OpenTelemetry Traces', () => { // Only http/json is currently supported. Other protocols (grpc, http/protobuf) // are not yet implemented and will be added in a future release. 
it('uses default http/json protocol', () => { - delete process.env.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL - delete process.env.OTEL_EXPORTER_OTLP_PROTOCOL - - const tracer = setupTracer() - const config = tracer._tracer._config - assert.strictEqual(config.otelTracesProtocol, 'http/json') + const config = getConfigFresh() + assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL, 'http/json') }) it('uses port 4318 for default OTLP HTTP endpoint', () => { - delete process.env.OTEL_EXPORTER_OTLP_TRACES_PROTOCOL - delete process.env.OTEL_EXPORTER_OTLP_PROTOCOL - const config = getConfigFresh() - assert(config.otelTracesUrl.includes(':4318'), `expected port 4318 in URL, got: ${config.otelTracesUrl}`) + const endpoint = config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + assert(endpoint.includes(':4318'), `expected port 4318 in URL, got: ${endpoint}`) }) it('respects explicit traces-specific endpoint as-is', () => { process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = 'http://custom-collector:9999' const config = getConfigFresh() - assert.strictEqual(config.otelTracesUrl, 'http://custom-collector:9999') - }) - - it('appends /v1/traces to generic OTEL_EXPORTER_OTLP_ENDPOINT with no path', () => { - delete process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://collector:4318' - - const config = getConfigFresh() - assert.strictEqual(config.otelTracesUrl, 'http://collector:4318/v1/traces') + assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT, 'http://custom-collector:9999') }) - it('appends /v1/traces to generic OTEL_EXPORTER_OTLP_ENDPOINT with a custom path', () => { - delete process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + it('appends /v1/traces to the generic OTEL_EXPORTER_OTLP_ENDPOINT base URL', () => { process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://collector:4318/custom' const config = getConfigFresh() - assert.strictEqual(config.otelTracesUrl, 'http://collector:4318/custom/v1/traces') + 
assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT, 'http://collector:4318/custom/v1/traces') }) it('traces-specific endpoint takes precedence over generic endpoint', () => { @@ -639,22 +631,27 @@ describe('OpenTelemetry Traces', () => { process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = 'http://traces-specific:9999' const config = getConfigFresh() - assert.strictEqual(config.otelTracesUrl, 'http://traces-specific:9999') + assert.strictEqual(config.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT, 'http://traces-specific:9999') }) - // Note: Configuration env var tests are skipped due to test setup complexity. - // The configuration mapping works correctly (verified in config/index.js), - // but the test setup doesn't properly reload config between tests. - // The implementation correctly reads OTEL_EXPORTER_OTLP_TRACES_* env vars - // with fallback to OTEL_EXPORTER_OTLP_* generic vars. + it('exporter setUrl preserves a bare URL as-is without adding a signal path', () => { + const exporter = new OtlpHttpTraceExporter('http://collector:4318', {}, 1000, {}) + assert.strictEqual(exporter.options.path, '/') + }) - it('does not initialize OTLP trace export when disabled', () => { - const tracer = setupTracer(false) - const processor = tracer._tracer._processor - const exporter = processor._exporter + it('exporter setUrl preserves an explicit signal-specific path as-is', () => { + const exporter = new OtlpHttpTraceExporter('http://collector:4318/custom', {}, 1000, {}) + assert.strictEqual(exporter.options.path, '/custom') + }) - // When disabled, the exporter should be the original (not wrapped) - assert(!exporter._originalExporter, 'Exporter should not be wrapped when OTLP traces are disabled') + it('exporter setUrl preserves a trailing-slash signal-specific path', () => { + const exporter = new OtlpHttpTraceExporter('http://collector:4318/v1/traces/', {}, 1000, {}) + assert.strictEqual(exporter.options.path, '/v1/traces/') + }) + + it('exporter setUrl keeps /v1/traces when 
already present', () => { + const exporter = new OtlpHttpTraceExporter('http://collector:4318/v1/traces', {}, 1000, {}) + assert.strictEqual(exporter.options.path, '/v1/traces') }) it('exports resource with service, version, env, and hostname', () => { @@ -674,14 +671,14 @@ describe('OpenTelemetry Traces', () => { { 'service.name': resourceAttrs['service.name'], 'service.version': resourceAttrs['service.version'], - 'deployment.environment': resourceAttrs['deployment.environment'], + 'deployment.environment.name': resourceAttrs['deployment.environment.name'], 'telemetry.sdk.name': resourceAttrs['telemetry.sdk.name'], 'telemetry.sdk.language': resourceAttrs['telemetry.sdk.language'], }, { 'service.name': 'my-trace-service', 'service.version': 'v2.0.0', - 'deployment.environment': 'staging', + 'deployment.environment.name': 'staging', 'telemetry.sdk.name': 'datadog', 'telemetry.sdk.language': 'nodejs', } @@ -689,9 +686,7 @@ describe('OpenTelemetry Traces', () => { assert.ok(resourceAttrs['telemetry.sdk.version'], 'telemetry.sdk.version should be set') }) - const tracer = setupTracer() - const processor = tracer._tracer._processor - const exporter = processor._exporter + const exporter = buildExporter({ OTEL_TRACES_EXPORTER: 'otlp' }) exporter.export([createMockSpan()]) }) @@ -708,10 +703,38 @@ describe('OpenTelemetry Traces', () => { }), }) - const exporter = new MockedExporter('http://localhost:4318/v1/traces', '', 1000, {}) + const exporter = new MockedExporter('http://localhost:4318/v1/traces', {}, 1000, {}) exporter.export([createMockSpan()]) assert(telemetryMetrics.manager.namespace().count().inc.calledWith(1)) }) }) + + describe('setUrl', () => { + it('retargets hostname and port and preserves an explicit custom path as-is', () => { + const exporter = new OtlpHttpTraceExporter('http://localhost:4318/v1/traces', {}, 1000, {}) + + exporter.setUrl('http://otel-collector:9999/custom/path') + + assert.strictEqual(exporter.options.hostname, 'otel-collector') + 
assert.strictEqual(exporter.options.port, '9999') + assert.strictEqual(exporter.options.path, '/custom/path') + }) + + it('uses a bare URL as-is without adding a signal path', () => { + const exporter = new OtlpHttpTraceExporter('http://localhost:4318/v1/traces', {}, 1000, {}) + + exporter.setUrl('http://otel-collector:9999') + + assert.strictEqual(exporter.options.path, '/') + }) + + it('keeps /v1/traces when already present and preserves the query string', () => { + const exporter = new OtlpHttpTraceExporter('http://localhost:4318/v1/traces', {}, 1000, {}) + + exporter.setUrl('http://otel-collector:9999/v1/traces?token=abc') + + assert.strictEqual(exporter.options.path, '/v1/traces?token=abc') + }) + }) }) diff --git a/packages/dd-trace/test/plugins/externals.js b/packages/dd-trace/test/plugins/externals.js index 011eab892c7..dcfdc7b2e88 100644 --- a/packages/dd-trace/test/plugins/externals.js +++ b/packages/dd-trace/test/plugins/externals.js @@ -391,6 +391,12 @@ module.exports = { versions: ['>=2'], }, ], + 'light-my-request': [ + { + name: 'fastify', + versions: ['>=3'], + }, + ], lodash: [ { name: 'lodash', @@ -413,6 +419,12 @@ module.exports = { versions: ['>=2.0.1'], }, ], + modelcontextprotocol_sdk: [ + { + name: '@modelcontextprotocol/sdk', + versions: ['>=1.27.1'], + }, + ], moleculer: [ { name: 'bluebird', diff --git a/packages/dd-trace/test/plugins/util/url.spec.js b/packages/dd-trace/test/plugins/util/url.spec.js index 510c4a809d6..fb13049cc46 100644 --- a/packages/dd-trace/test/plugins/util/url.spec.js +++ b/packages/dd-trace/test/plugins/util/url.spec.js @@ -54,7 +54,7 @@ describe('plugins/util/url', () => { assert.strictEqual(result, 'https://secure.example.com/secure/path') }) - it('should extract full URL from HTTPS request with connection.encrypted', () => { + it('should not read `connection.encrypted` (deprecated alias for `socket.encrypted`)', () => { const req = { headers: { host: 'secure.example.com', @@ -65,7 +65,7 @@ 
describe('plugins/util/url', () => { } const result = url.extractURL(req) - assert.strictEqual(result, 'https://secure.example.com/secure/path') + assert.strictEqual(result, 'http://secure.example.com/secure/path') }) it('should extract full URL from HTTP/2 request', () => { diff --git a/packages/dd-trace/test/plugins/versions/package.json b/packages/dd-trace/test/plugins/versions/package.json index c91739244a8..9f506b67243 100644 --- a/packages/dd-trace/test/plugins/versions/package.json +++ b/packages/dd-trace/test/plugins/versions/package.json @@ -23,6 +23,7 @@ "@aws-sdk/smithy-client": "3.374.0", "@azure/event-hubs": "6.0.2", "@azure/functions": "4.11.0", + "@modelcontextprotocol/sdk": "1.27.1", "durable-functions": "3.3.0", "@azure/service-bus": "7.9.5", "@confluentinc/kafka-javascript": "1.8.0", diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 60361002fb6..24fc5bd2f4d 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -611,7 +611,8 @@ describe('config', () => { }) if (warning) { - assert.match(compressionWarnings.join('\n'), new RegExp(RegExp.escape(warning))) + const joined = compressionWarnings.join('\n') + assert.ok(joined.includes(warning), `Expected warning "${warning}" in:\n${joined}`) } else { assert.deepStrictEqual(compressionWarnings, []) } diff --git a/packages/dd-trace/test/setup/helpers/plugin-test-helpers/index.js b/packages/dd-trace/test/setup/helpers/plugin-test-helpers/index.js index e8f2c405c86..c82b4bd2703 100644 --- a/packages/dd-trace/test/setup/helpers/plugin-test-helpers/index.js +++ b/packages/dd-trace/test/setup/helpers/plugin-test-helpers/index.js @@ -24,6 +24,7 @@ function createIntegrationTestSuite (pluginName, packageName, options, testCallb before(async () => { meta.tracer = require('../../../../../dd-trace').init() const mod = require(`../../../../../../versions/${packageName}@${version}`) + 
meta.versionMod = mod meta.mod = options.subModule ? mod.get(options.subModule) : mod.get() }) diff --git a/packages/dd-trace/test/telemetry/send-data.spec.js b/packages/dd-trace/test/telemetry/send-data.spec.js index a9cc9a2c0c4..f2139d60148 100644 --- a/packages/dd-trace/test/telemetry/send-data.spec.js +++ b/packages/dd-trace/test/telemetry/send-data.spec.js @@ -158,11 +158,10 @@ describe('sendData', () => { }) it('uses the CI Visibility agentless intake when agentless mode is enabled', () => { - process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED = '1' - sendDataModule.sendData( { isCiVisibility: true, + DD_CIVISIBILITY_AGENTLESS_ENABLED: true, tags: { 'runtime-id': '123' }, site: 'datadoghq.eu', }, @@ -179,6 +178,5 @@ describe('sendData', () => { }) const { url } = options assert.deepStrictEqual(url, new URL('https://instrumentation-telemetry-intake.datadoghq.eu')) - delete process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED }) }) diff --git a/scripts/flakiness.mjs b/scripts/flakiness.mjs index 1ed27d1e13e..8d68c00bbbf 100644 --- a/scripts/flakiness.mjs +++ b/scripts/flakiness.mjs @@ -184,15 +184,22 @@ if (Object.keys(flaky).length === 0) { if (!reported.has(workflow)) continue markdown += `* ${workflow}\n` - slack += String.raw` ● ${workflow}\n` - for (const [job, urls] of Object.entries(jobs).sort()) { - if (urls.length < OCCURRENCES) continue + const reportedJobs = Object.entries(jobs).filter(([, urls]) => urls.length >= OCCURRENCES) + const failedJobCount = reportedJobs.length + const totalFailedRuns = reportedJobs.reduce((sum, [, urls]) => sum + urls.length, 0) + const maxJobFailures = Math.max(...reportedJobs.map(([, urls]) => urls.length)) + const workflowBadge = (failedJobCount > 3 || maxJobFailures > 3) + ? ' 🔴' + : (failedJobCount >= 2 || maxJobFailures >= 2) ? 
' 🟡' : '' + + slack += String.raw` ● ${workflow} (${failedJobCount} jobs, ${totalFailedRuns} flakes)${workflowBadge}\n` + + for (const [job, urls] of reportedJobs.sort()) { // Padding is needed because Slack doesn't show single digits as links. const markdownLinks = urls.map((url, idx) => `[${String(idx + 1).padStart(2, '0')}](${url})`) const runsBadge = urls.length >= 3 ? ' 🔴' : urls.length === 2 ? ' 🟡' : '' markdown += ` * ${job} (${markdownLinks.join(', ')})${runsBadge}\n` - slack += String.raw` ○ ${job} (${urls.length})${runsBadge}\n` } } diff --git a/scripts/generate-config-types.js b/scripts/generate-config-types.js index f3654d605b4..9a52e6df2bc 100644 --- a/scripts/generate-config-types.js +++ b/scripts/generate-config-types.js @@ -15,6 +15,11 @@ const OUTPUT_PATH = path.join( '..', 'packages/dd-trace/src/config/generated-config-types.d.ts' ) +const CONFIG_INDEX_PATH = path.join( + __dirname, + '..', + 'packages/dd-trace/src/config/index.js' +) const BASE_TYPES = { array: 'string[]', @@ -54,8 +59,78 @@ function getPropertyName (canonicalName, entry) { return configurationNames?.[0] ?? canonicalName } -function withUndefined (type, entry) { - return entry.default === null ? `${type} | undefined` : type +const FALLBACK_PATTERN = + /if\s*\(\s*!\s*this\.([\w.]+)\s*\)\s*\{[\s\S]*?setAndTrack\s*\(\s*this\s*,\s*['"]([\w.]+)['"]\s*,/g + +// Expression whose tail (after any top-level `||`/`??`, or the whole expression) is a string or +// template literal — i.e. the result is guaranteed defined at runtime. +const GUARANTEED_DEFINED = /(?:^|\|\||\?\?)\s*(?:'[^']*'|"[^"]*"|`(?:\$\{[^}`]*\}|[^`])*`)\s*$/ + +// Returns the index right after the `close` that balances the `open` preceding `start`, or -1 if +// unbalanced. Skips over string and template literals so their contents don't affect depth. 
+function balancedEnd (s, start, open, close) { + let depth = 1 + let i = start + while (i < s.length) { + const ch = s[i] + if (ch === open) { + depth++ + i++ + } else if (ch === close) { + i++ + if (--depth === 0) return i + } else if (ch === '"' || ch === '\'' || ch === '`') { + i = skipQuoted(s, i, ch) + } else { + i++ + } + } + return -1 +} + +function skipQuoted (s, i, quote) { + const isTemplate = quote === '`' + i++ + while (i < s.length) { + if (s[i] === '\\') { i += 2; continue } + if (s[i] === quote) return i + 1 + if (isTemplate && s[i] === '$' && s[i + 1] === '{') { + i = balancedEnd(s, i + 2, '{', '}') + if (i === -1) return s.length + continue + } + i++ + } + return i +} + +function findCalculatedFallbackProperties () { + const source = readFileSync(CONFIG_INDEX_PATH, 'utf8') + const marker = /#applyCalculated\s*\(\s*\)\s*\{/.exec(source) + if (!marker) throw new Error('Could not locate #applyCalculated() in config/index.js') + + const bodyStart = marker.index + marker[0].length + const body = source.slice(bodyStart, balancedEnd(source, bodyStart, '{', '}') - 1) + + const properties = new Set() + let match + while ((match = FALLBACK_PATTERN.exec(body)) !== null) { + if (match[1] !== match[2]) continue + const valueStart = match.index + match[0].length + const valueEnd = balancedEnd(body, valueStart, '(', ')') + if (valueEnd === -1) continue + const value = body.slice(valueStart, valueEnd - 1).trim() + if (GUARANTEED_DEFINED.test(value)) properties.add(match[1]) + } + return properties +} + +const CALCULATED_FALLBACK_PROPERTIES = findCalculatedFallbackProperties() + +function withUndefined (type, entry, propertyName) { + if (entry.default !== null) return type + if (CALCULATED_FALLBACK_PROPERTIES.has(propertyName)) return type + return `${type} | undefined` } function getAllowedType (entry) { @@ -93,7 +168,7 @@ function getTypeForEntry (propertyName, entry) { throw new Error(`Unsupported configuration type for ${propertyName}: ${entry.type}`) } - 
return withUndefined(override, entry) + return withUndefined(override, entry, propertyName) } function addProperty (root, propertyName, type) { diff --git a/scripts/verify-exercised-tests.js b/scripts/verify-exercised-tests.js index d2949658548..c7b8937b7bd 100644 --- a/scripts/verify-exercised-tests.js +++ b/scripts/verify-exercised-tests.js @@ -587,6 +587,43 @@ function expandLocalCompositeActionRuns (repoRoot, uses, env, visiting) { return out } +/** + * Returns all combinations of matrix scalar/array values (ignores include/exclude). + * @param {Record} matrix + * @returns {Record[]} + */ +function getMatrixCombinations (matrix) { + const keys = Object.keys(matrix).filter(k => Array.isArray(matrix[k])) + if (keys.length === 0) return [{}] + + /** @type {Record[]} */ + let combinations = [{}] + for (const key of keys) { + const values = /** @type {unknown[]} */ (matrix[key]) + /** @type {Record[]} */ + const next = [] + for (const combo of combinations) { + for (const val of values) { + next.push({ ...combo, [key]: String(val) }) + } + } + combinations = next + } + return combinations +} + +/** + * Expands `${{ matrix.X }}` expressions in a string using the given matrix values. + * @param {string} s + * @param {Record} matrixValues + * @returns {string} + */ +function expandMatrixExpressions (s, matrixValues) { + return s.replaceAll(/\$\{\{\s*matrix\.([A-Za-z_][A-Za-z0-9_]*)\s*\}\}/g, (_m, name) => { + return Object.hasOwn(matrixValues, name) ? matrixValues[name] : _m + }) +} + /** * @param {string} repoRoot * @returns {{ workflowFile: string, jobId: string, run: string, env: Record }[]} @@ -608,6 +645,11 @@ function collectWorkflowRuns (repoRoot) { const jobEnv = isPlainObject(job.env) ? job.env : {} const steps = Array.isArray(job.steps) ? job.steps : [] + const matrixData = isPlainObject(job.strategy) && isPlainObject(job.strategy.matrix) + ? 
/** @type {Record} */ (job.strategy.matrix) + : {} + const matrixCombinations = getMatrixCombinations(matrixData) + for (const stepVal of steps) { const step = isPlainObject(stepVal) ? stepVal : {} @@ -621,20 +663,30 @@ function collectWorkflowRuns (repoRoot) { } if (typeof step.run === 'string') { - // Inline env in `run:` (export lines and prefix assignments before yarn/npm). - const exports = parseExportAssignments(step.run) - for (const [k, v] of Object.entries(exports)) env[k] = v - - const idxYarn = step.run.indexOf('yarn ') - const idxNpm = step.run.indexOf('npm ') - const idx = idxYarn === -1 ? idxNpm : (idxNpm === -1 ? idxYarn : Math.min(idxYarn, idxNpm)) - if (idx > 0) { - const prefix = step.run.slice(0, idx) - const assigns = parseInlineAssignments(prefix) - for (const [k, v] of Object.entries(assigns)) env[k] = v - } + // Expand matrix expressions and emit one entry per combination. + const seenRuns = new Set() + for (const combo of matrixCombinations) { + const run = expandMatrixExpressions(step.run, combo) + if (seenRuns.has(run)) continue + seenRuns.add(run) + + const stepEnv = { ...env } + + // Inline env in `run:` (export lines and prefix assignments before yarn/npm). + const exports = parseExportAssignments(run) + for (const [k, v] of Object.entries(exports)) stepEnv[k] = v + + const idxYarn = run.indexOf('yarn ') + const idxNpm = run.indexOf('npm ') + const idx = idxYarn === -1 ? idxNpm : (idxNpm === -1 ? 
idxYarn : Math.min(idxYarn, idxNpm)) + if (idx > 0) { + const prefix = run.slice(0, idx) + const assigns = parseInlineAssignments(prefix) + for (const [k, v] of Object.entries(assigns)) stepEnv[k] = v + } - out.push({ workflowFile: wf, jobId, run: step.run, env }) + out.push({ workflowFile: wf, jobId, run, env: stepEnv }) + } continue } diff --git a/yarn.lock b/yarn.lock index 8766c083e77..a1f40d27f22 100644 --- a/yarn.lock +++ b/yarn.lock @@ -17,10 +17,10 @@ dependencies: "@actions/io" "^3.0.2" -"@actions/github@^9.0.0": - version "9.0.0" - resolved "https://registry.yarnpkg.com/@actions/github/-/github-9.0.0.tgz#c86dae4128b2a6987271e2663bee9e766464840a" - integrity sha512-yJ0RoswsAaKcvkmpCE4XxBRiy/whH2SdTBHWzs0gi4wkqTDhXMChjSdqBz/F4AeiDlP28rQqL33iHb+kjAMX6w== +"@actions/github@^9.1.0": + version "9.1.0" + resolved "https://registry.yarnpkg.com/@actions/github/-/github-9.1.0.tgz#06f5b292eb207323d3f4381d30d3a12e3ba7e863" + integrity sha512-u0hDGQeCS+7VNoLA8hYG65RLdPLMaPGfka0sZ0up7P0AiShqfX6xcuXNteGkQ7X7Tod7AMNwHd4p7DS63i8zzA== dependencies: "@actions/http-client" "^3.0.2" "@octokit/core" "^7.0.6" @@ -250,25 +250,25 @@ module-details-from-path "^1.0.3" node-gyp-build "^4.5.0" -"@emnapi/core@^1.7.1": - version "1.8.1" - resolved "https://registry.yarnpkg.com/@emnapi/core/-/core-1.8.1.tgz#fd9efe721a616288345ffee17a1f26ac5dd01349" - integrity sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg== +"@emnapi/core@1.9.2": + version "1.9.2" + resolved "https://registry.yarnpkg.com/@emnapi/core/-/core-1.9.2.tgz#3870265ecffc7352d01ead62d8d83d8358a2d034" + integrity sha512-UC+ZhH3XtczQYfOlu3lNEkdW/p4dsJ1r/bP7H8+rhao3TTTMO1ATq/4DdIi23XuGoFY+Cz0JmCbdVl0hz9jZcA== dependencies: - "@emnapi/wasi-threads" "1.1.0" + "@emnapi/wasi-threads" "1.2.1" tslib "^2.4.0" -"@emnapi/runtime@^1.7.1": - version "1.8.1" - resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.8.1.tgz#550fa7e3c0d49c5fb175a116e8cd70614f9a22a5" - integrity 
sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg== +"@emnapi/runtime@1.9.2": + version "1.9.2" + resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.9.2.tgz#8b469a3db160817cadb1de9050211a9d1ea84fa2" + integrity sha512-3U4+MIWHImeyu1wnmVygh5WlgfYDtyf0k8AbLhMFxOipihf6nrWC4syIm/SwEeec0mNSafiiNnMJwbza/Is6Lw== dependencies: tslib "^2.4.0" -"@emnapi/wasi-threads@1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz#60b2102fddc9ccb78607e4a3cf8403ea69be41bf" - integrity sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ== +"@emnapi/wasi-threads@1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@emnapi/wasi-threads/-/wasi-threads-1.2.1.tgz#28fed21a1ba1ce797c44a070abc94d42f3ae8548" + integrity sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w== dependencies: tslib "^2.4.0" @@ -446,13 +446,11 @@ resolved "https://registry.yarnpkg.com/@msgpack/msgpack/-/msgpack-3.1.3.tgz#c4bff2b9539faf0882f3ee03537a7e9a4b3a7864" integrity sha512-47XIizs9XZXvuJgoaJUIE2lFoID8ugvc0jzSHP+Ptfk8nTbnR8g788wv48N03Kx0UkAv559HWRQ3yzOgzlRNUA== -"@napi-rs/wasm-runtime@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.1.tgz#c3705ab549d176b8dc5172723d6156c3dc426af2" - integrity sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A== +"@napi-rs/wasm-runtime@^1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.4.tgz#a46bbfedc29751b7170c5d23bc1d8ee8c7e3c1e1" + integrity sha512-3NQNNgA1YSlJb/kMH1ildASP9HW7/7kYnRI2szWJaofaS1hWmbGI4H+d3+22aGzXXN9IJ+n+GiFVcGipJP18ow== dependencies: - "@emnapi/core" "^1.7.1" - "@emnapi/runtime" "^1.7.1" "@tybys/wasm-util" "^0.10.1" "@octokit/app@^16.1.2": @@ -694,172 +692,174 @@ resolved 
"https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe" integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== -"@oven/bun-darwin-aarch64@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-darwin-aarch64/-/bun-darwin-aarch64-1.3.11.tgz#b283fd604e0afbd697e1d1f6ece0d69fdc74a05f" - integrity sha512-/8IzqSu4/OWGRs7Fs2ROzGVwJMFTBQkgAp6sAthkBYoN7OiM4rY/CpPVs2X9w9N1W61CHSkEdNKi8HrLZKfK3g== - -"@oven/bun-darwin-x64-baseline@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-darwin-x64-baseline/-/bun-darwin-x64-baseline-1.3.11.tgz#667ce87388a668f49607f92aa999087c51ab83b7" - integrity sha512-CYjIHWaQG7T4phfjErHr6BiXRs0K/9DqMeiohJmuYSBF+H2m56vFslOenLCguGYQL9jeiiCZBeoVCpwjxZrMgQ== - -"@oven/bun-darwin-x64@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-darwin-x64/-/bun-darwin-x64-1.3.11.tgz#843f4fa8f1969086cc53294ffaeefe0c7ee63bd2" - integrity sha512-TT7eUihnAzxM2tlZesusuC75PAOYKvUBgVU/Nm/lakZ/DpyuqhNkzUfcxSgmmK9IjVWzMmezLIGZl16XGCGJng== - -"@oven/bun-linux-aarch64-musl@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-linux-aarch64-musl/-/bun-linux-aarch64-musl-1.3.11.tgz#8776f3b7148bc73b0db463f72a41dad4f09adb32" - integrity sha512-jBwYCLG5Eb+PqtFrc3Wp2WMYlw1Id75gUcsdP+ApCOpf5oQhHxkFWCjZmcDoioDmEhMWAiM3wtwSrTlPg+sI6Q== - -"@oven/bun-linux-aarch64@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-linux-aarch64/-/bun-linux-aarch64-1.3.11.tgz#3324dfdd6054656d00a6343ba1d6f46d187ee953" - integrity sha512-8XMLyRNxHF4jfLajkWt+F8UDxsWbzysyxQVMZKUXwoeGvaxB0rVd07r3YbgDtG8U6khhRFM3oaGp+CQ0whwmdA== - -"@oven/bun-linux-x64-baseline@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64-baseline/-/bun-linux-x64-baseline-1.3.11.tgz#3e7a943ef1d6cbdf3668904090cb7a7d54fdd3e7" - integrity 
sha512-KZlf1jKtf4jai8xiQv/0XRjxVVhHnw/HtUKtLdOeQpTOQ1fQFhLoz2FGGtVRd0LVa/yiRbSz9HlWIzWlmJClng== - -"@oven/bun-linux-x64-musl-baseline@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64-musl-baseline/-/bun-linux-x64-musl-baseline-1.3.11.tgz#6e965eae97bf79476157047379f711535bfa5c00" - integrity sha512-J+qz4Al05PrNIOdj7xsWVTyx0c/gjUauG5nKV3Rrx0Q+5JO+1pPVlnfNmWbOF9pKG4f3IGad8KXJUfGMORld+Q== - -"@oven/bun-linux-x64-musl@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64-musl/-/bun-linux-x64-musl-1.3.11.tgz#feddb578aef38005565a0ffc996401bce25fc657" - integrity sha512-ADImD4yCHNpqZu718E2chWcCaAHvua90yhmpzzV6fF4zOhwkGGbPCgUWmKyJ83uz+DXaPdYxX0ttDvtolrzx3Q== - -"@oven/bun-linux-x64@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64/-/bun-linux-x64-1.3.11.tgz#cd3c1b33fd4de821deddb5f90c0c38cf8f11187d" - integrity sha512-z3GFCk1UBzDOOiEBHL32lVP7Edi26BhOjKb6bIc0nRyabbRiyON4++GR0zmd/H5zM5S0+UcXFgCGnD+b8avTLw== - -"@oven/bun-windows-aarch64@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-windows-aarch64/-/bun-windows-aarch64-1.3.11.tgz#f902aeaa2f02d1ca6d53093b31d40244b2a0f5f8" - integrity sha512-UOdkwScHRkGPz+n9ZJU7sTkTvqV7rD1SLCLaru1xH8WRsV7tDorPqNCzEN1msOIiPRK825nvAtEm9UsomO1GsA== - -"@oven/bun-windows-x64-baseline@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-windows-x64-baseline/-/bun-windows-x64-baseline-1.3.11.tgz#60d5ebbebb445cb403cc2cd9959d8481530441f5" - integrity sha512-cCsXK9AQ9Zf18QlVnbrFu2IKfr4sf2sfbErkF2jfCzyCO9Bnhl0KRx63zlN+Ni1xU7gcBLAssgcui5R400N2eA== - -"@oven/bun-windows-x64@1.3.11": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@oven/bun-windows-x64/-/bun-windows-x64-1.3.11.tgz#e042812756a4d6b573b490ec4e640d23c3b7b37f" - integrity sha512-E51tyWDP1l0CbjZYhiUxhDGPaY8Hf5YBREx0PHBff1LM1/q3qsJ6ZvRUa8YbbOO0Ax9QP6GHjD9vf3n6bXZ7QA== - 
-"@oxc-parser/binding-android-arm-eabi@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-android-arm-eabi/-/binding-android-arm-eabi-0.121.0.tgz#a9440638713cdd6541f954a006558c85cc4f9b7c" - integrity sha512-n07FQcySwOlzap424/PLMtOkbS7xOu8nsJduKL8P3COGHKgKoDYXwoAHCbChfgFpHnviehrLWIPX0lKGtbEk/A== - -"@oxc-parser/binding-android-arm64@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.121.0.tgz#e58da1d3983d9292d6d47598a6808f2c92a56ef4" - integrity sha512-/Dd1xIXboYAicw+twT2utxPD7bL8qh7d3ej0qvaYIMj3/EgIrGR+tSnjCUkiCT6g6uTC0neSS4JY8LxhdSU/sA== - -"@oxc-parser/binding-darwin-arm64@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.121.0.tgz#0fb029403980e2f4470ff07e8385e3f2b01c7b01" - integrity sha512-A0jNEvv7QMtCO1yk205t3DWU9sWUjQ2KNF0hSVO5W9R9r/R1BIvzG01UQAfmtC0dQm7sCrs5puixurKSfr2bRQ== - -"@oxc-parser/binding-darwin-x64@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.121.0.tgz#037cc978673883264ded01c912dbcefbd0302ca3" - integrity sha512-SsHzipdxTKUs3I9EOAPmnIimEeJOemqRlRDOp9LIj+96wtxZejF51gNibmoGq8KoqbT1ssAI5po/E3J+vEtXGA== - -"@oxc-parser/binding-freebsd-x64@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.121.0.tgz#6dc183f7dc869a2475cb7e79af58967b3c6c4b64" - integrity sha512-v1APOTkCp+RWOIDAHRoaeW/UoaHF15a60E8eUL6kUQXh+i4K7PBwq2Wi7jm8p0ymID5/m/oC1w3W31Z/+r7HQw== - -"@oxc-parser/binding-linux-arm-gnueabihf@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.121.0.tgz#a3108e318ce2e1654849c4ea7cfa87e66f22e8a3" - integrity sha512-PmqPQuqHZyFVWA4ycr0eu4VnTMmq9laOHZd+8R359w6kzuNZPvmmunmNJ8ybkm769A0nCoVp3TJ6dUz7B3FYIQ== - 
-"@oxc-parser/binding-linux-arm-musleabihf@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.121.0.tgz#190df3a09f183055182c2f1635c8cc7296deeffb" - integrity sha512-vF24htj+MOH+Q7y9A8NuC6pUZu8t/C2Fr/kDOi2OcNf28oogr2xadBPXAbml802E8wRAVfbta6YLDQTearz+jw== - -"@oxc-parser/binding-linux-arm64-gnu@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.121.0.tgz#9bba1a976350714f39f38a0f7fd01e2d2d090e88" - integrity sha512-wjH8cIG2Lu/3d64iZpbYr73hREMgKAfu7fqpXjgM2S16y2zhTfDIp8EQjxO8vlDtKP5Rc7waZW72lh8nZtWrpA== - -"@oxc-parser/binding-linux-arm64-musl@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.121.0.tgz#7eb768a8551a78cf5388f0e24647b6f88914fb7c" - integrity sha512-qT663J/W8yQFw3dtscbEi9LKJevr20V7uWs2MPGTnvNZ3rm8anhhE16gXGpxDOHeg9raySaSHKhd4IGa3YZvuw== - -"@oxc-parser/binding-linux-ppc64-gnu@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.121.0.tgz#f2fb9187417fab075d830098c4c87b48f8115365" - integrity sha512-mYNe4NhVvDBbPkAP8JaVS8lC1dsoJZWH5WCjpw5E+sjhk1R08wt3NnXYUzum7tIiWPfgQxbCMcoxgeemFASbRw== - -"@oxc-parser/binding-linux-riscv64-gnu@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.121.0.tgz#534fe6c5054573fd06773b803e43777da70b96ca" - integrity sha512-+QiFoGxhAbaI/amqX567784cDyyuZIpinBrJNxUzb+/L2aBRX67mN6Jv40pqduHf15yYByI+K5gUEygCuv0z9w== - -"@oxc-parser/binding-linux-riscv64-musl@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.121.0.tgz#a557ef48dce6bb6da1d0035d9fe203b21ff6fd51" - integrity 
sha512-9ykEgyTa5JD/Uhv2sttbKnCfl2PieUfOjyxJC/oDL2UO0qtXOtjPLl7H8Kaj5G7p3hIvFgu3YWvAxvE0sqY+hQ== - -"@oxc-parser/binding-linux-s390x-gnu@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.121.0.tgz#a4779c98e0858af55a48a391e14fec6cc80941dd" - integrity sha512-DB1EW5VHZdc1lIRjOI3bW/wV6R6y0xlfvdVrqj6kKi7Ayu2U3UqUBdq9KviVkcUGd5Oq+dROqvUEEFRXGAM7EQ== - -"@oxc-parser/binding-linux-x64-gnu@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.121.0.tgz#5c5ce9f1d4c37dd85943cd29089b87a79a3373f6" - integrity sha512-s4lfobX9p4kPTclvMiH3gcQUd88VlnkMTF6n2MTMDAyX5FPNRhhRSFZK05Ykhf8Zy5NibV4PbGR6DnK7FGNN6A== - -"@oxc-parser/binding-linux-x64-musl@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.121.0.tgz#7c096e2ec6c53718f38acf4a0947efcfc30db565" - integrity sha512-P9KlyTpuBuMi3NRGpJO8MicuGZfOoqZVRP1WjOecwx8yk4L/+mrCRNc5egSi0byhuReblBF2oVoDSMgV9Bj4Hw== - -"@oxc-parser/binding-openharmony-arm64@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-openharmony-arm64/-/binding-openharmony-arm64-0.121.0.tgz#cb9aaa37e238648a0313016d6f7929b352ca7403" - integrity sha512-R+4jrWOfF2OAPPhj3Eb3U5CaKNAH9/btMveMULIrcNW/hjfysFQlF8wE0GaVBr81dWz8JLgQlsxwctoL78JwXw== - -"@oxc-parser/binding-wasm32-wasi@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.121.0.tgz#e439fec1ea805979a2f75c8167a395568f20d6d6" - integrity sha512-5TFISkPTymKvsmIlKasPVTPuWxzCcrT8pM+p77+mtQbIZDd1UC8zww4CJcRI46kolmgrEX6QpKO8AvWMVZ+ifw== - dependencies: - "@napi-rs/wasm-runtime" "^1.1.1" - -"@oxc-parser/binding-win32-arm64-msvc@0.121.0": - version "0.121.0" - resolved 
"https://registry.yarnpkg.com/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.121.0.tgz#9cbc2116b6f62387a2a5c205b08734c6f67d0104" - integrity sha512-V0pxh4mql4XTt3aiEtRNUeBAUFOw5jzZNxPABLaOKAWrVzSr9+XUaB095lY7jqMf5t8vkfh8NManGB28zanYKw== - -"@oxc-parser/binding-win32-ia32-msvc@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.121.0.tgz#ac0ef57500be7e1af61241e0040d49cd6be1e597" - integrity sha512-4Ob1qvYMPnlF2N9rdmKdkQFdrq16QVcQwBsO8yiPZXof0fHKFF+LmQV501XFbi7lHyrKm8rlJRfQ/M8bZZPVLw== - -"@oxc-parser/binding-win32-x64-msvc@0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.121.0.tgz#142dadc748813ee0445a3a12003214ae28bf2c3f" - integrity sha512-BOp1KCzdboB1tPqoCPXgntgFs0jjeSyOXHzgxVFR7B/qfr3F8r4YDacHkTOUNXtDgM8YwKnkf3rE5gwALYX7NA== - -"@oxc-project/types@^0.121.0": - version "0.121.0" - resolved "https://registry.yarnpkg.com/@oxc-project/types/-/types-0.121.0.tgz#85c497d5dea608212ac041d52c8dd69a0343359c" - integrity sha512-CGtOARQb9tyv7ECgdAlFxi0Fv7lmzvmlm2rpD/RdijOO9rfk/JvB1CjT8EnoD+tjna/IYgKKw3IV7objRb+aYw== +"@oven/bun-darwin-aarch64@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-darwin-aarch64/-/bun-darwin-aarch64-1.3.12.tgz#713b8d27009286cd7853fd4127d586032d4874e0" + integrity sha512-b6CQgT28Jx7uDwMTcGo7WFqUd1+wWTdp8XyPi/4LRcL/R4deKT7cLx/Q2ZCWAiK6ZU7yexoCaIaKun6azjRLVA== + +"@oven/bun-darwin-x64-baseline@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-darwin-x64-baseline/-/bun-darwin-x64-baseline-1.3.12.tgz#c3eba66b98981693c7462a90da09b3df2697617c" + integrity sha512-9jKJNOc9ID3BxPBPR4r1Mp1Wqde89Twi5zo2LoEMLMKbqpvEM/WUGdJ0Vv7OX1QPEqVblFO6NMky5yY7rjDI2w== + +"@oven/bun-darwin-x64@1.3.12": + version "1.3.12" + resolved 
"https://registry.yarnpkg.com/@oven/bun-darwin-x64/-/bun-darwin-x64-1.3.12.tgz#38121f1ce9cc8b3cb4a316949918de76ea6609eb" + integrity sha512-//6W21c+GinAMMmxD2hFrFmJH+ZlEwJYbLzAGqp0mLFTli9y74RMtDgI2n9pCupXSpU1Kr1sSylVW9yNbAG9Xg== + +"@oven/bun-linux-aarch64-musl@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-linux-aarch64-musl/-/bun-linux-aarch64-musl-1.3.12.tgz#9fac298699b9c0e8e2e86b31d52794e7633f924c" + integrity sha512-HWIwFzm5fALd9Lli0CgaKb6xOGqODYyHpUTgkn/IHHuS/f3XDCu71+GgkyvfgCYbPoBSgBOfp5TzhRehPcgxow== + +"@oven/bun-linux-aarch64@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-linux-aarch64/-/bun-linux-aarch64-1.3.12.tgz#0862d4d5cc0b04a5dec3eeca95685171e50e87f5" + integrity sha512-eTru6tk3K4Ya3SSkUqq/LbdEjwPqLlfINmIhRORrCExBdB1tQbk+WYYflaymO61fkrjnMAjmLTGqk/K37RMIGA== + +"@oven/bun-linux-x64-baseline@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64-baseline/-/bun-linux-x64-baseline-1.3.12.tgz#aeb52705a137bfbd08875cb150668cd3a27bae6e" + integrity sha512-0y+lUiQsPvSGsyM/10KtxhVAQ20p6/D+vj01l6vo9gHpYUpyc1L9pSgaPa7SC9TuaiGASlM3Cb62bmSKW0E/3Q== + +"@oven/bun-linux-x64-musl-baseline@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64-musl-baseline/-/bun-linux-x64-musl-baseline-1.3.12.tgz#7bcdae5356d1ec0c97b8596870a5893750dda865" + integrity sha512-jdsnuFD3H0l4AHtf1nInRHYWIMTWqok0aW8WysjzN5Isn6rBTBGK/ZWX6XjdTgDgcuVbVOYHiLUHHrvT9N6psA== + +"@oven/bun-linux-x64-musl@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64-musl/-/bun-linux-x64-musl-1.3.12.tgz#d23aa2f8bb827332ebc3fc82f26ae0116e69441a" + integrity sha512-Zb7T3JxWlArSe44ATO5mtjLCBCt7kenWPl9CYD+zeqq9kHswMv8Cd3h/9uzdv2PA4Flrq57J5XBSuRdStTCXCw== + +"@oven/bun-linux-x64@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-linux-x64/-/bun-linux-x64-1.3.12.tgz#3e9d32b9105fb17e715b72d8fec2f7da1b391f33" + integrity 
sha512-H75bcEn46lMDxd+P+R6Q/jlIKl/YO0ZXaalSyWhQHr7qNmFhQt3rOHurFoCxuwQeqFoToh0JpWVyMVzByZqgBQ== + +"@oven/bun-windows-aarch64@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-windows-aarch64/-/bun-windows-aarch64-1.3.12.tgz#126745ec0e453f85a24fea633d5d3e604b763f96" + integrity sha512-Oq0FIcCgL3JWf/4qRuxI5fxsOGyWJ1j904PDx/1TxxSCWWAu0Hh2o8ck4TcaPVv/3BMc1k6UxqQQKBrdP7a+qQ== + +"@oven/bun-windows-x64-baseline@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-windows-x64-baseline/-/bun-windows-x64-baseline-1.3.12.tgz#1fb01cabc3668aebada45823f38ac191f43870ce" + integrity sha512-rV21md7QWnu3r/shev7IFMh6hX8BJHwofxESAofUT4yH866oCIbcNbzp6+fxrj4oGD8uisP6WoaTCboijv9yYg== + +"@oven/bun-windows-x64@1.3.12": + version "1.3.12" + resolved "https://registry.yarnpkg.com/@oven/bun-windows-x64/-/bun-windows-x64-1.3.12.tgz#00fe1323d37029c1870d89c1f515fb184264c867" + integrity sha512-veSntY7pDLDh4XmxZMwTqxfoEVp0BDdeqCBoWL46/TigtniPtDFSTIWBxa6l/RcGzklUA/uqLqmsK/9cBZAm8Q== + +"@oxc-parser/binding-android-arm-eabi@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-android-arm-eabi/-/binding-android-arm-eabi-0.126.0.tgz#06537ae8f5ff02f9d03073e13e31f97b4403ebfb" + integrity sha512-svyoHt25J4741QJ5aa4R+h0iiBeSRt63Lr3aAZcxy2c/NeSE1IfDeMnSij6rIg7EjxkdlXzz613wUjeCeilBNA== + +"@oxc-parser/binding-android-arm64@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-android-arm64/-/binding-android-arm64-0.126.0.tgz#5b8141d77a74c6858fd8c5c9c027b14094c66754" + integrity sha512-hPEBRKgplp1mG9GkINFsr4JVMDNrGJLOqfDaadTWpAoTnzYR5Rmv8RMvB3hJZpiNvbk1aacopdHUP1pggMQ/cw== + +"@oxc-parser/binding-darwin-arm64@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-darwin-arm64/-/binding-darwin-arm64-0.126.0.tgz#774da50ecb77704f672f895e278ad6cb496f68f1" + integrity 
sha512-ccRpu9sdYmznePJQG5halhs0FW5tw5a8zRSoZXOzM1OjoeZ4jiRRruFiPclsD59edoVAK1l83dvfjWz1nQi6lg== + +"@oxc-parser/binding-darwin-x64@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-darwin-x64/-/binding-darwin-x64-0.126.0.tgz#97e451ecfdfd552aa48f324a71f65d66966ed441" + integrity sha512-CHB4zVjNSKqx8Fw9pHowzQQnjjuq04i4Ng0Avj+DixlwhwAoMYqlFbocYIlbg+q3zOLGlm7vEHm83jqEMitnyg== + +"@oxc-parser/binding-freebsd-x64@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-freebsd-x64/-/binding-freebsd-x64-0.126.0.tgz#f3a1e06410ea67a106fa8a413a20a90f9c775962" + integrity sha512-RQ3nEJdcDKBfBjmLJ3Vl1d0KQERPV1P8eUrnBm7+VTYyoaJSPLVFuPg1mlD1hk3n0/879VLFMfusFkBal4ssWQ== + +"@oxc-parser/binding-linux-arm-gnueabihf@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-0.126.0.tgz#e83d29edcf802bfaa9dc17918b6f0b65a6f11a27" + integrity sha512-onipc2wCDA7Bauzb4KK1mab0GsEDf4ujiIfWECdnmY/2LlzAoX3xdQRLAUyEDB1kn3yilHBrkmXDdHluyHXxiw== + +"@oxc-parser/binding-linux-arm-musleabihf@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-0.126.0.tgz#71a5d6af8fc3648a74cfb347ca5860dda8193fe7" + integrity sha512-5BuJJPohrV5NJ8lmcYOMbfRCUGoYH5J9HZHeuqOLwkHXWAuPMN3X1h8bC/2mWjmosdbfTtmyIdX3spS/TkqKNg== + +"@oxc-parser/binding-linux-arm64-gnu@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-0.126.0.tgz#ae4e38f82679c45f137fd6d5497cdc9462ba08e2" + integrity sha512-r2KApRgm2pOJaduRm6GOT8x0whcr67AyejNkSdzPt34GJ+Y3axcXN2mwlTs+8lfO/SSmpO5ZJGYiHYnxEE0jkw== + +"@oxc-parser/binding-linux-arm64-musl@0.126.0": + version "0.126.0" + resolved 
"https://registry.yarnpkg.com/@oxc-parser/binding-linux-arm64-musl/-/binding-linux-arm64-musl-0.126.0.tgz#bd078f5d0d3a321ee814454d5ceb741cf0fa0201" + integrity sha512-FQ+MMh7MT0Dr/u8+RWmWKlfoeWPQyHDbhhxJShJlYtROXXPHsRs9EvmQOZZ3sx4Nn7JU8NX+oyw2YzQ7anBJcA== + +"@oxc-parser/binding-linux-ppc64-gnu@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-0.126.0.tgz#a3268cd429b2793fcf393507aefb12ad97db90b0" + integrity sha512-Wv/T8C98hRQhGTlx2XFyLn5raRMp9U1lOQD+YnXNgAr7wHbJJpZ8mDBU7Rw+M3WytGcGTFcr6kqgfyQeHVtLbQ== + +"@oxc-parser/binding-linux-riscv64-gnu@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-0.126.0.tgz#7bad9e07ebee79b3dfdb7922bbd66261e6bdddbd" + integrity sha512-DHx1rT1zauW0ZbLHOiQh5AC9Xs3UkWx2XmfZHs+7nnWYr3sagrufoUQC+/XPwwjMIlCFXiFGM0sFh3TyOCZwqA== + +"@oxc-parser/binding-linux-riscv64-musl@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-0.126.0.tgz#2c9a58282dea92e9bc68458324cc4fcae8286842" + integrity sha512-umDc2mTShH0U2zcEYf8mIJ163seLJNn54ZUZYeI5jD4qlg9izPwoLrC2aNPKlMJTu6u/ysmQWiEvIiaAG+INkw== + +"@oxc-parser/binding-linux-s390x-gnu@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-0.126.0.tgz#38c061558216bf956a9d99e080a57b8d2e83c69c" + integrity sha512-PXXeWayclRtO1pxQEeCpiqIglQdhK2mAI2VX5xnsWdImzSB5GpoQ8TNw7vTCKk2k+GZuxl+q1knncidjCyUP9w== + +"@oxc-parser/binding-linux-x64-gnu@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-x64-gnu/-/binding-linux-x64-gnu-0.126.0.tgz#29ca0d09f1e1441193cc7a1fb85ccfc5454dafc1" + integrity sha512-wzocjxm34TbB3bFlqG65JiLtvf6ZDg2ZxRkLLbgXwDQUNU+0MPjQN8zy/0jBKNA5fnPLk3XeVdZ7Uin+7+CVkg== + +"@oxc-parser/binding-linux-x64-musl@0.126.0": + version 
"0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-linux-x64-musl/-/binding-linux-x64-musl-0.126.0.tgz#743ef737befd6b71a6a12a5a37d4fcd7a16fa294" + integrity sha512-e83uftP60jmkPs2+CW6T6A1GYzN2H6IumDAiTntv9WyHR73PI3ImHNBkYqnA3ukeKI3xjcCbhSh9QeJWmufxGQ== + +"@oxc-parser/binding-openharmony-arm64@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-openharmony-arm64/-/binding-openharmony-arm64-0.126.0.tgz#37421078a4edf6607423e62e4b8333a1b4a5401c" + integrity sha512-4WiOILHnPrTDY2/L4mE6PZCYwLN1d3ghma6BuTJ452CCgzRMt3uFplCtR+o3r9zdUWJYb370UizpI9CUcWXr1A== + +"@oxc-parser/binding-wasm32-wasi@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-wasm32-wasi/-/binding-wasm32-wasi-0.126.0.tgz#dbd23656c1a84d932b668968b2c73fbb3f476426" + integrity sha512-Y17hhnrQTrxgAxAyAq401vnN9URsAL4s5AjqpG1NDsXSlhe1yBNnns+rC2P6xcMoitgX5nKH2ryYt9oiFRlzLw== + dependencies: + "@emnapi/core" "1.9.2" + "@emnapi/runtime" "1.9.2" + "@napi-rs/wasm-runtime" "^1.1.4" + +"@oxc-parser/binding-win32-arm64-msvc@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-0.126.0.tgz#e44678d4895955a3874037262b2559c20a88b140" + integrity sha512-Znug1u1iRvT4VC3jANz6nhGBHsFwEFMxuimYpJFwMtsB6H5FcEoZRMmH26tHkSTD03JvDmG+gB65W3ajLjPcSw== + +"@oxc-parser/binding-win32-ia32-msvc@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-0.126.0.tgz#a59f68923ec3130f560601a53f9d18e57d9c08d1" + integrity sha512-qrw7mx5hFFTxVSXToOA40hpnjgNB/DJprZchtB4rDKNLKqkD3F26HbzaQeH1nxAKej0efSZfJd5Sw3qdtOLGhw== + +"@oxc-parser/binding-win32-x64-msvc@0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-parser/binding-win32-x64-msvc/-/binding-win32-x64-msvc-0.126.0.tgz#5eafc9c7dc5aa51ec2c9a7289fdd47fb1036c118" + integrity 
sha512-ibB1s+mPUFXvS7MFJO2jpw/aCNs/P6ifnWlRyTYB+WYBpniOiCcHQQskZneJtwcjQMDRol3RGG3ihoYnzXSY4w== + +"@oxc-project/types@^0.126.0": + version "0.126.0" + resolved "https://registry.yarnpkg.com/@oxc-project/types/-/types-0.126.0.tgz#9d9fa6fe9af5bc6c45996c6d9b9a3b3a4cd500e5" + integrity sha512-oGfVtjAgwQVVpfBrbtk4e1XDyWHRFta6BS3GWVzrF8xYBT2VGQAk39yJS/wFSMrZqoiCU4oghT3Ch0HaHGIHcQ== "@pkgjs/parseargs@^0.11.0": version "0.11.0" @@ -883,17 +883,17 @@ dependencies: type-detect "4.0.8" -"@sinonjs/fake-timers@^15.1.1": - version "15.1.1" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-15.1.1.tgz#e1a6f7171941aadcc31d2cea1744264d58b8b34c" - integrity sha512-cO5W33JgAPbOh07tvZjUOJ7oWhtaqGHiZw+11DPbyqh2kHTBc3eF/CjJDeQ4205RLQsX6rxCuYOroFQwl7JDRw== +"@sinonjs/fake-timers@^15.3.2": + version "15.3.2" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-15.3.2.tgz#afecc36681e26aab9e0fe809fd9ad578096a3058" + integrity sha512-mrn35Jl2pCpns+mE3HaZa1yPN5EYCRgiMI+135COjr2hr8Cls9DXqIZ57vZe2cz7y2XVSq92tcs6kGQcT1J8Rw== dependencies: "@sinonjs/commons" "^3.0.1" -"@sinonjs/samsam@^9.0.3": - version "9.0.3" - resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-9.0.3.tgz#da4cad6ee24ca0c9c205da16676f7d540df71f12" - integrity sha512-ZgYY7Dc2RW+OUdnZ1DEHg00lhRt+9BjymPKHog4PRFzr1U3MbK57+djmscWyKxzO1qfunHqs4N45WWyKIFKpiQ== +"@sinonjs/samsam@^10.0.2": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-10.0.2.tgz#d2cb34f0bcddb955b6971585c2f0334e68a9e66d" + integrity sha512-8lVwD1Df1BmzoaOLhMcGGcz/Jyr5QY2KSB75/YK1QgKzoabTeLdIVyhXNZK9ojfSKSdirbXqdbsXXqP9/Ve8+A== dependencies: "@sinonjs/commons" "^3.0.1" type-detect "^4.1.0" @@ -949,10 +949,10 @@ dependencies: undici-types "~5.26.4" -"@types/sinon@^21.0.0": - version "21.0.0" - resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-21.0.0.tgz#3a598a29b3aec0512a21e57ae0fd4c09aa013ca9" - integrity 
sha512-+oHKZ0lTI+WVLxx1IbJDNmReQaIsQJjN2e7UUrJHEeByG7bFeKJYsv1E75JxTQ9QKJDp21bAa/0W2Xo4srsDnw== +"@types/sinon@^21.0.1": + version "21.0.1" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-21.0.1.tgz#f995e2afdf15be832d5f1645803d82a8eb95a1bc" + integrity sha512-5yoJSqLbjH8T9V2bksgRayuhpZy+723/z6wBOR+Soe4ZlXC0eW8Na71TeaZPUWDQvM7LYKa9UGFc6LRqxiR5fQ== dependencies: "@types/sinonjs__fake-timers" "*" @@ -1258,23 +1258,23 @@ builtin-modules@^5.0.0: resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-5.0.0.tgz#9be95686dedad2e9eed05592b07733db87dcff1a" integrity sha512-bkXY9WsVpY7CvMhKSR6pZilZu9Ln5WDrKVBUXf2S443etkmEO4V58heTecXcUIsNsi4Rx8JUO4NfX1IcQl4deg== -bun@1.3.11: - version "1.3.11" - resolved "https://registry.yarnpkg.com/bun/-/bun-1.3.11.tgz#88f26443d4e6c2b5cc8ae887bf21133750a18702" - integrity sha512-AvXWYFO6j/ZQ7bhGm4X6eilq2JHsDVC90ZM32k2B7/srhC2gs3Sdki1QTbwrdRCo8o7eT+167vcB1yzOvPdbjA== +bun@1.3.12: + version "1.3.12" + resolved "https://registry.yarnpkg.com/bun/-/bun-1.3.12.tgz#1879388cb906cc085f95a2b6e59e05b347cba314" + integrity sha512-KLwDUqs5WIny/94F4xZ4QfaAE6YWyjR+s79pt/ItQhk2CG+PJQ5xL6VuOWhiyN2eP3fryZK95vog9CTLCaYV2Q== optionalDependencies: - "@oven/bun-darwin-aarch64" "1.3.11" - "@oven/bun-darwin-x64" "1.3.11" - "@oven/bun-darwin-x64-baseline" "1.3.11" - "@oven/bun-linux-aarch64" "1.3.11" - "@oven/bun-linux-aarch64-musl" "1.3.11" - "@oven/bun-linux-x64" "1.3.11" - "@oven/bun-linux-x64-baseline" "1.3.11" - "@oven/bun-linux-x64-musl" "1.3.11" - "@oven/bun-linux-x64-musl-baseline" "1.3.11" - "@oven/bun-windows-aarch64" "1.3.11" - "@oven/bun-windows-x64" "1.3.11" - "@oven/bun-windows-x64-baseline" "1.3.11" + "@oven/bun-darwin-aarch64" "1.3.12" + "@oven/bun-darwin-x64" "1.3.12" + "@oven/bun-darwin-x64-baseline" "1.3.12" + "@oven/bun-linux-aarch64" "1.3.12" + "@oven/bun-linux-aarch64-musl" "1.3.12" + "@oven/bun-linux-x64" "1.3.12" + "@oven/bun-linux-x64-baseline" "1.3.12" + "@oven/bun-linux-x64-musl" "1.3.12" + 
"@oven/bun-linux-x64-musl-baseline" "1.3.12" + "@oven/bun-windows-aarch64" "1.3.12" + "@oven/bun-windows-x64" "1.3.12" + "@oven/bun-windows-x64-baseline" "1.3.12" busboy@^1.6.0: version "1.6.0" @@ -1620,10 +1620,10 @@ diff@^7.0.0: resolved "https://registry.yarnpkg.com/diff/-/diff-7.0.0.tgz#3fb34d387cd76d803f6eebea67b921dab0182a9a" integrity sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw== -diff@^8.0.3: - version "8.0.3" - resolved "https://registry.yarnpkg.com/diff/-/diff-8.0.3.tgz#c7da3d9e0e8c283bb548681f8d7174653720c2d5" - integrity sha512-qejHi7bcSD4hQAZE0tNAawRK1ZtafHDmMTMkrrIGgSLl7hTnQHmKCeB45xAcbfTqK2zowkM3j3bHt/4b/ARbYQ== +diff@^8.0.4: + version "8.0.4" + resolved "https://registry.yarnpkg.com/diff/-/diff-8.0.4.tgz#4f5baf3188b9b2431117b962eb20ba330fadf696" + integrity sha512-DPi0FmjiSU5EvQV0++GFDOJ9ASQUVFh5kD+OzOnYdi7n3Wpm9hWWGfB/O2blfHcMVTL5WkQXSnRiK9makhrcnw== doctrine@^2.1.0: version "2.1.0" @@ -1830,12 +1830,12 @@ eslint-module-utils@^2.12.1: dependencies: debug "^3.2.7" -eslint-plugin-cypress@^6.2.2: - version "6.2.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-cypress/-/eslint-plugin-cypress-6.2.2.tgz#26c7ab6c6dedc2ad4791ac939ef8b5386cd04f1d" - integrity sha512-lOhmMWb5/+zv28EaDOT7C3lgGgWL8DMZglTUNUaaGfleK89joDBRXX8LV01ygx3dK1RbvG54b8t025/5QEUKgg== +eslint-plugin-cypress@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-cypress/-/eslint-plugin-cypress-6.3.1.tgz#b0a4ce9073f39412c3f2b5b8b452a75c1885fec1" + integrity sha512-iTJtdIZbyCUlagEI4YlVcwgPFV7X379Qi/upujaD4kvOaQkMvzmpt90vfSnaqgqprp/HPIvhnzv3fdI7mYV4QQ== dependencies: - globals "^17.4.0" + globals "^17.5.0" eslint-plugin-es-x@^7.8.0: version "7.8.0" @@ -2361,10 +2361,10 @@ globals@^15.11.0, globals@^15.14.0: resolved "https://registry.yarnpkg.com/globals/-/globals-15.15.0.tgz#7c4761299d41c32b075715a4ce1ede7897ff72a8" integrity 
sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg== -globals@^17.2.0, globals@^17.4.0: - version "17.4.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-17.4.0.tgz#33d7d297ed1536b388a0e2f4bcd0ff19c8ff91b5" - integrity sha512-hjrNztw/VajQwOLsMNT1cbJiH2muO3OROCHnbehc8eY5JyD2gqz4AcMHPqgaOR59DjgUjYAYLeH699g/eWi2jw== +globals@^17.2.0, globals@^17.4.0, globals@^17.5.0: + version "17.5.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-17.5.0.tgz#a82c641d898f8dfbe0e81f66fdff7d0de43f88c6" + integrity sha512-qoV+HK2yFl/366t2/Cb3+xxPUo5BuMynomoDmiaZBIdbs+0pYbjfZU+twLhGKp4uCZ/+NbtpVepH5bGCxRyy2g== globalthis@^1.0.4: version "1.0.4" @@ -3394,33 +3394,33 @@ own-keys@^1.0.1: object-keys "^1.1.1" safe-push-apply "^1.0.0" -oxc-parser@^0.121.0: - version "0.121.0" - resolved "https://registry.yarnpkg.com/oxc-parser/-/oxc-parser-0.121.0.tgz#da2670453eecf84863b48eccad353f365b7518bf" - integrity sha512-ek9o58+SCv6AV7nchiAcUJy1DNE2CC5WRdBcO0mF+W4oRjNQfPO7b3pLjTHSFECpHkKGOZSQxx3hk8viIL5YCg== +oxc-parser@^0.126.0: + version "0.126.0" + resolved "https://registry.yarnpkg.com/oxc-parser/-/oxc-parser-0.126.0.tgz#993c07830f188980828cc5f180585c27c7bbebb9" + integrity sha512-FktCvLby/mOHyuijZt22+nOt10dS24gGUZE3XwIbUg7Kf4+rer3/5T7RgwzazlNuVsCjPloZ3p8E+4ONT3A8Kw== dependencies: - "@oxc-project/types" "^0.121.0" + "@oxc-project/types" "^0.126.0" optionalDependencies: - "@oxc-parser/binding-android-arm-eabi" "0.121.0" - "@oxc-parser/binding-android-arm64" "0.121.0" - "@oxc-parser/binding-darwin-arm64" "0.121.0" - "@oxc-parser/binding-darwin-x64" "0.121.0" - "@oxc-parser/binding-freebsd-x64" "0.121.0" - "@oxc-parser/binding-linux-arm-gnueabihf" "0.121.0" - "@oxc-parser/binding-linux-arm-musleabihf" "0.121.0" - "@oxc-parser/binding-linux-arm64-gnu" "0.121.0" - "@oxc-parser/binding-linux-arm64-musl" "0.121.0" - "@oxc-parser/binding-linux-ppc64-gnu" "0.121.0" - "@oxc-parser/binding-linux-riscv64-gnu" "0.121.0" - 
"@oxc-parser/binding-linux-riscv64-musl" "0.121.0" - "@oxc-parser/binding-linux-s390x-gnu" "0.121.0" - "@oxc-parser/binding-linux-x64-gnu" "0.121.0" - "@oxc-parser/binding-linux-x64-musl" "0.121.0" - "@oxc-parser/binding-openharmony-arm64" "0.121.0" - "@oxc-parser/binding-wasm32-wasi" "0.121.0" - "@oxc-parser/binding-win32-arm64-msvc" "0.121.0" - "@oxc-parser/binding-win32-ia32-msvc" "0.121.0" - "@oxc-parser/binding-win32-x64-msvc" "0.121.0" + "@oxc-parser/binding-android-arm-eabi" "0.126.0" + "@oxc-parser/binding-android-arm64" "0.126.0" + "@oxc-parser/binding-darwin-arm64" "0.126.0" + "@oxc-parser/binding-darwin-x64" "0.126.0" + "@oxc-parser/binding-freebsd-x64" "0.126.0" + "@oxc-parser/binding-linux-arm-gnueabihf" "0.126.0" + "@oxc-parser/binding-linux-arm-musleabihf" "0.126.0" + "@oxc-parser/binding-linux-arm64-gnu" "0.126.0" + "@oxc-parser/binding-linux-arm64-musl" "0.126.0" + "@oxc-parser/binding-linux-ppc64-gnu" "0.126.0" + "@oxc-parser/binding-linux-riscv64-gnu" "0.126.0" + "@oxc-parser/binding-linux-riscv64-musl" "0.126.0" + "@oxc-parser/binding-linux-s390x-gnu" "0.126.0" + "@oxc-parser/binding-linux-x64-gnu" "0.126.0" + "@oxc-parser/binding-linux-x64-musl" "0.126.0" + "@oxc-parser/binding-openharmony-arm64" "0.126.0" + "@oxc-parser/binding-wasm32-wasi" "0.126.0" + "@oxc-parser/binding-win32-arm64-msvc" "0.126.0" + "@oxc-parser/binding-win32-ia32-msvc" "0.126.0" + "@oxc-parser/binding-win32-x64-msvc" "0.126.0" p-limit@^2.2.0: version "2.3.0" @@ -3999,16 +3999,15 @@ signal-exit@^4.0.1: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== -sinon@^21.0.3: - version "21.0.3" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-21.0.3.tgz#fd3a2387ffe4fdbbfbbf3a0858f18d46c4acb34e" - integrity sha512-0x8TQFr8EjADhSME01u1ZK31yv2+bd6Z5NrBCHVM+n4qL1wFqbxftmeyi3bwlr49FbbzRfrqSFOpyHCOh/YmYA== 
+sinon@^21.1.2: + version "21.1.2" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-21.1.2.tgz#2404a6003853e6fc30430825fd21fe87675f29bf" + integrity sha512-FS6mN+/bx7e2ajpXkEmOcWB6xBzWiuNoAQT18/+a20SS4U7FSYl8Ms7N6VTUxN/1JAjkx7aXp+THMC8xdpp0gA== dependencies: "@sinonjs/commons" "^3.0.1" - "@sinonjs/fake-timers" "^15.1.1" - "@sinonjs/samsam" "^9.0.3" - diff "^8.0.3" - supports-color "^7.2.0" + "@sinonjs/fake-timers" "^15.3.2" + "@sinonjs/samsam" "^10.0.2" + diff "^8.0.4" source-map@^0.6.1: version "0.6.1" @@ -4193,7 +4192,7 @@ strip-json-comments@^3.1.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -supports-color@^7.1.0, supports-color@^7.2.0: +supports-color@^7.1.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== @@ -4372,10 +4371,10 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== -typescript@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-6.0.2.tgz#0b1bfb15f68c64b97032f3d78abbf98bdbba501f" - integrity sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ== +typescript@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-6.0.3.tgz#90251dc007916e972786cb94d74d15b185577d21" + integrity sha512-y2TvuxSZPDyQakkFRPZHKFm+KKVqIisdg9/CZwm9ftvKXLP8NRWj38/ODjNbr43SsoXqNuAisEf1GdCxqWcdBw== unbox-primitive@^1.1.0: version "1.1.0" @@ -4512,10 +4511,10 @@ word-wrap@^1.2.5: 
resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== -workerpool@^10.0.0: - version "10.0.1" - resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-10.0.1.tgz#9e74de7df694f8f5ad0eec2b6d42553c664f5123" - integrity sha512-NAnKwZJxWlj/U1cp6ZkEtPE+GQY1S6KtOS3AlCiPfPFLxV3m64giSp7g2LsNJxzYCocDT7TSl+7T0sgrDp3KoQ== +workerpool@^10.0.2: + version "10.0.2" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-10.0.2.tgz#09e3cbf65d08296e82f7cc3df7c70fa8c6a1fd69" + integrity sha512-8PCeZlCwu0+8hXruze1ahYNsY+M0LOCmbmySZ9BWWqWIXP9TAXa6FZCxACTDL/0j47pFcC4xW98Gr8nAC5oymg== workerpool@^9.2.0: version "9.3.4"